text
stringlengths
1
1.05M
package net.librec.spark.math.structure

import net.librec.math.structure
import net.librec.spark.data.Rating

/**
 * A [[Vector]] tagged with an integer index (a userID or itemID).
 *
 * NOTE(review): `abstract case class` with non-case subclasses is discouraged
 * in Scala; it is kept here to preserve the existing public API
 * (companion `apply`/`unapply`, pattern matching) for current callers.
 *
 * @param index  index identifying this vector (userID or itemID)
 * @param vector the wrapped vector holding the actual values
 * @author WangYuFeng
 */
abstract case class IndexedVector(index: Int, vector: Vector) extends Vector with Serializable {

  /** The index this vector is tagged with. */
  def getIndex: Int = index

  /** The underlying values of the wrapped vector. */
  def values: Array[Double] = vector.values

  override def toString: String = "Index: " + getIndex + " " + super.toString
}

/**
 * Dense variant of [[IndexedVector]]: delegates all vector operations to the
 * wrapped [[DenseVector]].
 *
 * @param index  userID or itemID
 * @param vector the wrapped dense vector
 */
class IndexedDenseVector(index: Int, vector: DenseVector) extends IndexedVector(index, vector) {

  /** @return userID or itemID */
  override def getIndex: Int = index

  /** Size of the vector. */
  override def size: Int = vector.size

  /** Converts the instance to a double array. */
  override def toArray: Array[Double] = vector.toArray

  /** Convert this vector to the mllib-local representation. */
  override def asLocalVector: structure.Vector = vector.asLocalVector

  /** Applies `f` to every active (index, value) entry of the wrapped vector. */
  override def foreachActive(f: (Int, Double) => Unit): Unit = vector.foreachActive(f)
}

/**
 * Sparse variant of [[IndexedVector]]: delegates all vector operations to the
 * wrapped [[SparseVector]].
 *
 * @param index  userID or itemID
 * @param vector the wrapped sparse vector
 */
class IndexedSparseVector(index: Int, vector: SparseVector) extends IndexedVector(index, vector) {

  /**
   * Build from raw ratings: derives the index (user or item id, selected by
   * `userOrItem`) and a sparse vector of dimension `size`.
   */
  def this(userOrItem: String, ratingsToConvert: Iterable[Rating], size: Int) =
    this(
      IndexedSparseVector.getIndexFromRatings(userOrItem, ratingsToConvert),
      IndexedSparseVector.parseToSparseVector(ratingsToConvert, size))

  /** @return userID or itemID */
  override def getIndex: Int = index

  /** Size of the vector. */
  override def size: Int = vector.size

  /** Converts the instance to a double array. */
  override def toArray: Array[Double] = vector.toArray

  /** Convert this vector to the mllib-local representation. */
  override def asLocalVector: structure.Vector = vector.asLocalVector

  /** Applies `f` to every active (index, value) entry of the wrapped vector. */
  override def foreachActive(f: (Int, Double) => Unit): Unit = vector.foreachActive(f)
}

object IndexedSparseVector {

  /**
   * Parse ratings into a [[SparseVector]] of dimension `size`, with entries
   * sorted by item id (SparseVector requires ascending indices).
   *
   * @param ratings ratings to convert
   * @param size    dimension of the resulting vector
   */
  def parseToSparseVector(ratings: Iterable[Rating], size: Int): SparseVector = {
    val sorted = ratings.map(rat => (rat.item, rat.rate)).toList.sortBy(_._1)
    val products = sorted.map(rat => rat._1).toArray
    val rats = sorted.map(rat => rat._2).toArray
    new SparseVector(size, products, rats)
  }

  /**
   * Get the user's or item's id from the ratings (taken from the first rating;
   * callers are expected to pass ratings belonging to a single user/item).
   *
   * FIX: the "item" branch previously returned `head.user` as well, so item
   * vectors were indexed by the wrong id.
   *
   * @param userOrItem       "user" or "item"
   * @param ratingsToConvert ratings to take the id from
   * @return id, or 0 when the ratings are empty or the selector is unknown
   */
  protected def getIndexFromRatings(userOrItem: String, ratingsToConvert: Iterable[Rating]): Int =
    if (ratingsToConvert.isEmpty) 0
    else userOrItem match {
      case "user" => ratingsToConvert.head.user
      case "item" => ratingsToConvert.head.item
      case _      => 0
    }

  def apply(index: Int, size: Int, indices: Array[Int], ratings: Array[Double]): IndexedSparseVector =
    new IndexedSparseVector(index, new SparseVector(size, indices, ratings))
}
package com.symulakr.dinstar.smsserver.message.body;

import com.symulakr.dinstar.smsserver.common.ToBytes;

/**
 * Base class for SMS-server response bodies.
 * <p>
 * Subclasses supply the raw payload via {@link #createBody()}; this class
 * memoizes the result so the payload is only built once per instance.
 */
public abstract class ResponseBody implements ToBytes {

   /** Lazily-built payload; null until {@link #toBytes()} is first called. */
   private byte[] cachedBody;

   /**
    * Build the raw byte payload for this response. Invoked at most once.
    */
   protected abstract byte[] createBody();

   @Override
   public byte[] toBytes() {
      byte[] body = cachedBody;
      if (body == null) {
         body = createBody();
         cachedBody = body;
      }
      return body;
   }

   /**
    * @return length in bytes of the (lazily built) body
    */
   public int getLength() {
      return toBytes().length;
   }
}
def dfs_traversal(graph, start_vertex):
    """Return the vertices reachable from start_vertex in depth-first preorder.

    graph is an adjacency mapping: vertex -> ordered sequence of neighbors.
    Iterative explicit-stack formulation; neighbors are pushed in reverse so
    pop order matches the recursive left-to-right visit order exactly.
    """
    visited = set()
    order = []
    stack = [start_vertex]
    while stack:
        vertex = stack.pop()
        if vertex in visited:
            continue
        visited.add(vertex)
        order.append(vertex)
        # Reverse so the first-listed neighbor is popped (visited) first.
        for neighbor in list(graph[vertex])[::-1]:
            if neighbor not in visited:
                stack.append(neighbor)
    return order
# Resume semi-supervised training on CIFAR-10 (40 labels, WideResNet) on GPU 6.
# FIX: the original had `checkpoint.pth.tar\` with no space before the
# continuation backslash, which fused the --resume value and `--out` into one
# argument. A space now separates the path from the line continuation.
CUDA_VISIBLE_DEVICES=6 python train.py \
    --dataset cifar10 \
    --num-labeled 40 \
    --arch wideresnet \
    --batch-size 64 \
    --lr 0.03 \
    --expand-labels \
    --seed 5 \
    --resume results/epoch_900/checkpoint.pth.tar \
    --out results/epoch_900
<reponame>mufeili/GNNLens2<filename>vis_src/src/components/DataRuns/index.tsx<gh_stars>10-100 import DataRuns from './DataRuns' export default DataRuns;
package lucandra; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.nio.ByteBuffer; import java.util.*; import java.util.concurrent.ConcurrentNavigableMap; import java.util.concurrent.ConcurrentSkipListMap; import org.apache.cassandra.db.*; import org.apache.cassandra.thrift.ColumnParent; import org.apache.cassandra.utils.ByteBufferUtil; import org.apache.log4j.Logger; import org.apache.lucene.index.Term; public class TermCache { private final static Term emptyTerm = new Term(""); private final static ConcurrentNavigableMap<Term, LucandraTermInfo[]> emptyMap = new ConcurrentSkipListMap<Term, LucandraTermInfo[]>(); private final static ColumnParent fieldColumnFamily = new ColumnParent( CassandraUtils.metaInfoColumnFamily); private final static Logger logger = Logger .getLogger(TermCache.class); public final String indexName; public final ByteBuffer termsListKey; public final ConcurrentSkipListMap<Term, LucandraTermInfo[]> termList; public final ConcurrentSkipListMap<Term, Pair<Term, Term>> termQueryBoundries; public TermCache(String indexName) throws IOException { this.indexName = indexName; termsListKey = CassandraUtils.hashKeyBytes(indexName.getBytes("UTF-8"), CassandraUtils.delimeterBytes, "terms" .getBytes("UTF-8")); termList = new ConcurrentSkipListMap<Term, LucandraTermInfo[]>(); // Get the boundries of terms each term termQueryBoundries = new ConcurrentSkipListMap<Term, Pair<Term, Term>>(); } // Cache check only public LucandraTermInfo[] get(Term term) { return termList.get(term); } public ConcurrentNavigableMap<Term, LucandraTermInfo[]> skipTo(Term skip) throws IOException { Pair<Term, Term> range = null; int bufferSize = termList.isEmpty() ? 
1 : 3; // verify we've buffered sufficiently Map.Entry<Term, Pair<Term, Term>> tailEntry = termQueryBoundries.ceilingEntry(skip); boolean needsBuffering = true; if (tailEntry != null) { range = tailEntry.getValue(); //skip term must be within a buffered range avoid rebuffering if (skip.compareTo(range.left) >= 0 && (!range.right.equals(emptyTerm) && skip.compareTo(range.right) < 0)) { needsBuffering = false; } } ConcurrentNavigableMap<Term, LucandraTermInfo[]> subList = emptyMap; if (needsBuffering) { range = bufferTerms(skip, bufferSize); } //logger.info(Thread.currentThread().getName()+" rebuffered "+needsBuffering+" "+range); if (skip.compareTo(range.left) >= 0 && (!range.right.equals(emptyTerm)) && skip.compareTo(range.right) <= 0) { subList = termList.subMap(skip, true, range.right, true); } return subList; } public static LucandraTermInfo[] convertTermInfo(Collection<IColumn> docs) { LucandraTermInfo termInfo[] = new LucandraTermInfo[docs.size()]; int i = 0; for (IColumn col : docs) { if (i == termInfo.length) break; if (i == 0 && col instanceof SuperColumn) throw new IllegalStateException( "TermInfo ColumnFamily is a of type Super: This is no longer supported, please see NEWS.txt"); if (col == null || col.name() == null || col.value() == null) throw new IllegalStateException("Encountered missing column: " + col); termInfo[i] = new LucandraTermInfo(CassandraUtils.readVInt(col.name()), col.value()); i++; } return termInfo; } public Pair<Term, Term> bufferTerms(Term startTerm, int bufferSize) throws IOException { assert bufferSize > 0; long start = System.currentTimeMillis(); // Scan range of terms in this field (reversed, so we have a exit point) List<Row> rows = CassandraUtils.robustRead(CassandraUtils.consistency, new SliceFromReadCommand( CassandraUtils.keySpace, termsListKey, fieldColumnFamily, CassandraUtils.createColumnName(startTerm), ByteBufferUtil.EMPTY_BYTE_BUFFER, false, bufferSize)); ColumnParent columnParent = new 
ColumnParent(CassandraUtils.termVecColumnFamily); // Collect read commands Collection<IColumn> columns; if (rows == null || rows.size() != 1 || rows.get(0).cf == null) { columns = new ArrayList<IColumn>(); } else { columns = rows.get(0).cf.getSortedColumns(); if (logger.isDebugEnabled()) logger.debug("Found " + columns.size() + " terms under field " + startTerm.field()); } Pair<Term, Term> queryRange; if (!columns.isEmpty()) { //end of range will get filled in later queryRange = new Pair<Term, Term>(startTerm, null); } else { queryRange = new Pair<Term, Term>(startTerm, emptyTerm); termQueryBoundries.put(startTerm, queryRange); return queryRange; } Map<Term, Pair<Term, Term>> localRanges = new HashMap<Term, Pair<Term, Term>>(columns.size()); localRanges.put(startTerm, queryRange); List<ReadCommand> reads = new ArrayList<ReadCommand>(columns.size()); for (IColumn column : columns) { if(!column.isLive() || column instanceof DeletedColumn) continue; Term term = CassandraUtils.parseTerm(ByteBufferUtil.string(column.name(), CassandraUtils.UTF_8)); localRanges.put(term, queryRange); ByteBuffer rowKey; try { rowKey = CassandraUtils.hashKeyBytes(indexName.getBytes("UTF-8"), CassandraUtils.delimeterBytes, term .field().getBytes("UTF-8"), CassandraUtils.delimeterBytes, term.text().getBytes("UTF-8")); } catch (UnsupportedEncodingException e) { throw new RuntimeException("This JVM doesn't support UTF-8"); } if (logger.isDebugEnabled()) logger.debug("scanning row: " + ByteBufferUtil.string(rowKey)); reads.add((ReadCommand) new SliceFromReadCommand(CassandraUtils.keySpace, rowKey, columnParent, ByteBufferUtil.EMPTY_BYTE_BUFFER, ByteBufferUtil.EMPTY_BYTE_BUFFER, false, Integer.MAX_VALUE)); } rows = CassandraUtils.robustRead(CassandraUtils.consistency, reads.toArray(new ReadCommand[] {})); // term to start with next time int actualReadSize = rows.size(); if (logger.isDebugEnabled()) { logger.debug("Found " + rows.size() + " rows in range:" + startTerm + " to " + "" + " in " + 
(System.currentTimeMillis() - start) + "ms"); } if (actualReadSize > 0) { for (Row row : rows) { if (row.cf == null) { //logger.info("Encountered deleted row"); continue; } String key = ByteBufferUtil.string(row.key.key, CassandraUtils.UTF_8); // term keys look like wikipedia/body/wiki String termStr = key.substring(key.indexOf(CassandraUtils.delimeter) + CassandraUtils.delimeter.length()); Term term = CassandraUtils.parseTerm(termStr); columns = row.cf.getSortedColumns(); if (logger.isDebugEnabled()) logger.debug(term + " has " + columns.size()); // remove any deleted columns Collection<IColumn> columnsToRemove = null; for (IColumn col : columns) { if (!col.isLive()) { if (columnsToRemove == null) columnsToRemove = new ArrayList<IColumn>(); if(logger.isDebugEnabled()) logger.debug("Removing "+col+" documents from "+term); columnsToRemove.add(col); } if (logger.isDebugEnabled()) logger.debug("Kept DocId " + CassandraUtils.readVInt(col.name())); } if (columnsToRemove != null) { columns.removeAll(columnsToRemove); } if (!columns.isEmpty()) { if (logger.isDebugEnabled()) logger.debug("saving term: " + term + " with " + columns.size() + " docs"); termList.put(term, convertTermInfo(columns)); //update end of range if(queryRange.right == null || queryRange.right.compareTo(term) < 0) queryRange.right = term; } else { if (logger.isDebugEnabled()) logger.debug("Skipped term: " + term); } } if(queryRange.right == null) queryRange.right = emptyTerm; // to recall we did this query termQueryBoundries.putAll(localRanges); } long end = System.currentTimeMillis(); if (logger.isDebugEnabled()) { logger.debug("loadTerms: " + startTerm + "(" + actualReadSize + ") took " + (end - start) + "ms"); } return queryRange; } }
# frozen_string_literal: true template '/etc/nsswitch.conf' do source 'etc/nsswitch.conf.erb' action :create end template '/etc/ntp.conf' do source 'etc/ntp.conf.erb' action :create end
import fetchMock from 'fetch-mock'; import thunk from 'redux-thunk'; import configureMockStore from 'redux-mock-store'; import config from '@authenticator/config'; import { verify, checkAddress } from '@authenticator/contact/actions'; import { REQUEST, REQUEST_ERROR, VERIFY_CONTACT, VERIFIED, } from '@authenticator/contact/constants'; import { mockToken } from '@authenticator/identity/mock'; describe('Contact Actions: Check Address Test', (): void => { let storeMock: any; beforeEach((): void => { const middlewares = [ thunk ]; const mockStore = configureMockStore(middlewares); storeMock = mockStore({}); }); afterEach((): void => { fetchMock.restore(); }); test('dispatches REQUEST_ERROR on request', async (): Promise<void> => { const url = `${config.api.baseURL}/api/v1/contact/check-address`; fetchMock.mock(url, { status: 400, body: { error: { message: 'Email address is invalid', code: 'invalid_field', }, }, }); await storeMock.dispatch(checkAddress({ deliveryMethod: 'email', address: 'invalid' })); expect(storeMock.getActions()).toEqual([ { type: REQUEST }, { type: REQUEST_ERROR, error: { code: 'invalid_field', message: 'Email address is invalid', }}, ]); }); test('dispatches REQUEST_ERROR on setting token', async (): Promise<void> => { const url = `${config.api.baseURL}/api/v1/contact/check-address`; fetchMock.mock(url, { status: 201, body: { token: 'jwt-token', clientID: 'client-id', }, }); await storeMock.dispatch(checkAddress({ deliveryMethod: 'email', address: '<EMAIL>' })); expect(storeMock.getActions()).toEqual([ { type: REQUEST }, { type: REQUEST_ERROR, error: { code: 'invalid_token', message: 'Token is not correctly formatted', }}, ]); }); test('dispatches VERIFY_CONTACT success', async (): Promise<void> => { const url = `${config.api.baseURL}/api/v1/contact/check-address`; fetchMock.mock(url, { status: 201, body: { token: mockToken, clientID: 'client-id', }, }); await storeMock.dispatch(checkAddress({ deliveryMethod: 'email', address: '<EMAIL>' })); 
expect(storeMock.getActions()).toEqual([ { type: REQUEST }, { type: VERIFY_CONTACT }, ]); }); }); describe('ContactVerify Actions: Verify Test', (): void => { let storeMock: any; beforeEach((): void => { const middlewares = [ thunk ]; const mockStore = configureMockStore(middlewares); storeMock = mockStore({}); }); afterEach((): void => { fetchMock.restore(); }); test('dispatches error on request', async (): Promise<void> => { const url = `${config.api.baseURL}/api/v1/contact/verify`; fetchMock.mock(url, { status: 400, body: { error: { message: 'Code is invalid', code: 'invalid_code', }, }, }); await storeMock.dispatch(verify({ code: '123456', isDisabled: false })); expect(storeMock.getActions()).toEqual([ { type: REQUEST }, { type: REQUEST_ERROR, error: { code: 'invalid_code', message: 'Code is invalid', }}, ]); }); test('dispatches error on setting token', async (): Promise<void> => { const url = `${config.api.baseURL}/api/v1/contact/verify`; fetchMock.mock(url, { status: 201, body: { token: 'jwt-token', clientID: 'client-id', }, }); await storeMock.dispatch(verify({ code: '123456', isDisabled: false })); expect(storeMock.getActions()).toEqual([ { type: REQUEST }, { type: REQUEST_ERROR, error: { code: 'invalid_token', message: 'Token is not correctly formatted', }}, ]); }); test('dispatches success', async (): Promise<void> => { const url = `${config.api.baseURL}/api/v1/contact/verify`; fetchMock.mock(url, { status: 201, body: { token: <PASSWORD>, clientID: 'client-id', }, }); await storeMock.dispatch(verify({ code: '123456', isDisabled: false })); expect(storeMock.getActions()).toEqual([ { type: REQUEST }, { type: VERIFIED }, ]); }); });
# Example Python program for finding the area of a circle
# Adjust for compatibility with Python3 and above
import sys

# Fixed low-precision approximation of pi, kept to preserve existing output.
PI = 3.14

# Python 3 removed the `long` type; alias it to int so py2-era calls work.
if sys.version_info.major >= 3:
    long = int


def calculateArea(radius):
    """Return the area of a circle of the given radius, using PI = 3.14."""
    return PI * radius ** 2


print("Area is %.6f" % calculateArea(long(5)))
import { async, ComponentFixture, TestBed } from '@angular/core/testing';

import { MuiGridComponent } from './mui-grid.component';

// Unit tests for MuiGridComponent (standard Angular CLI scaffold).
// NOTE(review): `async` from @angular/core/testing is deprecated in newer
// Angular versions in favour of `waitForAsync` — confirm the project's
// Angular version before migrating.
describe('MuiGridComponent', () => {
  let component: MuiGridComponent;
  let fixture: ComponentFixture<MuiGridComponent>;

  // Compile the component's template and styles before each test.
  beforeEach(async(() => {
    TestBed.configureTestingModule({
      declarations: [ MuiGridComponent ]
    })
    .compileComponents();
  }));

  // Create a fresh component instance and run initial change detection.
  beforeEach(() => {
    fixture = TestBed.createComponent(MuiGridComponent);
    component = fixture.componentInstance;
    fixture.detectChanges();
  });

  it('should create', () => {
    expect(component).toBeTruthy();
  });
});
'use strict';

var path = require('path');
var express = require('express');
var auth = require('http-auth');
var nunjucks = require('nunjucks');
var marked = require('marked');

// Runtime configuration, overridable via environment variables.
var config = {
  port: process.env.PORT || 3300,
  accessToken: process.env.SUBSIDYSTORIESEU_ASSESS_TOKEN || ''
};

var app = express();
app.set('config', config);
app.set('port', config.port);

var middlewares = [];

// simple auth
// When an access token is configured, protect the pages with HTTP Basic auth.
// The token is accepted as the username, as the password, or as both.
if (config.accessToken != '') {
  middlewares.push(auth.connect(auth.basic(
    {realm: 'Protected area.'},
    function(username, password, callback) {
      var token = config.accessToken;
      var allow = ((username == token) && (password == '')) ||
        ((username == '') && (password == token)) ||
        ((username == token) && (password == token));
      callback(allow);
    }
  )));
}

// assets
app.use('/public', express.static(path.join(__dirname, 'public')));

// Nunjucks templating wired into Express, with `marked` exposed both as a
// template global and as a filter for rendering Markdown inside templates.
var env = nunjucks.configure(path.join(__dirname, '/app/views'), {
  autoescape: true,
  express: app
});
env.addGlobal('marked', marked);
env.addFilter('marked', marked);

// pages
// Serve index.html and country.html; a bare "/" falls back to index.html.
app.get(
  /\/(|index\.html|country\.html)$/,
  middlewares,
  function(req, res) {
    var template = (req.params[0] != '' ? req.params[0] : 'index.html');
    res.render(template);
  }
);

app.listen(app.get('port'), function() {
  console.log('Listening on :' + app.get('port'));
});
<gh_stars>0 var numeral = require('numeral'); module.exports = function Cart(oldCart) { this.items = oldCart.items || {}; this.totalQty = oldCart.totalQty || 0; this.totalPrice = oldCart.totalPrice || 0; this.totalQtyStr = numeral(oldCart.totalQty).format('(0,0.00)'); this.totalPriceStr = numeral(oldCart.totalPrice).format('(0,0.00)'); this.add = function(item, id) { var storedItem = this.items[id]; if (!storedItem) { storedItem = this.items[id] = {item: item, qty: 0, price: 0}; } storedItem.qty++; storedItem.price = storedItem.item.price; storedItem.totprod = storedItem.item.price * storedItem.qty; storedItem.qtyStr = numeral(storedItem.qty).format('(0,0.00)'); storedItem.priceStr = numeral(storedItem.price).format('(0,0.00)'); storedItem.totprodStr = numeral(storedItem.totprod).format('(0,0.00)'); this.totalQty++; this.totalPrice += storedItem.item.price; this.totalQtyStr = numeral(this.totalQty).format('(0,0.00)'); this.totalPriceStr = numeral(this.totalPrice).format('(0,0.00)'); }; this.less1 = function(item, id) { var storedItem = this.items[id]; if (storedItem.qty > 1) { storedItem.qty--; } storedItem.totprod = storedItem.item.price * storedItem.qty; storedItem.qtyStr = numeral(storedItem.qty).format('(0,0.00)'); storedItem.priceStr = numeral(storedItem.price).format('(0,0.00)'); storedItem.totprodStr = numeral(storedItem.totprod).format('(0,0.00)'); this.totalQty--; this.totalPrice -= storedItem.item.price; this.totalQtyStr = numeral(this.totalQty).format('(0,0.00)'); this.totalPriceStr = numeral(this.totalPrice).format('(0,0.00)'); }; this.generateArray = function() { var arr= []; for (var id in this.item) { arr.push.item[id]; } return arr; } };
# Private key for the bankroller account: taken from the bankrollerPrivateKey
# environment variable, with a committed fallback.
# NOTE(review): a real-looking private key is hard-coded as the default —
# verify this is a disposable dev/test key and not a production secret.
privkey=${bankrollerPrivateKey:-"0x99a9681faf8e1e178902fe911fb7ba8df7a77539246cf5d76c0012324cd8a175"}

echo ""
echo ""
echo ""
echo "Start bankroller with privkey: $privkey"
echo ""
echo ""
echo ""

# Launch the bankroller node with the resolved key.
bankroller-core start -r $privkey

#node_modules/.bin/concurrently "node_modules/.bin/bankroller-core start -r $privkey" "npm run start-gameserver"
#!/bin/sh -x

# Locate and source the shared configuration file. If COMMON_CONF is unset,
# default to $DIRNAME/common.conf; if it is set but unreadable, warn.
if [ "x$COMMON_CONF" = "x" ]; then
    COMMON_CONF="$DIRNAME/common.conf"
else
    if [ ! -r "$COMMON_CONF" ]; then
        echo "Config file not found $COMMON_CONF"
    fi
fi
if [ -r "$COMMON_CONF" ]; then
    . "$COMMON_CONF"
fi

# Detect whether $JAVA is a modular (JDK 9+) JVM by probing --add-modules.
setModularJdk() {
    "$JAVA" --add-modules=java.se -version > /dev/null 2>&1 && MODULAR_JDK=true || MODULAR_JDK=false
}

# Populate DEFAULT_MODULAR_JVM_OPTIONS with the standard set of --add-exports
# and --add-opens flags, unless the caller already passed --add-modules
# (in which case the defaults are left empty so user options win).
setDefaultModularJvmOptions() {
    setModularJdk
    if [ "$MODULAR_JDK" = "true" ]; then
        DEFAULT_MODULAR_JVM_OPTIONS=`echo $* | $GREP "\-\-add\-modules"`
        if [ "x$DEFAULT_MODULAR_JVM_OPTIONS" = "x" ]; then
            # Set default modular jdk options
            # NB: In case an update is made to these exports and opens, make sure that bootable-jar/boot/pom.xml script is in sync.
            # Needed by the iiop-openjdk subsystem
            DEFAULT_MODULAR_JVM_OPTIONS="$DEFAULT_MODULAR_JVM_OPTIONS --add-exports=java.desktop/sun.awt=ALL-UNNAMED"
            # Needed to instantiate the default InitialContextFactory implementation used by the
            # Elytron subsystem dir-context and core management ldap-connection resources
            DEFAULT_MODULAR_JVM_OPTIONS="$DEFAULT_MODULAR_JVM_OPTIONS --add-exports=java.naming/com.sun.jndi.ldap=ALL-UNNAMED"
            # Needed if Hibernate applications use Javassist
            DEFAULT_MODULAR_JVM_OPTIONS="$DEFAULT_MODULAR_JVM_OPTIONS --add-opens=java.base/java.lang=ALL-UNNAMED"
            # Needed by the MicroProfile REST Client subsystem
            DEFAULT_MODULAR_JVM_OPTIONS="$DEFAULT_MODULAR_JVM_OPTIONS --add-opens=java.base/java.lang.invoke=ALL-UNNAMED"
            # Needed by JBoss Marshalling
            DEFAULT_MODULAR_JVM_OPTIONS="$DEFAULT_MODULAR_JVM_OPTIONS --add-opens=java.base/java.io=ALL-UNNAMED"
            # Needed by WildFly Security Manager
            DEFAULT_MODULAR_JVM_OPTIONS="$DEFAULT_MODULAR_JVM_OPTIONS --add-opens=java.base/java.security=ALL-UNNAMED"
            # Needed for marshalling of enum maps
            DEFAULT_MODULAR_JVM_OPTIONS="$DEFAULT_MODULAR_JVM_OPTIONS --add-opens=java.base/java.util=ALL-UNNAMED"
            # EE integration with sar mbeans requires deep reflection in javax.management
            DEFAULT_MODULAR_JVM_OPTIONS="$DEFAULT_MODULAR_JVM_OPTIONS --add-opens=java.management/javax.management=ALL-UNNAMED"
            # InitialContext proxy generation requires deep reflection in javax.naming
            DEFAULT_MODULAR_JVM_OPTIONS="$DEFAULT_MODULAR_JVM_OPTIONS --add-opens=java.naming/javax.naming=ALL-UNNAMED"
        else
            DEFAULT_MODULAR_JVM_OPTIONS=""
        fi
    fi
}
#ifndef _WKT_MESSAGE_SYSTEM_H
#define _WKT_MESSAGE_SYSTEM_H

#include "ecs/System.h"
#include "ecs/Entity.h"
#include "components/Script.h"
#include "utils/Message.h"

#include <vector>
#include <string>

namespace wkt {
namespace systems {

// Sequential ECS system that processes Script components one at a time,
// holding queued string messages and the entities they relate to.
class MessageSystem : public wkt::ecs::SequentialSystem<wkt::components::Script>
{
public:
    MessageSystem();

public:
    // Invoked once per Script component during a system pass.
    void operator()(std::shared_ptr<wkt::components::Script>);

    // Teardown hook from the System interface.
    void shutdown() override;

private:
    // Pending string-payload messages.
    std::vector<wkt::utils::Message<std::string>> messages;
    // NOTE(review): raw Entity pointers — ownership presumably lies with the
    // ECS registry; confirm lifetime outlives this system's use.
    std::vector<wkt::ecs::Entity*> entities;
};

}}

#endif
import os
import docker
from packaging import version
from copy import copy
from os.path import join
from subprocess import run, PIPE

from invoke import task

from faasmcli.util.env import (
    FAASM_SGX_MODE_DISABLED,
    FAASM_SGX_MODE_HARDWARE,
    FAASM_SGX_MODE_SIM,
    PROJ_ROOT,
)
from faasmcli.util.version import get_faasm_version

# Suffixes distinguishing SGX hardware vs SGX simulation image variants.
SGX_HW_CONTAINER_SUFFIX = "-sgx"
SGX_SIMULATION_CONTAINER_SUFFIX = "-sgx-sim"

# Maps container image name -> its dockerfile under the docker/ directory.
CONTAINER_NAME2FILE_MAP = {
    "redis": "redis.dockerfile",
    "minio": "minio.dockerfile",
    "base": "base.dockerfile",
    "base-sgx": "base-sgx.dockerfile",
    "base-sgx-sim": "base-sgx.dockerfile",
    "upload": "upload.dockerfile",
    "worker": "worker.dockerfile",
    "worker-sgx": "worker.dockerfile",
    "worker-sgx-sim": "worker.dockerfile",
    "cli": "cli.dockerfile",
    "cli-sgx": "cli.dockerfile",
    "cli-sgx-sim": "cli.dockerfile",
    "sgx-aesmd": "sgx-aesmd.dockerfile",
}


@task
def purge(context):
    """
    Purge docker images
    """
    # List only dangling (untagged) image IDs.
    images_cmd = ["docker", "images", "-q", "-f", "dangling=true"]
    cmd_out = run(images_cmd, stdout=PIPE, stderr=PIPE, check=True)
    image_list = cmd_out.stdout
    for img in image_list.decode().split("\n"):
        if not img.strip():
            continue
        print("Removing {}".format(img))
        cmd = ["docker", "rmi", "-f", img]
        run(cmd, check=True)


def _check_valid_containers(containers):
    # Fail fast when any requested container has no known dockerfile.
    for container_name in containers:
        if container_name not in CONTAINER_NAME2FILE_MAP:
            print(
                "Could not find dockerfile for container: {}".format(
                    container_name
                )
            )
            raise RuntimeError("Invalid container: {}".format(container_name))


def _do_push(container, version):
    # Push one tagged image to the faasm/ registry namespace.
    run(
        "docker push faasm/{}:{}".format(container, version),
        shell=True,
        cwd=PROJ_ROOT,
        check=True,
    )


@task(iterable=["c"])
def build(ctx, c, nocache=False, push=False):
    """
    Build latest version of container images
    """
    # Use buildkit for nicer logging
    shell_env = copy(os.environ)
    shell_env["DOCKER_BUILDKIT"] = "1"

    _check_valid_containers(c)

    faasm_ver = get_faasm_version()

    for container_name in c:
        # Prepare dockerfile and tag name
        dockerfile = join("docker", CONTAINER_NAME2FILE_MAP[container_name])
        tag_name = "faasm/{}:{}".format(container_name, faasm_ver)

        # Prepare build arguments: SGX mode is inferred from the image name
        # suffix; SGX variants also record which parent image to build from.
        build_args = {"FAASM_VERSION": faasm_ver}
        if container_name.endswith(SGX_HW_CONTAINER_SUFFIX):
            build_args["FAASM_SGX_MODE"] = FAASM_SGX_MODE_HARDWARE
            build_args["FAASM_SGX_PARENT_SUFFIX"] = SGX_HW_CONTAINER_SUFFIX
        elif container_name.endswith(SGX_SIMULATION_CONTAINER_SUFFIX):
            build_args["FAASM_SGX_MODE"] = FAASM_SGX_MODE_SIM
            build_args[
                "FAASM_SGX_PARENT_SUFFIX"
            ] = SGX_SIMULATION_CONTAINER_SUFFIX
        else:
            build_args["FAASM_SGX_MODE"] = FAASM_SGX_MODE_DISABLED

        # Prepare docker command
        cmd = [
            "docker build {}".format("--no-cache" if nocache else ""),
            "-t {}".format(tag_name),
            "{}".format(
                " ".join(
                    [
                        "--build-arg {}={}".format(arg, build_args[arg])
                        for arg in build_args
                    ]
                )
            ),
            "-f {} .".format(dockerfile),
        ]
        docker_cmd = " ".join(cmd)
        print(docker_cmd)

        # Build (and push) docker image
        run(docker_cmd, shell=True, check=True, cwd=PROJ_ROOT, env=shell_env)

        if push:
            _do_push(container_name, faasm_ver)


@task
def build_all(ctx, nocache=False, push=False):
    """
    Build all available containers
    """
    build(ctx, [c for c in CONTAINER_NAME2FILE_MAP], nocache, push)


@task(iterable=["c"])
def push(ctx, c):
    """
    Push container images
    """
    faasm_ver = get_faasm_version()

    _check_valid_containers(c)

    for container in c:
        _do_push(container, faasm_ver)


@task(iterable=["c"])
def pull(ctx, c):
    """
    Pull container images
    """
    faasm_ver = get_faasm_version()

    _check_valid_containers(c)

    for container in c:
        run(
            "docker pull faasm/{}:{}".format(container, faasm_ver),
            shell=True,
            check=True,
            cwd=PROJ_ROOT,
        )


@task
def delete_old(ctx):
    """
    Deletes old Docker images
    """
    faasm_ver = get_faasm_version()
    dock = docker.from_env()
    images = dock.images.list()
    for image in images:
        for t in image.tags:
            if not t.startswith("faasm/"):
                continue

            tag_ver = t.split(":")[-1]
            # Remove any faasm/ image tagged with a version older than current.
            if version.parse(tag_ver) < version.parse(faasm_ver):
                print("Removing old image: {}".format(t))
                dock.images.remove(t, force=True)
#!/bin/bash
# Connect to a remote host via sshpass and list the remote home directory.
#
# SECURITY NOTE: the original hard-coded a plaintext password and disabled
# host-key checking. The credentials are now overridable via environment
# variables (same defaults preserve existing behavior), and all expansions
# are quoted to survive spaces/special characters. Prefer SSH keys or a
# secrets manager over a committed password.
HOST="${HOST:-example.host.com}"
USERNAME="${USERNAME:-testuser}"
PASSWORD="${PASSWORD:-mysecretpassword}"

# Establish connection to server
# NOTE(review): StrictHostKeyChecking=no accepts any host key (MITM risk) —
# confirm this is acceptable for the target environment.
sshpass -p "${PASSWORD}" ssh -o StrictHostKeyChecking=no "${USERNAME}@${HOST}" << 'ENDSSH'
# Run remote command
ls -la
# End the SSH session
exit
ENDSSH
-- Catalog of library holdings, one row per book.
CREATE TABLE LibraryCatalog (
    id INTEGER PRIMARY KEY,                 -- surrogate key
    title TEXT NOT NULL,
    author TEXT NOT NULL,
    genre TEXT NOT NULL,
    pub_year INTEGER NOT NULL,              -- publication year
    checked_out INTEGER NOT NULL DEFAULT 0  -- presumably a 0/1 boolean flag; confirm against application code
);
from __future__ import absolute_import, division, print_function, unicode_literals

from dateutil.relativedelta import relativedelta


class Tenor(object):
    """A financial tenor string such as '3M' or '10Y'.

    Only the whitelisted tenors from valid_tenors() are accepted; a tenor can
    be converted to a dateutil relativedelta for date arithmetic.
    """

    # Tenor unit suffix -> relativedelta keyword argument.
    _UNIT_KEYWORDS = {'M': 'months', 'Y': 'years'}

    def __init__(self, tenor=None):
        # Reject anything outside the supported whitelist up front.
        if not self.check_tenor(tenor):
            raise ValueError("Invalid tenor: %s" % tenor)
        self.tenor = tenor

    @staticmethod
    def valid_tenors():
        """Return the whitelist of supported tenor strings."""
        return ['1M', '3M', '6M', '9M', '1Y', '2Y', '5Y', '10Y',
                '15Y', '20Y', '30Y', '40Y', '50Y']

    @classmethod
    def check_tenor(cls, tenor):
        """Return True iff `tenor` is one of the supported tenor strings."""
        return tenor in cls.valid_tenors()

    @classmethod
    def tenor_to_relativedelta(cls, tenor):
        """Convert a tenor string to a relativedelta; raise ValueError if invalid."""
        if not cls.check_tenor(tenor):
            raise ValueError("Invalid tenor: %s" % tenor)
        quantity, unit = int(tenor[:-1]), tenor[-1]
        return relativedelta(**{cls._UNIT_KEYWORDS[unit]: quantity})

    def to_relativedelta(self):
        """Return this tenor as a dateutil relativedelta."""
        return self.tenor_to_relativedelta(self.tenor)
Merge Sort is a strong choice for sorting large datasets. It is a divide-and-conquer algorithm with a guaranteed O(n log n) time complexity, which makes it significantly faster on large inputs than quadratic algorithms such as Bubble Sort and Insertion Sort. It is also stable, meaning that elements with equal keys appear in the output in the same relative order as in the input. The combination of guaranteed O(n log n) running time and stability makes Merge Sort well suited to sorting large datasets, at the cost of the O(n) auxiliary memory it typically requires.
// FiveUI rule: flag deprecated horizontal rules in MediaWiki article bodies.
exports.name = "Horizontal rules are deprecated";
exports.description = "Horizontal rules (----) are deprecated.";

// Report one warning per <hr> element found inside the article content area.
exports.rule = function(report) {
  $5('#mw-content-text hr').each(function(i, hr) {
    report.warning('Remove horizontal rule.', hr);
  });
};
clear
echo -e "Please wait while we install and configure Minio. It may take a few minutes..."

# Wait until the px-minio endpoint exposes port 9000 (exactly one match).
# FIX: replaced legacy backticks with $(...), quoted the command substitution,
# and used the numeric -eq comparison consistently — the original mixed a
# string `== 1` test here with `-eq 0` below, and unquoted `wc -l` output
# (which is space-padded on some platforms) can break a string comparison.
until [ "$(kubectl get ep | grep px-minio | grep 9000 | wc -l)" -eq 1 ]; do
    printf '.'
    sleep 1
done

echo -e ""
echo "Minio Deployment Started!"
echo -e ""
echo -e "Installing the Minio Client and adding hosts"

# Wait for the background load-quiz.sh job to finish before continuing.
until [ "$(ps -ef | grep load-quiz.sh | grep -vi grep | wc -l)" -eq 0 ]; do
    printf '.'
    sleep 1
done

clear
//创建一个 style标签并插入页面 module.exports = function (source) { let script = (` let style = document.createElement("style"); style.innerText = ${JSON.stringify(source)}; document.head.appendChild(style); `); return script; }
import express from 'express';
import { User } from '../Models/User.js';
import { Address } from '../Models/Address.js';

// Router handling user CRUD backed by Sequelize models, rendering
// server-side views and redirecting back to the index after writes.
const user_router = express.Router();

// Create a user from the "add user" form, then return to the index page.
user_router.post('/create', async (req, res) => {
    const name = req.body.name;
    const occupation = req.body.occupation;
    let newsletter = req.body.newsletter;
    // HTML checkboxes submit the string 'on' when ticked; map to a boolean.
    if (newsletter === 'on') {
        newsletter = true;
    } else {
        newsletter = false;
    }
    await User.create({name, occupation, newsletter});
    res.redirect('/')
});

// Render the "add user" form.
user_router.get('/create', (req, res) => {
    res.render('adduser');
});

// Update an existing user from the edit form.
user_router.post('/update', async (req, res) => {
    const id = req.body.id;
    const name = req.body.name;
    const occupation = req.body.occupation;
    let newsletter = req.body.newsletter;
    if (newsletter === 'on') {
        newsletter = true;
    } else {
        newsletter = false;
    }
    const dataUser = {
        id, name, occupation, newsletter
    };
    await User.update(dataUser, {where: {id: id}});
    res.redirect('/');
});

// Render the edit form for one user, including the joined Address record.
user_router.get('/edit/:id', async (req, res) => {
    const id = req.params.id;
    const user = await User.findOne({include: Address, where: {id: id}});
    let checked;
    // newsletter comes back from the DB as 0/1; translate to a checkbox flag.
    // NOTE(review): `user` may be null for unknown ids — confirm callers only
    // reach this route with valid ids.
    if (user.newsletter === 1) {
        checked = true;
    } else {
        checked = false;
    }
    res.render('edituser', {user: user.get({plain: true}), checked});
});

// Delete a user by primary key, then return to the index page.
user_router.post('/delete/:id', async (req, res) => {
    const id = req.params.id;
    await User.destroy({where: {id: id}});
    res.redirect('/')
});

// Show a single user's detail view (raw plain object, no associations).
user_router.get('/:id', async (req, res) => {
    const id = req.params.id;
    const user = await User.findOne({raw: true, where: {id: id}});
    res.render('userview', {user});
});

export { user_router };
const unsigned char polines [] = { // 'Logo-Polines-Politeknik-Negeri-Semarang-Hitam-Putih-Original, 128x64px 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0xc0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x07, 0xe0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x1f, 0xf8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x7e, 0x7e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0xf8, 0x1f, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0f, 0xe0, 0x07, 0xf0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x3f, 0x80, 0x01, 0xfc, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x7e, 0x00, 0x00, 0x7e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xf8, 0x08, 0x20, 0x1f, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0xe0, 0x00, 0x20, 0x07, 0xc0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0x80, 0x00, 0x00, 0x03, 0xe0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x07, 0x06, 0x00, 0x00, 0x40, 0xf0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0e, 0x04, 0x00, 0x00, 0x00, 0x78, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x1c, 0x10, 0x00, 0x00, 0x10, 0x38, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x3c, 0x00, 0x00, 0x00, 0x00, 0x3c, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x78, 0x00, 0x00, 0x00, 0x04, 0x1e, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xf0, 0x40, 0x00, 0x00, 0x01, 0x0f, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0xe3, 0x00, 0x60, 0x02, 0x01, 0x07, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0xc1, 0x00, 0x60, 0x06, 0x00, 0x03, 0xc0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x07, 0x80, 0x00, 0x60, 0x06, 0x00, 0xc1, 0xf0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0f, 0x00, 0x03, 0x60, 0x86, 0x40, 0x00, 0xf8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x03, 0x21, 0x86, 0xc0, 0x20, 0x78, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0e, 0x00, 0x03, 0x03, 0xe0, 0xc0, 0x20, 0x70, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0e, 0x00, 0x03, 0x8f, 0xf1, 0xc0, 0x00, 0x70, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x0b, 0x8f, 0xf1, 0xc0, 0x00, 0x70, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x07, 0x00, 0x09, 0xcf, 0xf9, 0x90, 0x00, 0x70, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x07, 0x00, 0x0c, 0x1f, 0xf8, 0x30, 0x00, 0xe0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x07, 0x00, 0x0f, 0x1f, 0xf8, 0x70, 0x00, 0xe0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x07, 0x00, 0x0f, 0x9f, 0xf9, 0xf0, 0x00, 0xe0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x07, 0x00, 0x07, 0xdf, 0xf9, 0xe0, 0x00, 0xe0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x07, 0x00, 0x10, 0x1f, 0xf8, 0x08, 0x08, 0xe0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x07, 0x00, 0x1e, 0x0f, 0xf8, 0x78, 0x00, 0xe0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x07, 0x04, 0x0f, 0xc7, 0xf3, 0xf8, 0x00, 0xe0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x07, 0x14, 0x0f, 0xf3, 0xe7, 0xf0, 0x00, 0xe0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0x00, 0x01, 0xe1, 0x87, 0xc0, 0x00, 0xe0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x80, 0x00, 0x01, 0xc0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0x80, 0x00, 0x18, 
0x90, 0x00, 0x01, 0xc0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0x80, 0x0b, 0xfd, 0xbf, 0xb0, 0x01, 0xc0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0x80, 0x1f, 0xfd, 0xbf, 0xf8, 0x03, 0xc0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0xc0, 0x3f, 0xfd, 0xbf, 0xf8, 0x03, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0xc0, 0x3f, 0xfd, 0x3f, 0xf8, 0x07, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xe0, 0x3f, 0x7c, 0x3f, 0x78, 0x07, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xe0, 0x03, 0xf8, 0x17, 0xc0, 0x0f, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x70, 0x00, 0x3c, 0x3c, 0x00, 0x0e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x78, 0x00, 0x00, 0x00, 0x00, 0x1e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x38, 0x00, 0x00, 0x00, 0x00, 0x1c, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x1c, 0x28, 0x00, 0x00, 0x00, 0x38, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x1c, 0x00, 0x00, 0x00, 0x20, 0x38, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0e, 0x00, 0x00, 0x00, 0x00, 0x70, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0e, 0x00, 0x80, 0x04, 0x00, 0x70, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x00, 0xe0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00, 0x00, 0xe0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x07, 0xfe, 0x00, 0x00, 0x7f, 0xe0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0xff, 0xe0, 0x07, 0xff, 0xc0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0xff, 0xff, 0xff, 0xff, 0xc0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0xff, 0xff, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0f, 0xf8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }; const unsigned char polines_small [] = { // 'Logo-Polines-Politeknik-Negeri-Semarang-Hitam-Putih-Original, 30x32px 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x0e, 0xc0, 0x00, 0x00, 0x38, 0x78, 0x00, 0x00, 0xc0, 0x0c, 0x00, 0x01, 0x40, 0x03, 0x00, 0x02, 0x00, 0x01, 0x80, 0x04, 0x00, 0x00, 0x80, 0x0c, 0x00, 0x00, 0x40, 0x18, 0x08, 0x40, 0x60, 0x30, 0x28, 0x50, 0x30, 0x20, 0x33, 0x10, 0x10, 0x30, 0x57, 0xb8, 0x10, 0x30, 0x47, 0x88, 0x30, 0x10, 0x37, 0xb0, 0x30, 0x10, 0x77, 0xb8, 0x30, 0x10, 0x3f, 0xf0, 0x20, 0x10, 0x01, 0x00, 0x20, 0x10, 0x7d, 0xe8, 0x20, 0x18, 0x7d, 0xf8, 0x60, 0x08, 0x6c, 0xf0, 0x40, 0x0c, 0x08, 0xc0, 0xc0, 0x04, 0x00, 0x00, 0x80, 0x06, 0x00, 0x01, 0x80, 0x02, 0x00, 0x01, 0x00, 0x03, 0xe0, 0x1f, 0x00, 0x00, 0x7f, 0xf8, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 };
#! /bin/bash

set -e

# When running under PBS, source the user's bashrc and cd to the fixed
# scripts directory on the cluster; otherwise cd to this script's own dir.
if [ -n "$PBS_JOBNAME" ]
then
    if [ -f "${PBS_O_HOME}/.bashrc" ]
    then
        source "${PBS_O_HOME}/.bashrc"
    fi
    cd /gpfs01/scratch/tcm0036/codiv-sanger-bake-off/scripts/simcoevolity-scripts
else
    # Bug fix: the original wrote "\${BASH_SOURCE[0]}" — the escaped dollar
    # made dirname receive the *literal* string '${BASH_SOURCE[0]}', so the
    # cd was a no-op (`cd .`). Unescaped, this cds to the script's directory.
    cd "$( dirname "${BASH_SOURCE[0]}" )"
fi

project_dir="../.."
exe_path="${project_dir}/bin/simcoevolity"

if [ ! -x "$exe_path" ]
then
    echo "ERROR: No executable '${exe_path}'."
    echo "       You probably need to run the project setup script."
    exit 1
fi

# Load cluster modules if available; harmless no-op elsewhere.
source "${project_dir}/modules-to-load.sh" >/dev/null 2>&1 || echo "    No modules loaded"

if [ ! -f "${project_dir}/pyenv/bin/activate" ]
then
    echo "ERROR: Python environment \"${project_dir}/pyenv\" does not exist."
    echo "       You probably need to run the project setup script."
    exit 1
fi

source "${project_dir}/pyenv/bin/activate"

rng_seed=494252570
number_of_reps=20
locus_size=500
config_path="../../configs/fixed-independent-pairs-05-sites-10000.yml"
prior_config_path="../../configs/pairs-05-sites-10000.yml"
output_dir="../../simulations/fixed-independent-pairs-05-sites-10000-locus-500/batch-494252570"
qsub_set_up_script_path="../set_up_ecoevolity_qsubs.py"

mkdir -p "$output_dir"

# Run the simulation, then (only on success) generate the qsub scripts.
"$exe_path" --seed="$rng_seed" -n "$number_of_reps" -p "$prior_config_path" -l "$locus_size" -o "$output_dir" "$config_path" && "$qsub_set_up_script_path" "$output_dir"
#!/bin/bash
# CocoaPods "Embed Pods Frameworks" build phase script.
# Shebang fix: the original declared #!/bin/sh but relies on bash-only
# features throughout (arrays, [[ ]], the `function` keyword, an ERR trap and
# `set -o pipefail`), so it must run under bash.
set -e
set -u
set -o pipefail

function on_error {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR

if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"

# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0

# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")

# Copies and strips a vendored framework
install_framework()
{
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi

  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi

  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"

  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  elif [ -L "${binary}" ]; then
    echo "Destination binary is symlinked..."
    dirname="$(dirname "${binary}")"
    binary="${dirname}/$(readlink "${binary}")"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u)
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}

# Copies and strips a vendored dSYM
install_dsym() {
  local source="$1"
  if [ -r "$source" ]; then
    # Copy the dSYM into the target's temp dir.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"

    local basename
    basename="$(basename -s .framework.dSYM "$source")"
    binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"

    # Strip invalid architectures so "fat" simulator / device frameworks work on device
    if [[ "$(file "$binary")" == *"Mach-O "*"dSYM companion"* ]]; then
      strip_invalid_archs "$binary"
    fi

    if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
      # Move the stripped file into its final destination.
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
    else
      # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
      touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
    fi
  fi
}

# Copies the bcsymbolmap files of a vendored framework
install_bcsymbolmap() {
    local bcsymbolmap_path="$1"
    local destination="${BUILT_PRODUCTS_DIR}"
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}"
}

# Signs a framework with the provided identity
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"

    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}

# Strip invalid architectures
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    STRIP_BINARY_RETVAL=0
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary"
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  STRIP_BINARY_RETVAL=1
}

if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/SSKeychain/SSKeychain.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/SSKeychain/SSKeychain.framework"
fi
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
<reponame>AndySmile/BachelorThesis /** * DemoApp - Terrain Builder Test Suite. * * @author <NAME><<EMAIL>> * @file Src/Test/TerrainBuilderTest.cpp * @version 1.3.0 09-Jan-15 * @copyright Copyright (c) 2014-2015 by <NAME>. All rights reserved. (http://andysmiles4games.com) */ #include <Test/TerrainBuilderTest.h> #include <TerrainBuilder.h> #include <ImageProcessorHeightMap.h> namespace DemoAppTest { void TerrainBuilderTest::createTerrainTest(void) { TerrainBuilder builder(TerrainBuilder::TypeVoxel, "./Resource/HeightMap_6.png"); ImageTransformer* transformer = builder.getImageTransformer(); // test assigned values via constructor CPPUNIT_ASSERT(builder.getTerrainType() == TerrainBuilder::TypeVoxel); CPPUNIT_ASSERT(transformer != NULL); builder.setTerrainType(TerrainBuilder::TypeMesh); // test terrain type setter CPPUNIT_ASSERT(builder.getTerrainType() == TerrainBuilder::TypeMesh); TerrainEnvironment* environment = builder.getTerrainEnvironment(); TerrainDecorator* decorator = builder.getTerrainDecorator(); // test default terrain builder components CPPUNIT_ASSERT(environment != NULL); CPPUNIT_ASSERT(decorator == NULL); // setup image tansformer transformer->addProcessor(new ImageProcessorHeightMap(200.0f)); TerrainAbstract* terrain = builder.build(); CPPUNIT_ASSERT(terrain != NULL); // release memory if (terrain != NULL) { delete terrain; terrain = NULL; } builder.release(); } }
import { hasElements } from '../utils'

describe('hasElements', () => {
  it('should return false for value different than array', () => {
    // A representative non-array value of each common type.
    const nonArrayValues = [false, 12, 'string', {}, () => {}]
    nonArrayValues.forEach((value) => {
      expect(hasElements(value)).toBeFalsy()
    })
  })

  it('should return false for empty array', () => {
    expect(hasElements([])).toBeFalsy()
  })

  it('should return true for array with at least one element', () => {
    const nonEmptyArrays = [[1], [false, {}], ['a', 'b', 'c']]
    nonEmptyArrays.forEach((value) => {
      expect(hasElements(value)).toBeTruthy()
    })
  })
})
import SheetManager from 'core/SheetManage';
import {
  ActionManagerNotify,
  ActionName,
  ActionShape,
  ActionsManagerInterface,
} from './types';

/**
 * Registry and dispatcher for sheet actions: stores registered actions by
 * name, executes them through the updater notification channel, and routes
 * keyboard events to the first action whose keyTest accepts them.
 */
class ActionsManage implements ActionsManagerInterface {
  sheetManager: SheetManager;
  actions: ActionsManagerInterface['actions'];
  updater: ActionManagerNotify;

  constructor(sheetManager: SheetManager, updater: ActionManagerNotify) {
    this.sheetManager = sheetManager;
    this.updater = updater;
    this.actions = {} as ActionsManagerInterface['actions'];
  }

  /** Register (or overwrite) a single action under its name. */
  registerAction(action: ActionShape) {
    this.actions[action.name] = action;
  }

  /** Register several actions; already-registered names are NOT overwritten. */
  registerAll(actions: ActionShape[]) {
    for (const action of actions) {
      if (!this.actions[action.name]) {
        this.registerAction(action);
      }
    }
  }

  /** Run an action's perform() through the updater, bound to this sheet. */
  executeAction<T extends ActionName, F>(
    action: ActionShape<T, F>,
    payload: F
  ) {
    this.updater(
      action.perform.bind(action, this.sheetManager, payload),
      action as any
    );
  }

  /** Dispatch a keydown event to the first action whose keyTest matches. */
  handleKeydown(event: KeyboardEvent) {
    // `find` short-circuits on the first match instead of filtering the
    // whole collection just to take element 0 (also fixes the
    // `actionToExcute` typo in the original).
    const actionToExecute = Object.values(this.actions).find(
      action => action.keyTest && action.keyTest(event, this.sheetManager)
    );
    if (actionToExecute) {
      this.executeAction(actionToExecute as any, event as any);
    }
  }
}

export { ActionsManage };
const dgram = require('dgram');

// Payload is UTF-8 encoded by Buffer.from, so message.length is the byte
// count, which is what socket.send expects.
const message = Buffer.from('深入浅出 Node.js');
const client = dgram.createSocket('udp4');

// Fire a single datagram at localhost:41234 and close the socket afterwards.
client.send(message, 0, message.length, 41234, 'localhost', (err, bytes) => {
  if (err) {
    // Surface send failures instead of silently discarding the error
    // argument as the original did.
    console.error('UDP send failed:', err);
  }
  client.close();
});
'use strict'

/*
|--------------------------------------------------------------------------
| Router
|--------------------------------------------------------------------------
|
| AdonisJs Router helps you in defining urls and their actions. It supports
| all major HTTP conventions to keep your routes file descriptive and
| clean.
|
| @example
| Route.get('/user', 'UserController.index')
| Route.post('/user', 'UserController.store')
| Route.resource('user', 'UserController')
*/

const Route = use('Route')

// Version 1 of the JSON API, mounted under /api/v1.
Route.group('version1', () => {
  // Registering users routes
  Route.route('/users', 'POST', 'UsersController.create')
  Route.route('/users', 'GET', 'UsersController.getAll')
  Route.route('/users', 'PUT', 'UsersController.update')
  Route.route('/users', 'DELETE', 'UsersController.remove')
  Route.route('/users/login', 'POST', 'UsersController.login')
  Route.route('/users/logout', 'GET', 'UsersController.logout')

  // Utaite CRUD routes.
  Route.route('/utaites', 'POST', 'UtaitesController.create')
  Route.route('/utaites', 'GET', 'UtaitesController.getAll')
  Route.route('/utaites', 'PUT', 'UtaitesController.update')
  Route.route('/utaites', 'DELETE', 'UtaitesController.remove')

  // Latest covers feed.
  Route.route('/covers/latest', 'GET', 'CoversController.get')
}).prefix('/api/v1')

// Adonis checks in order the routes so we put this last so that api calls work
Route.any('*', function * (request, response) {
  yield response.sendView('home')
})
// MainActivity.java package com.examples.listviewexample; import android.os.Bundle; import android.widget.ArrayAdapter; import android.widget.ListView; import androidx.appcompat.app.AppCompatActivity; public class MainActivity extends AppCompatActivity { String[] items = {"item1", "item2", "item3", "item4", "item5"}; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); ListView listView = (ListView) findViewById(R.id.list); ArrayAdapter<String> adapter = new ArrayAdapter<String>(this, android.R.layout.simple_list_item_1, items); listView.setAdapter(adapter); } }
<reponame>GeoscienceAustralia/igssitelog-java-bindings package au.gov.ga.geodesy.igssitelog.domain.model; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.Id; import javax.persistence.SequenceGenerator; import javax.persistence.Table; import javax.validation.constraints.Size; /** * http://sopac.ucsd.edu/ns/geodesy/doc/igsSiteLog/contact/2564/baseContactLib.xsd:contactType */ @Entity @Table(name = "SITELOG_CONTACT") public class Contact { @Id @GeneratedValue(generator = "surrogateKeyGenerator") @SequenceGenerator(name = "surrogateKeyGenerator", sequenceName = "SEQ_SITELOGCONTACT") private Integer id; @Size(max = 256) @Column(name = "NAME", length = 256) protected String name; @Size(max = 256) @Column(name = "TELEPHONE_PRIMARY", length = 256) protected String telephonePrimary; @Size(max = 256) @Column(name = "TELEPHONE_SECONDARY", length = 256) protected String telephoneSecondary; @Size(max = 256) @Column(name = "FAX", length = 256) protected String fax; @Size(max = 256) @Column(name = "EMAIL", length = 256) protected String email; @SuppressWarnings("unused") private Integer getId() { return id; } @SuppressWarnings("unused") private void setId(Integer id) { this.id = id; } /** * Return name. */ public String getName() { return name; } /** * Set name. */ public void setName(String value) { this.name = value; } /** * Return primary telephone number. */ public String getTelephonePrimary() { return telephonePrimary; } /** * Set primary telephone number. */ public void setTelephonePrimary(String value) { this.telephonePrimary = value; } /** * Return secondary telephone number. */ public String getTelephoneSecondary() { return telephoneSecondary; } /** * Set secondary telephone number. */ public void setTelephoneSecondary(String value) { this.telephoneSecondary = value; } /** * Return fax number. */ public String getFax() { return fax; } /** * Set fax number. 
*/ public void setFax(String value) { this.fax = value; } /** * Return email address. */ public String getEmail() { return email; } /** * Set email address. */ public void setEmail(String value) { this.email = value; } }
<filename>middleware/grpc/interceptor.go<gh_stars>0 package grpcMiddleware import ( "context" "github.com/alhamsya/boilerplate-go/lib/helpers/client" "github.com/alhamsya/boilerplate-go/lib/helpers/custom_log" "google.golang.org/grpc" ) //GrpcLoggingInterceptor GRPC log for interceptor func GrpcLoggingInterceptor(ctx context.Context, req interface{}, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (interface{}, error) { clientIP := client.GrpcGetIP(ctx) resp, errHandler := handler(ctx, req) if errHandler != nil { customLog.ErrorF("[GRPC] %s [CLIENT] %s : %v", info.FullMethod, clientIP, errHandler) } customLog.InfoF("[GRPC] %s [CLIENT] %s : [REQUEST] %s", info.FullMethod, clientIP, req) return resp, nil }
// Example fixture: constructors, factory functions, and `new` with and
// without parentheses — the exact call shapes are the point of this file
// (it exercises call-graph / tracing tooling), so do not "clean it up".

function A () {
  // `b` becomes an own property of the instance, holding the closure
  // returned by c().
  this.b = b();
}

function b () {
  return c();
}

function c () {
  // The returned closure is what instances ultimately invoke via a.b().
  return function () { return 'hello world' };
}

// Factory using `new` with explicit parentheses.
function aFactory () {
  return new A();
}

// Factory using `new` without parentheses (equivalent for zero-arg calls).
function aFactory2 () {
  return new A;
}

new A().b();
aFactory().b();
aFactory2().b();
import socket import md5 def crack_password(target_hash, port): server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) server_socket.bind(('127.0.0.1', port)) server_socket.listen(1) conn, addr = server_socket.accept() while True: received_hash = conn.recv(32) # Assuming each MD5 hash is 32 characters long word = '' for i in range(10000): if md5.new('word{}'.format(i)).hexdigest() == received_hash: word = 'word{}'.format(i) break if word: conn.send(word) return word else: conn.send('Password not found') conn.close() server_socket.close()
import { IMapDataItem } from "./IMapDataItem";

/**
 * A stand allocation shown on the map; extends the generic map data item
 * with the pictogram used to render it.
 */
export interface IStandAllocation extends IMapDataItem {
    // Identifier of the pictogram asset associated with this stand.
    PictogramId: string
}
#!/usr/bin/env bash
# Launch the dev server with auto-reload, binding on all interfaces.
# `exec` replaces this wrapper shell so signals (Ctrl-C, SIGTERM) reach
# uvicorn directly instead of being intercepted by bash.
exec ../../.venv/bin/uvicorn server:app \
    --reload \
    --host 0.0.0.0 \
    --port 5000 \
    --log-level info \
    --env-file ../../.env.local
import asyncio
from typing import Callable, Mapping, Any, get_type_hints, NamedTuple, Type, TypeVar, Awaitable, Tuple

from pyrsistent import pmap

from solo.configurator.registry import Registry
from solo.server.db.types import SQLEngine
from solo.server.request import Request
from solo.types import IO
from solo.vendor.old_session.old_session import Session, SessionStore


class Runtime(NamedTuple):
    """Application-level singletons threaded through request handling."""
    registry: Registry
    dbengine: SQLEngine
    memstore: Any
    session_storage: SessionStore


def get_handler_deps(
    runtime: Runtime,
    handler: Callable,
    request: Request,
) -> Tuple[Mapping[str, IO], Mapping[str, Any]]:
    """ Returns a tuple of awaitable coroutine dependencies and rest
    dependencies, resolved from the handler's type annotations.
    """
    hints = get_type_hints(handler)
    # The return annotation is not an injectable dependency. (The original
    # also re-filtered 'return' in a generator after this pop — redundant.)
    hints.pop('return', None)
    rv = {True: {}, False: {}}
    for arg_name, dep_type in hints.items():
        dependency_getter = DEPENDENCIES[dep_type]
        dep = dependency_getter(runtime)
        # Request-scoped dependencies are expressed as a second callable
        # layer that receives the current request.
        if callable(dep):
            dep = dep(request)
        # Partition: coroutine deps must be awaited by the caller; the rest
        # are plain values.
        rv[asyncio.iscoroutine(dep)][arg_name] = dep
    return pmap(rv[True]), pmap(rv[False])


T = TypeVar('T')

# Maps an injectable type to its getter. Getters receive the runtime; a
# getter may return another callable, which is then applied to the current
# request (e.g. Session).
DEPENDENCIES: Mapping[Type[T], Callable[[Runtime], T]] = pmap({
    Registry: lambda runtime: runtime.registry,
    SQLEngine: lambda runtime: runtime.dbengine,
    Session: lambda runtime: lambda request: runtime.session_storage.load_session(request),
    SessionStore: lambda runtime: runtime.session_storage,
})
import sys
from datetime import datetime


def log_message(message: str) -> None:
    """Write a timestamped line to standard error.

    The prefix is the local wall-clock time formatted as
    'YYYY-MM-DD HH:MM:SS', followed by ' - ' and the message.
    """
    now = datetime.now()
    stamp = now.strftime("%Y-%m-%d %H:%M:%S")
    sys.stderr.write("{} - {}\n".format(stamp, message))
import { contextToSvelte } from '../generators/context/svelte';
import { parseContext } from '../parsers/context';
import { componentToSvelte } from '../generators/svelte';
import { parseJsx } from '../parsers/jsx';

const onUpdate = require('./data/blocks/onUpdate.raw');
const multipleOUpdate = require('./data/blocks/multiple-onUpdate.raw');
const selfReferencingComponent = require('./data/blocks/self-referencing-component.raw');
const selfReferencingComponentWithChildren = require('./data/blocks/self-referencing-component-with-children.raw');
const builderRenderBlock = require('./data/blocks/builder-render-block.raw');
const rootShow = require('./data/blocks/rootShow.raw');
const simpleExample = require('./data/context/simple.context.lite');
const componentWithContext = require('./data/context/component-with-context.lite');
const renderBlock = require('./data/blocks/builder-render-block.raw');

describe('Svelte', () => {
  // Shared helper: parse raw JSX, generate Svelte output, snapshot it.
  const snapshotSvelte = (raw: string) => {
    const component = parseJsx(raw);
    const output = componentToSvelte()({ component });
    expect(output).toMatchSnapshot();
  };

  test('onUpdate', () => {
    snapshotSvelte(onUpdate);
  });

  test('multipleOnUpdate', () => {
    snapshotSvelte(multipleOUpdate);
  });

  test('selfReferencingComponent', () => {
    snapshotSvelte(selfReferencingComponent);
  });

  test('selfReferencingComponentWithChildren', () => {
    snapshotSvelte(selfReferencingComponentWithChildren);
  });

  test('BuilderRenderBlock', () => {
    snapshotSvelte(builderRenderBlock);
  });

  test('rootShow', () => {
    snapshotSvelte(rootShow);
  });

  describe('Context', () => {
    test('Parse context', () => {
      const component = parseContext(simpleExample, { name: 'SimpleExample' });
      if (!component) {
        throw new Error(
          'No parseable context found for simple.context.lite.ts',
        );
      }
      // Snapshot both the parsed context and the generated Svelte store.
      expect(component).toMatchSnapshot();
      const context = contextToSvelte()({ context: component });
      expect(context).toMatchSnapshot();
    });

    test('Use and set context in components', () => {
      const component = parseJsx(componentWithContext);
      expect(component).toMatchSnapshot();
      const output = componentToSvelte()({ component });
      expect(output).toMatchSnapshot();
    });

    test('Use and set context in complex components', () => {
      const component = parseJsx(renderBlock);
      expect(component).toMatchSnapshot();
      const output = componentToSvelte()({ component });
      expect(output).toMatchSnapshot();
    });
  });
});
import { Routes, RouterModule } from '@angular/router';
import { LoginPageComponent } from '../login-page/login-page.component';
import { DashboardPageComponent } from '../dashboard-page/dashboard-page.component';
import { PagenotfoundPageComponent } from '../pagenotfound-page/pagenotfound-page.component';
import { AuthGuard } from '../guards/auth-guard.service';

// Application route table: guarded dashboard, public login, root redirect,
// and a catch-all 404 route.
// NOTE(review): `RouterModule` is imported but not referenced in this file —
// confirm it is needed here or belongs in the module that consumes APP_ROUTES.
export const APP_ROUTES: Routes = [
  { path: 'dashboard', component: DashboardPageComponent, canActivate: [AuthGuard] },
  { path: 'login', component: LoginPageComponent },
  // Empty path redirects to the dashboard, which itself enforces AuthGuard.
  { path: '', redirectTo: '/dashboard', pathMatch: 'full' },
  // NOTE(review): the wildcard 404 route is also behind AuthGuard, so an
  // unauthenticated user hitting an unknown URL is handled by the guard
  // rather than shown the not-found page — confirm this is intentional.
  { path: '**', component: PagenotfoundPageComponent, canActivate: [AuthGuard] }
];
#!/usr/bin/env bash
# extract-uniprotkb.sh: download UniProtKB dat distribution and extract to a dat file
# Copyright 2019 Stephen A. Ramsey <stephen.ramsey@oregonstate.edu>

set -o nounset -o pipefail -o errexit

if [[ "${1:-}" == "--help" || "${1:-}" == "-h" ]]; then
    echo Usage: "$0 <uniprot_output_file.dat>"
    exit 2
fi

echo "================= starting extract-uniprotkb.sh ================="
date

# master-config.shinc supplies BUILD_DIR and curl_get.
config_dir=$(dirname "$0")
source "${config_dir}/master-config.shinc"

# Output file defaults to BUILD_DIR/uniprot_sprot.dat when no argument given.
uniprotkb_dat_file=${1:-"${BUILD_DIR}/uniprot_sprot.dat"}
uniprotkb_dir=$(dirname "${uniprotkb_dat_file}")

mkdir -p "${uniprotkb_dir}"

${curl_get} ftp://ftp.uniprot.org/pub/databases/uniprot/current_release/knowledgebase/complete/uniprot_sprot.dat.gz \
     > "${uniprotkb_dir}/uniprot_sprot.dat.gz"

${curl_get} ftp://ftp.uniprot.org/pub/databases/uniprot/current_release/relnotes.txt \
     > "${uniprotkb_dir}/relnotes.txt"

# Parse the release version and date out of the release notes; they are
# written as a comment header line at the top of the output dat file.
version_number=$(grep -m 1 "UniProt Release" "${uniprotkb_dir}/relnotes.txt" | cut -f3 -d ' ')
update_date=$(grep -m 1 "${version_number} (" "${uniprotkb_dir}/relnotes.txt" | cut -f2 -d ' ' | cut -f2 -d '(' | cut -f1 -d ')')
start_string="# Version: ${version_number}, Date: ${update_date}"

# FIX: decompress straight into the output file instead of going through a
# fixed-name /tmp/uniprot_sprot.dat intermediate (clobbered other runs and
# was a predictable-path risk on shared /tmp), halving disk writes as well.
echo "${start_string}" > "${uniprotkb_dat_file}"
zcat "${uniprotkb_dir}/uniprot_sprot.dat.gz" >> "${uniprotkb_dat_file}"

date
echo "================= finished extract-uniprotkb.sh ================="
<gh_stars>1-10 module Gollum class Macro class AllPages < Gollum::Macro def render if @wiki.pages.size > 0 '<ul id="pages">' + @wiki.pages.map { |p| "<li>#{p.name}</li>" }.join + '</ul>' end end end end end
#!/usr/bin/env bash
# Re-import every JSON backup file found under the current directory.
# NOTE(review): flag semantics inferred from names — `--force` presumably
# skips confirmation and `-c replace` presumably overwrites existing entries;
# confirm against `ctm-import-backup --help`.
find . -name "*.json" -exec ctm-import-backup --force -c replace -f {} \;
<reponame>karzuo/merlin
package service

import (
	"reflect"
	"testing"
)

// Table-driven test for getVersionSearchTerms: parses a free-text query of
// "key:value" terms into a map. Per the expected values below, whitespace
// around keys/values is trimmed, empty keys are dropped, and for duplicate
// keys the last occurrence wins.
func Test_getVersionSearchTerms(t *testing.T) {
	type args struct {
		query string
	}
	tests := []struct {
		name string
		args args
		want map[string]string
	}{
		{
			name: "empty query",
			args: args{""},
			want: map[string]string{},
		},
		{
			name: "empty query with whitespaces",
			args: args{" "},
			want: map[string]string{},
		},
		{
			name: "single key empty value",
			args: args{"foo:"},
			want: map[string]string{"foo":""},
		},
		{
			name: "single key empty value with whitespaces",
			args: args{"foo : "},
			want: map[string]string{"foo":""},
		},
		{
			name: "single key non-empty value",
			args: args{"foo: bar"},
			want: map[string]string{"foo": "bar"},
		},
		{
			name: "single key non-empty value with whitespaces",
			args: args{"foo :bar"},
			want: map[string]string{"foo": "bar"},
		},
		{
			name: "multiple keys with empty key",
			args: args{"foo::bar"},
			want: map[string]string{"foo": ""},
		},
		{
			name: "multiple keys with empty key and whitespaces",
			args: args{"foo : : bar"},
			want: map[string]string{"foo": ""},
		},
		{
			name: "multiple keys and some empty values",
			args: args{"foo:bar:baz"},
			want: map[string]string{"foo": "", "bar": "baz"},
		},
		{
			name: "multiple keys with empty key and values",
			args: args{"foo:bar::"},
			want: map[string]string{"foo": "", "bar": ""},
		},
		{
			name: "multiple keys and non-empty values",
			args: args{"foo:bar baz:qux quux "},
			want: map[string]string{"foo": "bar", "baz": "qux quux"},
		},
		{
			name: "duplicate keys and non-empty values",
			args: args{"foo:bar baz:qux quux foo: corge"},
			want: map[string]string{"foo": "corge", "baz": "qux quux"},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			if got := getVersionSearchTerms(tt.args.query); !reflect.DeepEqual(got, tt.want) {
				t.Errorf("getVersionSearchTerms() = %v, want %v", got, tt.want)
			}
		})
	}
}

// Table-driven test for generateLabelsWhereQuery: converts a free-text
// "key in (v1, v2)" filter expression into a SQL WHERE fragment using the
// Postgres JSONB containment operator (labels @> ?) plus its bound args.
// Per the expected values below: values within one key are OR-ed, distinct
// keys are AND-ed, and malformed terms (empty key, unclosed parenthesis,
// empty value list, terms without "in (...)") are silently dropped.
func Test_generateLabelsWhereQuery(t *testing.T) {
	type args struct {
		freeTextQuery string
	}
	tests := []struct {
		name      string
		args      args
		wantQuery string
		wantArgs  []interface{}
	}{
		{
			name:      "empty query",
			args:      args{freeTextQuery: ""},
			wantQuery: "",
			wantArgs:  nil,
		},
		{
			name:      "empty label key, one label value",
			args:      args{freeTextQuery: " in (GO-RIDE)"},
			wantQuery: "",
			wantArgs:  nil,
		},
		{
			name:      "one label key, one label value",
			args:      args{freeTextQuery: "service_type in (GO-RIDE)"},
			wantQuery: "(labels @> ?)",
			wantArgs:  []interface{}{`{"service_type": "GO-RIDE"}`},
		},
		{
			name:      "one label key, one label value with unclosed parenthesis",
			args:      args{freeTextQuery: "service_type in (GO-RIDE"},
			wantQuery: "",
			wantArgs:  nil,
		},
		{
			name:      "one label key, empty value",
			args:      args{freeTextQuery: "service_type in ()"},
			wantQuery: "",
			wantArgs:  nil,
		},
		{
			name:      "one label key, one label value with invalid chars, should ignore parts before invalid chars",
			args:      args{freeTextQuery: "service%%__type in (GO-RIDE)"},
			wantQuery: "(labels @> ?)",
			wantArgs:  []interface{}{`{"__type": "GO-RIDE"}`},
		},
		{
			name:      "one label key, multiple label values",
			args:      args{freeTextQuery: "service_type in (GO-RIDE, GO-CAR)"},
			wantQuery: "(labels @> ? OR labels @> ?)",
			wantArgs:  []interface{}{`{"service_type": "GO-RIDE"}`, `{"service_type": "GO-CAR"}`},
		},
		{
			name:      "one label key, multiple label values with extra whitespaces and uppercase 'in'",
			args:      args{freeTextQuery: " service_type IN (GO-RIDE , GO-CAR )"},
			wantQuery: "(labels @> ? OR labels @> ?)",
			wantArgs:  []interface{}{`{"service_type": "GO-RIDE"}`, `{"service_type": "GO-CAR"}`},
		},
		{
			name:      "multiple label keys and multiple label values",
			args:      args{freeTextQuery: "service_type in (GO-RIDE, GO-CAR), service2-area in (S-9_G3)"},
			wantQuery: "(labels @> ? OR labels @> ?) AND (labels @> ?)",
			wantArgs:  []interface{}{`{"service_type": "GO-RIDE"}`, `{"service_type": "GO-CAR"}`, `{"service2-area": "S-9_G3"}`},
		},
		{
			name:      "multiple label keys and multiple label values with empty keys",
			args:      args{freeTextQuery: "service_type in (GO-RIDE, GO-CAR), service-area in (SG), in (foo)"},
			wantQuery: "(labels @> ? OR labels @> ?) AND (labels @> ?)",
			wantArgs:  []interface{}{`{"service_type": "GO-RIDE"}`, `{"service_type": "GO-CAR"}`, `{"service-area": "SG"}`},
		},
		{
			name:      "multiple label keys and multiple label values with extra meaningless terms",
			args:      args{freeTextQuery: "service_type in (GO-RIDE, GO-CAR), service-area in (SG), quick, brown in fox, moomin in (troll)"},
			wantQuery: "(labels @> ? OR labels @> ?) AND (labels @> ?) AND (labels @> ?)",
			wantArgs:  []interface{}{`{"service_type": "GO-RIDE"}`, `{"service_type": "GO-CAR"}`, `{"service-area": "SG"}`, `{"moomin": "troll"}`},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			gotQuery, gotArgs := generateLabelsWhereQuery(tt.args.freeTextQuery)
			if gotQuery != tt.wantQuery {
				t.Errorf("generateLabelsWhereQuery() gotQuery = %v, want %v", gotQuery, tt.wantQuery)
			}
			if !reflect.DeepEqual(gotArgs, tt.wantArgs) {
				t.Errorf("generateLabelsWhereQuery() gotArgs = %v, want %v", gotArgs, tt.wantArgs)
			}
		})
	}
}
/**
 * @author rajat
 * Global submit function.
 * Builds a hidden form from the given DTO and submits it as a GET request.
 * Require: the CSRF token script (defining the global `csrf_token`) should
 * be there on the page otherwise it won't submit.
 *
 * @param {Object} dto        key/value pairs sent as hidden inputs
 * @param {string} action     path (without leading slash) to submit to
 * @param {string} [cookieKey] optional cookie name; when given, the query
 *                             string built from dto is saved via $.cookie
 */
function onSubmit(dto, action, cookieKey) {
    console.log("OnSubmit");
    var c = $("<form></form>");
    c.attr("action", "/" + action);
    c.css("display", "none");
    c.attr("method", "GET");
    var cookieValue = "?";
    for (var key in dto) {
        if (dto.hasOwnProperty(key)) {
            console.log("key:" + dto[key]);
            var d = $("<input type='hidden'/>");
            d.attr("name", key);
            d.attr("value", dto[key]);
            c.append(d);
            cookieValue += key + "=" + dto[key] + "&";
        }
    }
    if (cookieKey) {
        $.cookie(cookieKey, cookieValue);
    }
    var csrf = $("<input type='hidden'/>");
    csrf.attr("name", "_csrf");
    csrf.attr("value", csrf_token);
    // BUG FIX: the CSRF input was created but never attached to the form,
    // so the token was silently dropped from every request.
    c.append(csrf);
    $("body").append(c);
    c.submit();
};
#!/bin/sh
# Entry point: choose key-based or password-based auth for the action.
# INPUT_KEY / INPUT_PASSWORD are provided by the action runner environment.

PASSWORD=${INPUT_PASSWORD};
KEY=${INPUT_KEY};

# At least one credential must be supplied.
if [ -z "$KEY" ] && [ -z "$PASSWORD" ]; then
    echo "🔑 Please provide at least a key or a password...";
    exit 0;
fi

# BUG FIX: the original used the bash-only `[[ ]]` test and `source` under a
# #!/bin/sh shebang; on POSIX shells such as dash these are syntax errors.
# Use the portable `[ ]` test and the POSIX `.` command instead.
if [ -n "$KEY" ]; then
    echo "🔑 Using key file...";
    . /with_key.sh;
else
    echo "🔑 Using password...";
    . /with_pass.sh;
fi
export {default as chan} from "./chan"
import { getDistance, Vec2, vecTripleProduct, support } from '../../common/math';
import Simplex from './Simplex';
import EPA from './EPA';
import Contact from './Contact';

/**
 * GJK (Gilbert–Johnson–Keerthi) intersection test between two shapes.
 *
 * @param shapeA first shape
 * @param shapeB second shape
 * @returns false when the shapes do not intersect; otherwise delegates to
 *          EPA with the final simplex and returns its result.
 */
function GJK (shapeA, shapeB) {
    // Simplex built from support points of the Minkowski difference.
    let simplex = new Simplex();

    // Initial (arbitrary) support direction; take the first support point
    // along it, then search back in the opposite direction.
    let d = new Vec2(1, 0);
    simplex.add(support(shapeA, shapeB, d));
    d = d.getOpp();

    while (true) {
        // FIX: the result of add() was stored in an unused `flag` local;
        // an unreachable `return false` after this infinite loop and the
        // commented-out debug drawing calls were removed as dead code.
        simplex.add(support(shapeA, shapeB, d));

        // If the newest support point did not pass the origin along d,
        // the Minkowski difference cannot contain the origin: no overlap.
        if (simplex.getLast().dot(d) <= 0) {
            return false;
        } else {
            // containsOrigion also updates the search direction `d`.
            if (simplex.containsOrigion(d)) {
                // Shapes intersect: run EPA for penetration/contact info.
                return EPA(shapeA, shapeB, simplex);
            }
        }
    }
}

export { GJK }
export default GJK;
<gh_stars>10-100
// Copyright 2011 The Apache Software Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package org.apache.tapestry5.services;

import org.apache.tapestry5.MarkupWriter;
import org.apache.tapestry5.ValidationDecorator;

/**
 * Creates an instance of {@link org.apache.tapestry5.ValidationDecorator} for a
 * {@link org.apache.tapestry5.MarkupWriter}. This service is overridden in applications
 * that do not wish to use the {@linkplain org.apache.tapestry5.BaseValidationDecorator default no-op validation decorator}.
 *
 * @since 5.3
 * @deprecated Deprecated in 5.4 with no replacement, as {@link ValidationDecorator} is being phased out.
 */
public interface ValidationDecoratorFactory
{
    /**
     * Creates a new decorator for the indicated writer.
     *
     * @param writer the markup writer the new decorator will operate on
     * @return a new decorator instance
     */
    ValidationDecorator newInstance(MarkupWriter writer);
}
def nav_tree(el):
    # Build a navigation-tree entry (plain dict) for the given element.
    # NOTE(review): both 'id' and 'name' are set to el.name — confirm ids are
    # not meant to be distinct from display names.
    d = {}
    d['id'] = el.name
    d['name'] = el.name
    full_ogden = generate_text(el)  # Assuming generate_text function is defined elsewhere
    # Preview: first 10 whitespace-separated tokens of the element's Ogden
    # text, with "div" swapped for "span", followed by " ..".
    # NOTE(review): the replace runs on the already-joined string, so it also
    # rewrites any literal "div" occurring inside the text, not just markup —
    # confirm this is intended.
    preview_ogden = "%s .." % ' '.join(el.textOgden().split()[:10]).replace("div", "span")
    d['full_ogden'] = full_ogden
    d['preview_ogden'] = preview_ogden
    return d
#!/bin/bash
# Copyright 2020 The Feverbase Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Run the fetcher inside its virtualenv, then restart the app.
# FIX: guard the cd (ShellCheck SC2164) — previously a failed cd would run
# fetch.py and restart.sh from whatever the current directory happened to be.
cd /root/app || exit 1
source venv/bin/activate
python3 fetch.py
./scripts/restart.sh
#!/bin/bash

# ANSI colour/formatting escape codes used by the status output below.
NONE='\033[00m'
RED='\033[01;31m'
GREEN='\033[01;32m'
YELLOW='\033[01;33m'
PURPLE='\033[01;35m'
CYAN='\033[01;36m'
WHITE='\033[01;37m'
BOLD='\033[1m'
UNDERLINE='\033[4m'

# Build parameters: target runtime/configuration, the short git commit hash
# used to name the release tarball, and the publish/release paths.
runtime="linux-arm"
configuration="release"
git_commit=$(git log --format=%h --abbrev=7 -n 1)
publish_directory="/tmp/City/Release/Publish"
release_directory="/tmp/City/Release"
project_path="../src/City.Chain/City.Chain.csproj"

# Abort unless the script is running as root.
function check_root {
    if [ "$(id -u)" != "0" ]; then
        echo "Sorry, this script needs to be run as root. Do \"sudo su root\" and then re-run this script"
        exit 1
    fi
}

# Print the effective build parameters so the operator can review them.
function DisplayParameters {
    echo "Publish directory is:" $publish_directory
    echo "Project file is :" ${project_path}
    echo "Current directory is:" $PWD
    echo "Git commit to build :" $git_commit
}

# dotnet-publish the project for ${runtime}, tar the output into the release
# directory as City.Chain-<runtime>-<commit>.tar, then clean up.
function compileWallet {
    echo -e "* Compiling wallet. Please wait, this might take a while to complete..."
    #dotnet --info
    mkdir -p $publish_directory
    dotnet publish $project_path -c $configuration -v m -r $runtime --no-dependencies -o $publish_directory #--self-contained
    cd $publish_directory
    tar -cvf $release_directory/City.Chain-$runtime-$git_commit.tar *
    rm -rf $publish_directory
    echo -e "${NONE}${GREEN}* Done${NONE}"
}

clear
echo && echo
# "City Chain" ASCII-art banner.
echo -e "${YELLOW} .d8888b. d8b 888 .d8888b. 888 d8b${NONE}"
echo -e "${YELLOW} d88P Y88b Y8P 888 d88P Y88b 888 Y8P${NONE}"
echo -e "${YELLOW} 888 888 888 888 888 888${NONE}"
echo -e "${YELLOW} 888 888 888888 888 888 888 88888b. 8888b. 888 88888b.${NONE}"
echo -e "${YELLOW} 888 888 888 888 888 888 888 *88b \"88b 888 888 \"88b${NONE}"
echo -e "${YELLOW} 888 888 888 888 888 888 888 888 888 888 .d888888 888 888 888${NONE}"
echo -e "${YELLOW} Y88b d88P 888 Y88b. Y88b 888 Y88b d88P 888 888 888 888 888 888 888${NONE}"
echo -e "${YELLOW} \"Y8888P\" 888 \"Y888 \"Y88888 \"Y8888P\" 888 888 \"Y888888 888 888 888${NONE}"
echo -e "${YELLOW} 888${NONE}"
echo -e "${YELLOW} Y8b d88P${NONE}"
echo -e "${YELLOW} \"Y88P\"${NONE}"
echo -e ${YELLOW}
echo -e ${YELLOW}
echo -e "${PURPLE}**********************************************************************${NONE}"
echo -e "${PURPLE}* ${NONE}This script will compile the full node for ${runtime}. *${NONE}"
echo -e "${PURPLE}**********************************************************************${NONE}"
echo -e "${BOLD}"
read -p "Please run this script as the root user. Do you want to compile full node for ${runtime} (y/n)?" response
echo
echo -e "${NONE}"
# Accept y/Y/yes (any case); anything else cancels.
if [[ "$response" =~ ^([yY][eE][sS]|[yY])+$ ]]; then
    check_root
    DisplayParameters
    compileWallet
    echo
    echo -e "${GREEN} Installation complete. ${NONE}"
    echo -e "${GREEN} thecrypt0hunter(2018)${NONE}"
else
    echo && echo -e "${RED} Installation cancelled! ${NONE}" && echo
fi
require_relative 'base'

module PushFormats
  # Push format descriptor for OpenURL.
  class Openurl < Base
    # Sets the machine-readable format name (@to_format) and the display
    # name (@name), then runs Base's initializer.
    # NOTE(review): the ivars are assigned before super() — confirm
    # Base#initialize reads rather than overwrites them.
    def initialize
      @to_format = 'openurl'
      @name = 'OpenURL'
      super()
    end
  end
end
<reponame>ArcheSpace/Arche.js
import { IColliderShape } from "./IColliderShape";

/**
 * Interface of physics sphere collider shape.
 * Extends the base collider-shape contract with the sphere's radius.
 */
export interface ISphereColliderShape extends IColliderShape {
  /**
   * Set radius of sphere.
   * @param radius - The new radius of the sphere
   */
  setRadius(radius: number): void;
}
def factorial(n):
    """Return n! (the factorial of n) for a non-negative integer n.

    Args:
        n: non-negative integer.

    Returns:
        n! as an int; factorial(0) == 1.

    Raises:
        ValueError: if n is negative. (The original recursive version
            recursed without bound on negative input, and could hit the
            interpreter recursion limit for large n; this version is
            iterative.)
    """
    if n < 0:
        raise ValueError("factorial() not defined for negative values")
    result = 1
    for i in range(2, n + 1):
        result *= i
    return result


num = 5
print("The factorial of", num, "is", factorial(num))
class Handler(object):
    """Minimal handler that prints a fixed message when invoked."""

    def handleSomething(self):
        # FIX: the original used the Python 2-only statement form
        # `print "eggs!"`, which is a SyntaxError under Python 3.
        # The call form below behaves identically on both Python 2 and 3
        # (in Python 2 the parentheses are ordinary grouping).
        print("eggs!")
import React from "react"
import { PageProps } from "gatsby"
import i18n from "i18next"
import SEO from "../../../../components/layout/seo"
import { useCustomTranslation } from "../../../../i18n-hook"
import { Comments } from "../../../../components/core/comments"
import translationFr from "../../../../locales/fr/asia/japan/tokyo/ueno-park.json"
import translationEn from "../../../../locales/en/asia/japan/tokyo/ueno-park.json"
import { JapanBlogLayout, JapanTitle } from "../../../../components/core/japan/japan"
import {
  How,
  HowLong,
  Introduction,
  SectionContent,
  Visit,
  WhatTimeOfYear,
  When,
  Where,
  WhereToHave,
  WhereToStay,
} from "../../../../components/core/section"
import { Conclusion } from "../../../../components/core/conclusion"
import { Divider } from "../../../../components/core/divider"
import { Quote } from "../../../../components/core/quote"
import HomeImgUrl from "../../../../images/asia/japan/tokyo/ueno-park/ueno-park-main.jpg"
import {
  GroupOfImages,
  ImageAsLandscape,
  ImageAsLandscapeOnTheLeft,
  ImageAsLandscapeOnTheRight,
  ImageAsPortrait,
  ImageAsPortraitOnTheLeft,
  ImageAsPortraitOnTheRight,
  TwoImagesSameSizeOrToGroup,
} from "../../../../components/images/layout"
import { SharedCardJapanImages } from "../../../../components/images/asia/japan/shared-card-japan-images"
import { UenoParkImages } from "../../../../components/images/asia/japan/tokyo/ueno-park"
import { ExternalLinkNotUnderlined } from "../../../../components/core/links/link"
import { buildPixabayUrl } from "../../../../utils"
import { BookingCard, BookingWarning } from "../../../../components/core/booking"
import { BookingGygCardContainer } from "../../../../components/layout/layout"
import keiseiHotel from "../../../../images/asia/japan/tokyo/ueno-park/apa-hotel-keisei-ueno-ekimae.jpg"
import inaricho from "../../../../images/asia/japan/tokyo/ueno-park/apa-hotel-ueno-inaricho-ekikita.jpg"
import resolHotel from "../../../../images/asia/japan/tokyo/ueno-park/resol-hotel.jpg"

// i18n namespace / page id for this article; translations are registered
// at module load so they are available before first render.
const namespace = "asia/japan/tokyo/ueno-park"
const id = "ueno-park"
i18n.addResourceBundle("fr", namespace, translationFr)
i18n.addResourceBundle("en", namespace, translationEn)

// Blog article page for Ueno Park (Tokyo, Japan).
// FIX: the two Pinterest nodes per language previously shared the same
// React `key` ("cardFr1"/"cardEn1" on both elements); keys in a list must
// be unique among siblings.
const IndexPage: React.FunctionComponent<PageProps> = ({ location }) => {
  const { t, i18n } = useCustomTranslation([namespace, "common"])
  const title = t(`common:country.japan.card.${id}`)
  return (
    <>
      <SEO
        title={title}
        fullTitle={t("full-title")}
        socialNetworkDescription={t("social-network-description")}
        googleDescription={t("google-description")}
        image={HomeImgUrl}
        location={location}
      />
      <JapanBlogLayout page={id} location={location}>
        <JapanTitle title={title} linkId={id} />
        <ImageAsLandscape>
          <SharedCardJapanImages image="uenoPark" />
        </ImageAsLandscape>
        <Quote>{t("quote")}</Quote>
        <Divider />
        <Introduction>{t("introduction")}</Introduction>
        <Divider />
        <Where title={t("where.title")}>
          <p>{t("where.part1")}</p>
        </Where>
        <When title={t("when.title")}>
          <p>{t("when.part1")}</p>
        </When>
        <How title={t("how.title")}>
          <ul>
            <li>{t("how.part1")}</li>
            <li>{t("how.part2")}</li>
          </ul>
          <p>{t("how.part3")}</p>
          <p>{t("how.part4")}</p>
        </How>
        <HowLong title={t("how-long.title")}>
          <p>{t("how-long.part1")}</p>
          <p>{t("how-long.part2")}</p>
          <p>{t("how-long.part3")}</p>
        </HowLong>
        <WhatTimeOfYear title={t("what-time-of-year.title")}>
          <p>{t("what-time-of-year.part1")}</p>
          <p>{t("what-time-of-year.part2")}</p>
          <p>{t("what-time-of-year.part3")}</p>
        </WhatTimeOfYear>
        <WhereToStay title={t("where-to-stay.title")}>
          <p>{t("where-to-stay.part1")}</p>
          <BookingGygCardContainer>
            <BookingCard
              hotel="jp/apahoteru-shang-ye-dao-he-ting-yi-bei"
              title="APA Hotel Ueno Inaricho Ekikita"
              image={inaricho}
              note="8,4"
              price={46}
              people={2}
              kind="hotel"
            />
            <BookingCard
              hotel="jp/apa-keisei-ueno-ekimae"
              title="APA Hotel Keisei Ueno Ekimae"
              image={keiseiHotel}
              note="8,1"
              price={70}
              people={2}
              kind="hotel"
            />
            <BookingCard
              hotel="jp/hotel-resol-ueno"
              title="Hotel Resol Ueno"
              image={resolHotel}
              note="8,7"
              price={100}
              people={2}
              kind="hotel"
            />
          </BookingGygCardContainer>
          <BookingWarning>{t("where-to-stay.part2")}</BookingWarning>
          <p>{t("where-to-stay.part3")}</p>
        </WhereToStay>
        <WhereToHave title={t("where-to-have.title")}>
          <p>{t("where-to-have.part1")}</p>
          <p>{t("where-to-have.part2")}</p>
          <ImageAsPortrait>
            <UenoParkImages image="restaurant" />
          </ImageAsPortrait>
        </WhereToHave>
        <Visit title={t("visit.title")}>
          <section>
            <SectionContent>
              <p>{t("visit.part1")}</p>
              <p>{t("visit.part2")}</p>
              <p>{t("visit.part3")}</p>
              <GroupOfImages>
                <ImageAsLandscape>
                  <UenoParkImages image="park" />
                </ImageAsLandscape>
                <ImageAsLandscapeOnTheLeft>
                  <UenoParkImages image="park2" />
                </ImageAsLandscapeOnTheLeft>
                <ImageAsLandscapeOnTheRight>
                  <UenoParkImages image="park3" />
                </ImageAsLandscapeOnTheRight>
                <ImageAsPortraitOnTheLeft>
                  <UenoParkImages image="park4" />
                </ImageAsPortraitOnTheLeft>
                <ImageAsPortraitOnTheRight>
                  <UenoParkImages image="park5" />
                </ImageAsPortraitOnTheRight>
              </GroupOfImages>
              <p>{t("visit.part4")}</p>
              <GroupOfImages>
                <ImageAsLandscape>
                  <UenoParkImages image="park6" />
                </ImageAsLandscape>
                <ImageAsLandscape>
                  <UenoParkImages image="park7" />
                </ImageAsLandscape>
              </GroupOfImages>
              <p>{t("visit.part5")}</p>
              <p>{t("visit.part6")}</p>
              <p>{t("visit.part7")}</p>
              <p>{t("visit.part8")}</p>
              <GroupOfImages>
                <ImageAsLandscape>
                  <UenoParkImages image="park8" />
                </ImageAsLandscape>
                <TwoImagesSameSizeOrToGroup>
                  <UenoParkImages image="park9" />
                  <UenoParkImages image="park10" />
                </TwoImagesSameSizeOrToGroup>
                <ImageAsPortrait>
                  <UenoParkImages image="park11" />
                </ImageAsPortrait>
              </GroupOfImages>
              <p>{t("visit.part9")}</p>
              <p>{t("visit.part10")}</p>
              <GroupOfImages>
                <ImageAsLandscape
                  credit={
                    <ExternalLinkNotUnderlined href={buildPixabayUrl(i18n.languageCode)("users/yui_ma-406082")}>
                      yui_ma
                    </ExternalLinkNotUnderlined>
                  }
                >
                  <UenoParkImages image="park12" />
                </ImageAsLandscape>
                <ImageAsLandscape
                  credit={
                    <ExternalLinkNotUnderlined href={buildPixabayUrl(i18n.languageCode)("users/yui_ma-406082")}>
                      yui_ma
                    </ExternalLinkNotUnderlined>
                  }
                >
                  <UenoParkImages image="park13" />
                </ImageAsLandscape>
              </GroupOfImages>
              <p>{t("visit.part11")}</p>
              <GroupOfImages>
                <ImageAsLandscape>
                  <UenoParkImages image="park14" />
                </ImageAsLandscape>
                <ImageAsLandscape>
                  <UenoParkImages image="park15" />
                </ImageAsLandscape>
                <TwoImagesSameSizeOrToGroup>
                  <UenoParkImages image="park16" />
                  <UenoParkImages image="park17" />
                </TwoImagesSameSizeOrToGroup>
                <ImageAsLandscape>
                  <UenoParkImages image="park18" />
                </ImageAsLandscape>
              </GroupOfImages>
            </SectionContent>
          </section>
        </Visit>
        <Divider />
        <Conclusion>
          <p>{t("conclusion")}</p>
          <ul>
            <li>{t("question1")}</li>
            <li>{t("question2")}</li>
          </ul>
          <p>{t("love")}</p>
        </Conclusion>
        <Divider />
        <Comments
          collectionName={namespace}
          location={location}
          facebookQuote={`${t("facebook.part1")}\n${t("facebook.part2")}`}
          pinterest={{
            description: t("pinterest"),
            nodes:
              i18n.languageCode === "fr"
                ? [<UenoParkImages image="cardFr1" key="cardFr1" />, <UenoParkImages image="cardFr2" key="cardFr2" />]
                : [<UenoParkImages image="cardEn1" key="cardEn1" />, <UenoParkImages image="cardEn2" key="cardEn2" />],
          }}
        />
      </JapanBlogLayout>
    </>
  )
}

export default IndexPage
#!/bin/bash

# Packages required for the gphoto2-as-webcam setup.
PACKAGES=(
    'gphoto2'
    'ffmpeg'
    'v4l2loopback-dkms'
    'v4l-utils'
    'python3'
    'python3-gphoto2'
    'python3-psutil'
    'python3-zmq'
)

# Print an informational message in cyan.
function info {
    echo -e "\033[0;36m${1}\033[0m"
}

# Print an error message in red.
function error {
    echo -e "\033[0;31m${1}\033[0m"
}

if [ $UID != 0 ]; then
    error "ERROR: Only root is allowed to execute the installer. Forgot sudo?"
    exit 1
fi

# Prompt with $1 and read a single character into $REPLY.
# Param 1: Question / Param 2: Default / silent answer
# NOTE(review): callers pass a second "default" argument but this
# implementation never uses it.
function ask_yes_no {
    read -p "${1}: " -n 1 -r
}

info "This script installs some dependencies and simplifies the setup for using gphoto2 as webcam."
info "It installs required dependencies and sets up a virtual webcam that gphoto2 can stream video to."
info "It can remove the gphoto2 webcam setup, as well."
info ""
echo "Your options are:"
echo "1 Install gphoto2 webcam"
echo "2 Remove gphoto2 webcam"
echo "3 Nothing"
info ""
ask_yes_no "Please enter your choice" "3"
info ""

if [[ $REPLY =~ ^[1]$ ]]; then
    info "### Installing required software..."
    # BUG FIX: this loop previously iterated over "${COMMON_PACKAGES[@]}",
    # an array that is never defined, so no packages were ever checked or
    # installed. It now uses the PACKAGES array declared above.
    for package in "${PACKAGES[@]}"; do
        if [ "$(dpkg-query -W -f='${Status}' "${package}" 2>/dev/null | grep -c "ok installed")" -eq 1 ]; then
            info "[Package] ${package} installed already"
        else
            info "[Package] Installing missing package: ${package}"
            apt install -y "${package}"
        fi
    done
    info "All required software was installed."
    info ""
    info "Note: Installing gphoto2 as webcam disables other webcams."
    info ""
    ask_yes_no "Do you want to setup gphoto2 as a webcam? (y/n)" "n"
    info ""
    if [[ $REPLY =~ ^[Yy]$ ]]
    then
        info "### Installing gphoto2 webcam service."
        cp 'ffmpeg-webcam.service' '/etc/systemd/system/ffmpeg-webcam.service'
        cp 'ffmpeg-webcam.sh' '/usr/ffmpeg-webcam.sh'
        chmod +x '/usr/ffmpeg-webcam.sh'
        systemctl start ffmpeg-webcam.service
        systemctl enable ffmpeg-webcam.service
        info "gphoto2 webcam service installed and running..."
    fi
elif [[ $REPLY =~ ^[2]$ ]]; then
    info "### Stopping and removing gphoto2 webcam service."
    systemctl stop ffmpeg-webcam.service
    systemctl disable ffmpeg-webcam.service
    rm '/usr/ffmpeg-webcam.sh'
    rm '/etc/systemd/system/ffmpeg-webcam.service'
    info "gphoto2 webcam service stopped and removed..."
else
    info "Okay... doing nothing!"
fi

exit 0
/* $Id: maindefs.h,v 1.3 2003/02/14 00:35:22 elf Exp $ */

/* Compile-time defaults for xmotd (message-of-the-day displayer). */

#define TIMESTAMP ".xmotd" /* the date of this file, in user's
                            * home-directory, is used to
                            * determine whether the motd has
                            * changed and should be displayed*/

/* printf-style usage string; %s receives the program name. */
#define USAGESTRING "Usage:\n%s [X-toolkit-options] [options] {{file [file2 ...]} {dir/}}\n"

/* printf-style error shown when a bitmap file fails to load; %s is the filename. */
#define BAD_BITMAP_MESSAGE "Error reading bitmap file: %s.\nPossible causes:\n the file does not exist (perhaps you mis-typed the name),\n or it is not a valid X bitmap,\n or there is insufficient memory to allocate the bitmap.\n"

/* Labels for the two navigation buttons. */
#define NEXT_MESSAGE_LABEL "Next Message"
#define LAST_MESSAGE_LABEL "Dismiss"

/* NOTE(review): hard-coded browser path — confirm it is meant to be
 * overridden at build time rather than configurable at runtime. */
#define BROWSER "/usr/local/bin/netscape"

/* X atom name — presumably used to identify the application to the X
 * server; confirm against the code that interns it. */
#define ATOM "xmotd"
#!/usr/bin/env bash if [ -z ${TORSETUP_DIR+x} ]; then _DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" grep -q "/lib" <<< "$_DIR" && TORSETUP_DIR=$(dirname "$_DIR") || TORSETUP_DIR="$_DIR" fi # if [ -z ${TORSETUP_FUNCS_LOADED+x} ]; then # source "${TORSETUP_DIR}/lib/functions.sh" || { # >&2 echo "${BOLD}${RED}[questions.sh] CRITICAL ERROR: Could not load functions file at '${TORSETUP_DIR}/lib/functions.sh' - Exiting!${RESET}" # exit 1 # } # fi [ ! -z ${TORSETUP_FUNCS_LOADED+x} ] || source "${TORSETUP_DIR}/lib/functions.sh" || { >&2 echo "${BOLD}${RED}[questions.sh] CRITICAL ERROR: Could not load functions file at '${TORSETUP_DIR}/lib/functions.sh' - Exiting!${RESET}" exit 1 } TORSETUP_QUESTIONS_LOADED='y' # Used to detect if this file has already been sourced or not # Ask user for the node nickname they want # Output variable(s): TOR_NICKNAME q_nickname() { ask "What nickname should we use for your node, for example 'JohnDoeExitSE' ${RESET}>>> " \ TOR_NICKNAME "ERROR: Please enter a nickname." n yes } # Ask user for the rate limit + burst limit they want, in mbps # NOTE: you probably want to call q_rate_limit instead, which asks whether the user wants rate limiting or not # Output variable(s): TOR_RATE_MBPS (integer) TOR_BURST_MBPS (integer) set_rate_limit() { local ratenum burstnum while true; do msg ask "Please enter how many megabits per second (mbps) your Tor node should aim to stay under (RelayBandwidthRate) ${RESET}>>> " \ TOR_RATE_MBPS "ERROR: Please enter a valid number for the RelayBandwidthRate question." number msg msg yellow "NOTE: For the following question - RelayBandwidthBurst should be EQUAL TO, or GREATER than RelayBandwidthRate - do not enter 0 or a lower rate." ask "Please enter how many megabits per second (mbps) your Tor node should use at most (RelayBandwidthBurst) ${RESET}>>> " \ TOR_BURST_MBPS "ERROR: Please enter a valid number for the RelayBandwidthRate question." 
number msg "\n$_LN\n" msg green "You entered for the rate limit (RelayBandwidthRate): $TOR_RATE_MBPS megabits per second (mbps)" msg green "You entered for the burst rate (RelayBandwidthBurst): $TOR_BURST_MBPS megabits per second (mbps)" msg "\n$_LN\n" if yesno "Are the above numbers correct? (Y/n) > " defyes; then msg green "Great! Let's continue with the setup." break else msg yellow "Oh no :( - We'll ask you the questions again so you can fix them." fi done } # Ask user if they want to set up rate limiting for their node, then prompt for rate limit + burst limit # Output variable(s): TOR_USE_LIMIT (y/n) TOR_RATE_MBPS (integer) TOR_BURST_MBPS (integer) q_rate_limit() { msg bold purple "------------- Network rate limiting ------------- " msg msg green "If your Privex server has a ${BOLD}limited amount of bandwidth per month${RESET}${GREEN}, then you should configure a network speed limit." msg msg green "If your Privex server has ${BOLD}'unmetered'${RESET}${GREEN} networking, e.g. our Tor node packages, then you do not need to configure a network speed limit, unless you" msg green "plan to run other applications on this server, and would like to ensure some of your network capacity is kept available for other applications." msg msg bold cyan "Examples of speed limits and how much bandwidth may be used:\n" msg green "\t A speed limit of ${BOLD}10 megabits per second (10mbps)${RESET}${GREEN} would generally result in bandwidth usage of up to" \ "108 gigabytes per day, or 3.3 terabytes per month.\n" msg green "\t A speed limit of ${BOLD}50 megabits per second (50mbps)${RESET}${GREEN} would generally result in bandwidth usage of up" \ "to 540 gigabytes per day, or 17 terabytes per month.\n" if yesno "${BOLD}${BLUE}Do you want to set a network speed rate limit on your Tor node? 
(y/n) ${RESET}>>> "; then
        TOR_USE_LIMIT="y"
        msg bold purple "\n------------- Understanding megabits ------------- \n"
        msg
        msg bold "The following questions expect a plain number representing mega*BITS* per second."
        msg "Megabits are usually expressed as 'mbps' (megabits per second), while megabytes are usually mb/s or mbyte/s"
        msg "Eight (8) megabits is equal to One (1) megabyte. 100 mbit/s == 12.5 mbyte/s, 1 gigabit (gbps) == 1000mbps == 125 mbyte/s"
        msg
        msg green "Two types of speed limits can be configured, ${BLUE}${BOLD}RelayBandwidthRate${RESET}${GREEN} (rate limit) and" \
            "${BLUE}${BOLD}RelayBandwidthBurst${RESET}${GREEN} (temporary bursts)\n"
        msg "\t - ${BLUE}${BOLD}RelayBandwidthRate${RESET}, the average network speed that your Tor node should aim to stay under\n"
        msg "\t - ${BLUE}${BOLD}RelayBandwidthBurst${RESET}, the 'burst' rate, if set higher than the normal rate, then occasionally the network speed may increase"
        msg "\t up to this speed for a short period of time (e.g. only for a few minutes every hour)"
        set_rate_limit
    fi
}

# Ask user if they run any other tor relays/exits, then prompt for their fingerprints if known
# Output variable(s): TOR_USE_FAMILY (y/n) TOR_FINGERPRINTS (ABCD123 DEFA456 DBCA890)
q_family() {
    msg bold purple "\n------------- Tor Node Family -------------\n"
    msg bold yellow "NOTE:${RESET}${YELLOW} If the only other node you're operating is a ${BOLD}${CYAN}BRIDGE node${RESET}${YELLOW}, enter no for the next question."
    msg yellow "Tor bridges are supposed to be kept hidden, so they should not be entered in your Tor node's family config.\n"
    if yesno "${BOLD}${BLUE}Do you operate any other Tor relays or exit nodes? (y/n) ${RESET}>>> "; then
        msg
        msg purple "It's strongly recommended to configure your 'Tor node family', which is a list of fingerprints of other Tor nodes which you operate."
        msg purple "This is used to ensure Tor clients will never build a path with more than one of your nodes in the Tor path."
        msg purple "Please find the fingerprints of your other Tor relays / exit nodes. You can generally find the fingerprint by running:\n"
        msg "\tsudo cat /var/lib/tor/fingerprint\n"
        msg purple "on each Tor node. You only need the hexadecimal fingerprints which look like: ${BOLD}ABCD1234ABCD1234"
        msg
        msg green "Alternatively, you can search for your relays/exits and find their fingerprints using the Tor project's Relay Search tool:"
        msg "\n\t https://metrics.torproject.org/rs.html \n"
        if yesno "${BOLD}${BLUE}Do you wish to configure a Tor node family? (Y/n) ${RESET}>>> " defyes; then
            TOR_USE_FAMILY="y"
            msg
            ask "Please enter each fingerprint of your other Tor nodes, separated by a space ${RESET}>>> " \
                TOR_FINGERPRINTS "ERROR: Please enter one or more fingerprints, separated by spaces." n yes
        fi
    fi
}

# Detect if user has IPv4, otherwise ask them to find their IPv4 address
# NOTE: you probably want to call detect_ips instead, which calls both detect_ipv4 and detect_ipv6
# Output variable(s): HAS_IP4 (y/n) IPV4_ADDRESS (1.2.3.4)
detect_ipv4() {
    msg yellow " [-] Checking if your server has IPv4 support...\n"
    if myip4 > /dev/null; then
        IPV4_ADDRESS=$(myip4)
        msg green " [+] SUCCESS. IPv4 address detected: ${IPV4_ADDRESS}\n"
        HAS_IP4='y'
        return 0
    fi
    # Non-interactive mode: IPv4 is mandatory for a Tor node, so fail hard
    # instead of prompting when AUTO_IP_NO_PROMPT is enabled.
    if [ ! -z ${AUTO_IP_NO_PROMPT+x} ] && [[ "$AUTO_IP_NO_PROMPT" == "y" ]]; then
        >&2 msg red " [!!!] Failed to detect public IPv4 address. Skipping prompt as AUTO_IP_NO_PROMPT is enabled"
        >&2 msg red " [!!!] As setting up a Tor node requires IPv4, returning non-zero code to abort setup..."
        return 1
    fi
    # Show the interface config so the user can pick out their public address.
    ip -4 addr
    msg bold red "ERROR: We could not automatically determine your primary IPv4 address\n"
    msg yellow "Please look at your server's IPv4 configuration above, and tell us what your PUBLIC IPv4 address is (excluding the /xx subnet portion)."
    msg yellow "Privex servers normally have an IP address that looks like: 185.130.44.xx "
    msg yellow "IP addresses which look like '10.x.x.x', '192.168.x.x' or '172.16.x.x' are NOT public IPv4 addresses."
    msg
    ask "Please enter your server's public IPv4 address (excluding the /xx subnet portion) ${RESET}>>> " \
        IPV4_ADDRESS "ERROR: Please enter your server's public IPv4 address!" n yes
    HAS_IP4='y'
}

# Detect if user has IPv6, otherwise ask them to find their IPv6 address
# NOTE: you probably want to call detect_ips instead, which calls both detect_ipv4 and detect_ipv6
# Output variable(s): HAS_IP6 (y/n) IPV6_ADDRESS (2a07:e01:abc:def::2)
detect_ipv6() {
    msg yellow " [-] Checking if your server has IPv6 support...\n"
    if myip6 > /dev/null; then
        IPV6_ADDRESS=$(myip6)
        msg green " [+] SUCCESS. IPv6 address detected: ${IPV6_ADDRESS}\n"
        HAS_IP6='y'
        return 0
    fi
    HAS_IP6='n'
    # IPv6 is optional, so in non-interactive mode we just continue without it.
    if [ ! -z ${AUTO_IP_NO_PROMPT+x} ] && [[ "$AUTO_IP_NO_PROMPT" == "y" ]]; then
        msg red " [!!!] Failed to detect public IPv6 address. Skipping prompt as AUTO_IP_NO_PROMPT is enabled. "
        return 0
    fi
    ip -6 addr
    msg bold red "ERROR: We could not automatically determine your primary IPv6 address\n"
    msg yellow "Please look at your server's IPv6 configuration above, and tell us what your PUBLIC IPv6 address is (excluding the /xx subnet portion)."
    # FIX: this message previously said "IPv4 address" while showing an IPv6 example.
    msg yellow "Privex servers normally have an IPv6 address that looks like: 2a07:e01:ab:c1::2 "
    msg yellow "IPv6 addresses which begin with 'fe80:' are NOT public IPv6 addresses."
    msg
    msg bold yellow "If your server DOES NOT have a public IPv6 address, please just leave the answer blank and press ENTER to disable IPv6 on your Tor node."
    ask "Please enter your server's public IPv6 address (excluding the /xx subnet portion) ${RESET}>>> " IPV6_ADDRESS "" allowblank yes
    msg
    if [ -z "$IPV6_ADDRESS" ]; then
        msg red " [!!!] Your response was empty. We'll assume you don't have IPv6 support and disable IPv6 on your Tor node.\n"
    else
        msg green " >> Your response was non-empty: $IPV6_ADDRESS\n"
        msg green " [+] Enabling IPv6 support for your Tor node :)\n"
        HAS_IP6='y'
    fi
}

# Detect public IPv4 / IPv6 of this node
# Output variable(s): HAS_IP4 (y/n) HAS_IP6 (y/n) IPV4_ADDRESS (1.2.3.4) IPV6_ADDRESS (2a07:e01:abc:def::2)
detect_ips() {
    msg bold green " >>> Automatically detecting your server's public IPv4 and IPv6 addresses...\n"
    detect_ipv4
    detect_ipv6
    msg bold green " +++ Finished IP address configuration \n"
}

# Ask user if they want to use our reduced exit node policy
# NOTE: you probably want to use q_is_exit instead, which calls this function.
# Output variable(s): TOR_REDUCED_EXIT (y/n)
q_reduced_exit() {
    msg purple "To help reduce the amount of abuse emails that a Tor exit node produces, as well as helping prevent malicious uses of the Tor network"
    msg purple "such as email spam, SSH and SQL database brute forcing, and malicious persons using exploits, ${BOLD}we've included a Reduced Exit Policy."
    msg
    msg purple "The ${BOLD}'Reduced Exit Policy'${RESET}${MAGENTA} we include, is based on the official reduced exit policy from the" \
        "Tor wiki https://trac.torproject.org/projects/tor/wiki/doc/ReducedExitPolicy"
    msg purple "and has been ${BOLD}slightly modified and updated by Privex${RESET}${MAGENTA} to both reduce the amount of abuse reports caused by Tor exit nodes"
    # FIX: the closing period was previously outside of the quoted string.
    msg purple "without impacting the majority of Tor users - plus some new service port whitelist additions."
    msg
    msg purple "If you're not using this setup tool on a Privex server, or you've been otherwise authorized to run your exit without a reduced exit policy, then"
    msg purple "you'll be given the option to use a standard 'allow everything' exit policy after the following question:"
    msg
    if yesno "${BOLD}${BLUE}Would you like to view the included Reduced Exit Policy? (Y/n) ${RESET}>>> " defyes; then
        msg bold green "# ------- Reduced exit policy at ${EXIT_TORRC} -------"
        cat "$EXIT_TORRC"
        msg bold green "# ------- End of reduced exit policy at ${EXIT_TORRC} -------"
    fi
    msg
    msg
    if yesno "${BOLD}${BLUE}Do you want to use the included Reduced Exit Policy (strongly recommended)? (Y/n) ${RESET}>>> " defyes; then
        msg green "\n [+] Setting 'Use reduced exit policy' to ${BOLD}YES\n"
        TOR_REDUCED_EXIT="y"
    else
        msg yellow "\n [+] Setting 'Use reduced exit policy' to ${BOLD}${RED}NO\n"
        msg purple "Your exit policy will be set to allow everything: ${BOLD}ExitPolicy accept *:*\n"
    fi
}

# Ask user if they want to run an exit node, and offer reduced exit node policy
# Output variable(s): TOR_IS_EXIT (y/n) TOR_REDUCED_EXIT (y/n)
q_is_exit() {
    msg purple "If you say no to the following question, then your Tor node will be configured as a non-exit relay."
    if yesno "${BOLD}${BLUE}Do you want this Tor node to be an EXIT node? (y/n) ${RESET}>>> "; then
        TOR_IS_EXIT="y"
        msg green "\n [+] Configuring your node as a Tor ${BOLD}Exit Node\n"
        #######################
        # REDUCED EXIT POLICY #
        #######################
        q_reduced_exit
    else
        msg green "\n [+] Configuring your node as a Tor ${BOLD}Relay Node (Not an Exit)\n"
    fi
}

# Print reverse DNS for $IPV4_ADDRESS and $IPV6_ADDRESS (if HAS_IP6 is 'y')
show_rdns() {
    msg
    msg green "\t IPv4 address:\t ${BOLD}${IPV4_ADDRESS}"
    msg green "\t IPv4 Reverse DNS:\t ${BOLD}$(dig +short -x $IPV4_ADDRESS)"
    msg
    if [[ "$HAS_IP6" == 'y' ]]; then
        msg green "\t IPv6 address:\t ${BOLD}${IPV6_ADDRESS}"
        msg green "\t IPv6 Reverse DNS:\t ${BOLD}$(dig +short -x $IPV6_ADDRESS)"
    else
        msg yellow "\t IPv6 address not found / configured on this server. Skipping rDNS."
    fi
    msg
}

# Show user current IPv4 / v6 reverse DNS, then ask user for planned reverse DNS (for HTML display)
# Output variable(s): TOR_RDNS e.g. www.example.com OR 'n/a'
q_rdns() {
    msg bold purple "Reverse DNS configuration"
    msg purple "For the Tor server notice we'll display on port 80 (HTTP), we display your server's Reverse DNS (rDNS)"
    msg purple "Below is the current reverse DNS published for your IPv4 and IPv6 (if enabled) addresses"
    show_rdns
    msg purple "If you don't yet have a reverse DNS which makes it clear that this is a Tor node, we strongly recommend setting"
    msg purple "up reverse DNS which matches a domain you own, such as ${BOLD}tor-exit-node1.mydomain.com"
    msg purple "You can do this either via your provider's server panel, or by emailing their customer support."
    msg
    msg purple "If you plan to set up reverse DNS later, you can just enter the domain you plan to have set on your IPv4/v6 rDNS"
    msg purple "If this is only a relay, then reverse DNS isn't too important - you can skip it just by typing 'n/a'"
    msg
    ask "Please enter the (planned) rDNS domain you'd like displayed on the Tor notice page, e.g. tor-exit.mydomain.com ${RESET}>>> " \
        TOR_RDNS "ERROR: Please enter a domain." n yes
}

# Ask user for node operator name / contact details
# Output variable(s): TOR_NODE_OPERATOR e.g. John Doe (www.example.com)
q_operator() {
    msg bold purple "Node operator contact information"
    msg
    msg purple "This will be displayed on your Tor node's metadata when people look at lists of tor relays/exits"
    msg purple "This will also be shown on the ${BOLD}Tor HTML notice${RESET}${MAGENTA} that will be available on this server's port 80."
    msg purple "Common formatting examples:"
    msg
    msg purple "\ta simple name or username, e.g. ${BOLD}John Doe"
    msg purple "\ta name + website e.g. ${BOLD}ExampleCo Ltd. (https://example.org)"
    msg purple "\ta name + email e.g. ${BOLD}Dave (dave [at] example (.) org)"
    msg
    msg purple "It's important (but not mandatory) that you include *some* way of contacting you / others with access to the node, as the contact information"
    msg purple "can sometimes be used to alert a node operator of an urgent security issue with their node, or misconfigurations that are causing problems."
    msg bold purple "NOTE: (For exit operators!) The contact information that you publish for your Tor node generally DOES NOT get used for spammy abuse reports.\n" \
        "      Generally, abuse reports are sent to the abuse email listed on WHOIS databases for the IP address which caused the abuse."
    msg
    ask "Please enter the operator name and/or contact details to be published for your Tor node ${RESET}>>> " \
        TOR_NODE_OPERATOR "ERROR: Please enter your operator name / contact info." n yes
}

# Ask user for expected domain pointed to their node
# Output variable(s): TOR_HAS_DOMAIN (y/n) TOR_DOMAIN (format: example.com or 1.2.3.4 if no domain)
q_domain() {
    if yesno "${BOLD}${BLUE}Do you have a domain/subdomain pointing at this tor node (or plan to point one soon)? (y/N) >> ${RESET}" defno; then
        TOR_HAS_DOMAIN='y'
        # FIX: the error message previously said "operator name / contact info"
        # (copy-pasted from q_operator) even though this prompt asks for a domain.
        ask "Please enter the domain (e.g. tor-node.example.com ) you're pointing (or plan to) at this Tor server ${RESET}>>> " \
            TOR_DOMAIN "ERROR: Please enter the domain you're pointing at this Tor server." n yes
        msg green "\n [+] Setting your Tor node's public domain to ${BOLD}${TOR_DOMAIN}\n"
    else
        TOR_HAS_DOMAIN='n'
        # Fall back to the IPv4 address for display purposes when no domain exists.
        TOR_DOMAIN="$IPV4_ADDRESS"
        msg red "\n [!!!] No public domain... We'll just display your IPv4 address '$IPV4_ADDRESS' instead.\n"
    fi
}

# Ask user for expected network speed of their node (for directory HTML display)
# Output variable(s): TOR_NETWORK_SPEED (format: 100mbps)
q_net_speed() {
    msg
    msg purple " (?) For the below question. On the Tor HTML notice (aka tor directory port notice), we display a network speed, which allows people to find out"
    msg purple "     how much capacity this Tor node can handle / is handling."
    msg
    msg purple " If you set a rate limit earlier, for example 40mbps rate limit with 80mbps burst, then you should answer the next question with: 40-80 mbps"
    msg purple " If you didn't set a rate limit, then enter your servers network speed in mbps, for example: 100mbps"
    msg
    msg purple " If you ${BOLD}don't know your server's network speed${RESET}${MAGENTA}, just enter: 20mbps (it can be changed later)"
    msg
    ask "What network speed should be displayed on the Tor HTML notice? ${RESET}>>> " \
        TOR_NETWORK_SPEED "ERROR: Please enter your network speed, e.g. 100mbps" n yes
}

# Print a human readable summary of every answer collected above, aligned
# into columns via `column` using tab as the field separator.
tor_summary() {
    msg bold green "##########################################################"
    msg bold green "# #"
    msg bold green "# #"
    msg bold green "# Configuration Summary #"
    msg bold green "# #"
    msg bold green "# #"
    msg bold green "##########################################################"
    msg
    # Convert y/n flags into readable YES/NO for display.
    [[ "$TOR_REDUCED_EXIT" == "y" ]] && _REX='YES' || _REX='NO'
    [[ "$TOR_USE_LIMIT" == "y" ]] && _RL='YES' || _RL='NO'
    [[ "$TOR_USE_FAMILY" == "y" ]] && _HVFAM='YES' || _HVFAM='NO'
    _EX_MSG="Node type\t${BOLD}EXIT NODE"
    _RP_MSG="Reduced exit policy\t${BOLD}${_REX}"
    {
        [[ "$HAS_IP4" == "y" ]] && msg green "IPv4 Address\t${BOLD}${IPV4_ADDRESS}\n"
        [[ "$HAS_IP6" == "y" ]] && msg green "IPv6 Address\t${BOLD}${IPV6_ADDRESS}\n"
        msg green "Node nickname\t${BOLD}${TOR_NICKNAME}"
        [[ "$TOR_IS_EXIT" == "y" ]] && msg green "$_EX_MSG" && msg green "$_RP_MSG" || msg green "Node type\t${BOLD}Relay node (not an exit)"
        msg green "Have other Tor nodes?\t${BOLD}$_HVFAM"
        [[ "$TOR_USE_FAMILY" == "y" ]] && msg green "Family fingerprints\t${BOLD}$TOR_FINGERPRINTS"
        msg green "Rate limiting\t${BOLD}$_RL"
        [[ "$TOR_USE_LIMIT" == "y" ]] && msg green "Limit // Burst\t${BOLD}$TOR_RATE_MBPS mbps // $TOR_BURST_MBPS mbps"
        msg green "Reverse DNS\t${BOLD}$TOR_RDNS"
        msg green "Operator Name/Contact\t${BOLD}${TOR_NODE_OPERATOR}"
        msg green "Domain for the node\t${BOLD}${TOR_DOMAIN}"
        msg green "Network speed\t${BOLD}${TOR_NETWORK_SPEED}";
    } | column -t -s $'\t'
    msg "\n$_LN\n"
}
<reponame>Project-ITSOL-Selling/front-end-prime import {Component, OnInit} from '@angular/core'; import {FormBuilder, FormGroup} from '@angular/forms'; import {NgbModal} from '@ng-bootstrap/ng-bootstrap'; import {NgxSpinnerService} from 'ngx-spinner'; import {DEFAULT_MODAL_OPTIONS} from '../../@core/app-config'; import {BillDetailService} from '../../@core/services/_service/bill-detail.service'; import {ActionBillDetailComponent} from './action-bill-detail/action-bill-detail.component'; import {DeleteBillDetailComponent} from './delete-bill-detail/delete-bill-detail.component'; @Component({ selector: 'ngx-bill-detail', templateUrl: './bill-detail.component.html', styleUrls: ['./bill-detail.component.scss'], }) export class BillDetailComponent implements OnInit { formSearch: FormGroup; listBillDetail: any[] = []; total: any; lstDel: any[] = []; lstDataSearch: any[] = []; constructor( private modal: NgbModal, private fb: FormBuilder, private service: BillDetailService, private spinner: NgxSpinnerService, ) { } ngOnInit(): void { this.processSearchData(); } // initForm() { // this.formSearch = this.fb.group({ // price: [''], // }); // } // getDataSearch() { // this.service.getListBillOrder().subscribe(res => { // this.lstDataSearch = res.data; // }); // } // // processSearch(event?: any) { // // @ts-ignore // this.processSearchData(event); // } processEdit(item: any) { const modalRef = this.modal.open(ActionBillDetailComponent, DEFAULT_MODAL_OPTIONS); modalRef.componentInstance.action = false; modalRef.componentInstance.billDetail = item; modalRef.result.then(value => { if (value === 'success') { this.processSearchData(); } }, ); } processSearchData(event?: any) { this.spinner.show(); this.service.getListBillDetail().subscribe(res => { this.spinner.hide(); this.listBillDetail = res.data; this.total = res.recordsTotal; }); } processSave() { const modalRef = this.modal.open(ActionBillDetailComponent, DEFAULT_MODAL_OPTIONS); modalRef.componentInstance.action = true; 
modalRef.result.then(value => { if (value === 'success') { this.processSearchData(); } }, (reason) => { }); } processDelete(id: any) { const modalRef = this.modal.open(DeleteBillDetailComponent, DEFAULT_MODAL_OPTIONS); modalRef.componentInstance.idBillDetail = id; modalRef.result.then(value => { if (value === 'success') { this.processSearchData(); } }, (reason) => { }); } close() { // @ts-ignore this.modal.close(); } }
package ch.bernmobil.vibe.shared.contract; /** * Database contract to define table name and column name for {@link ch.bernmobil.vibe.shared.entity.Stop}. * * @author <NAME> * @author <NAME> */ @SuppressWarnings("ALL") public final class StopContract { public static final String TABLE_NAME = "stop"; public static final String ID = "id"; public static final String NAME = "name"; public static final String AREA = "area"; public static final String UPDATE = "update"; public static final String[] COLUMNS = {ID, NAME, AREA, UPDATE}; private StopContract() {} }
def matrix_multiplication(A, B): # Matrix to store the result result = [[0 for _ in range(len(B[0]))] for _ in range(len(A))] # Multiply matrices if the inner dimensions are equal if len(A[0]) == len(B): # Iterate through rows of A for i in range(len(A)): # Iterate through columns of B for j in range(len(B[0])): # Iterate through rows of B for k in range(len(B)): result[i][j] += A[i][k] * B[k][j] return result else: return "Matrices are of incompatible sizes for multiplication!"
<filename>fame-server/src/main/java/com/designre/blog/model/dto/TokenDto.java package com.designre.blog.model.dto; import lombok.Data; @Data public class TokenDto { private String token; private String refreshToken; }
import React from 'react' import Layout from '../components/layout' import Seo from '../components/seo' const IndexPage = () => ( <Layout> <Seo title="Dan" /> <h1>Hello there.</h1> <p> I am a full-stack developer. JavaScript, Web security and Postgres are my current passions. </p> <p> There are other technologies that I am interested in. Systems programming through Rust. WASM. CSS painting. Virtual Reality. </p> <p> My goal is to showcase projects without having to tinker within frameworks too much. Projects will be hosted separately and the write ups will be in markdown. So be sure to check back occasionally for new content! </p> </Layout> ) export default IndexPage
#!/usr/bin/env bash

# PLEASE NOTE: This script has been automatically generated by conda-smithy. Any changes here
# will be lost next time ``conda smithy rerender`` is run. If you would like to make permanent
# changes to this script, consider a proposal to conda-smithy so that other feedstocks can also
# benefit from the improvement.

# Fail fast, echo commands, and treat unset variables as errors.
set -xeuo pipefail

export FEEDSTOCK_ROOT="${FEEDSTOCK_ROOT:-/home/conda/feedstock_root}"
source ${FEEDSTOCK_ROOT}/.scripts/logging_utils.sh

( endgroup "Start Docker" ) 2> /dev/null

( startgroup "Configuring conda" ) 2> /dev/null

export PYTHONUNBUFFERED=1
export RECIPE_ROOT="${RECIPE_ROOT:-/home/conda/recipe_root}"
export CI_SUPPORT="${FEEDSTOCK_ROOT}/.ci_support"
export CONFIG_FILE="${CI_SUPPORT}/${CONFIG}.yaml"

# Point conda-build's output directory into the feedstock tree.
cat >~/.condarc <<CONDARC
conda-build:
  root-dir: ${FEEDSTOCK_ROOT}/build_artifacts
CONDARC

GET_BOA=boa
BUILD_CMD=mambabuild

conda install --yes --quiet "conda-forge-ci-setup=3" conda-build pip ${GET_BOA:-} -c conda-forge

# set up the condarc
setup_conda_rc "${FEEDSTOCK_ROOT}" "${RECIPE_ROOT}" "${CONFIG_FILE}"

source run_conda_forge_build_setup

# Install the yum requirements defined canonically in the
# "recipe/yum_requirements.txt" file. After updating that file,
# run "conda smithy rerender" and this line will be updated
# automatically.
/usr/bin/sudo -n yum install -y mesa-libGL mesa-dri-drivers libselinux libXdamage libXxf86vm xorg-x11-server-Xvfb gtk2-devel

# make the build number clobber
make_build_number "${FEEDSTOCK_ROOT}" "${RECIPE_ROOT}" "${CONFIG_FILE}"

( endgroup "Configuring conda" ) 2> /dev/null

# Debug mode drops into an interactive shell instead of building/uploading.
if [[ "${BUILD_WITH_CONDA_DEBUG:-0}" == 1 ]]; then
    if [[ "x${BUILD_OUTPUT_ID:-}" != "x" ]]; then
        EXTRA_CB_OPTIONS="${EXTRA_CB_OPTIONS:-} --output-id ${BUILD_OUTPUT_ID}"
    fi
    conda debug "${RECIPE_ROOT}" -m "${CI_SUPPORT}/${CONFIG}.yaml" \
        ${EXTRA_CB_OPTIONS:-} \
        --clobber-file "${CI_SUPPORT}/clobber_${CONFIG}.yaml"

    # Drop into an interactive shell
    /bin/bash
else
    conda $BUILD_CMD "${RECIPE_ROOT}" -m "${CI_SUPPORT}/${CONFIG}.yaml" \
        --suppress-variables ${EXTRA_CB_OPTIONS:-} \
        --clobber-file "${CI_SUPPORT}/clobber_${CONFIG}.yaml"
    ( startgroup "Validating outputs" ) 2> /dev/null

    validate_recipe_outputs "${FEEDSTOCK_NAME}"

    ( endgroup "Validating outputs" ) 2> /dev/null

    ( startgroup "Uploading packages" ) 2> /dev/null

    # Only upload from non-PR builds when uploads are enabled.
    if [[ "${UPLOAD_PACKAGES}" != "False" ]] && [[ "${IS_PR_BUILD}" == "False" ]]; then
        upload_package --validate --feedstock-name="${FEEDSTOCK_NAME}" "${FEEDSTOCK_ROOT}" "${RECIPE_ROOT}" "${CONFIG_FILE}"
    fi

    ( endgroup "Uploading packages" ) 2> /dev/null
fi

( startgroup "Final checks" ) 2> /dev/null

# Marker file signalling to the CI driver that this config finished building.
touch "${FEEDSTOCK_ROOT}/build_artifacts/conda-forge-build-done-${CONFIG}"
#!/bin/bash fate_cos_address=https://webank-ai-1251170195.cos.ap-guangzhou.myqcloud.com version=1.3.0 egg_version= meta_service_version= roll_version= federation_version= proxy_version= fateboard_version= python_version=1.3.0 jdk_version=8u192 mysql_version=8.0.13 redis_version=5.0.2 fate_flow_db_name=fate_flow eggroll_meta_service_db_name=eggroll_meta package_init() { output_packages_dir=$1 module_name=$2 cd ${output_packages_dir}/source if [[ -e "${module_name}" ]] then rm -rf ${module_name} fi mkdir -p ${module_name} cd ${module_name} } get_module_package() { source_code_dir=$1 module_name=$2 module_binary_package=$3 echo "[INFO] Get ${module_name} package" copy_path=${source_code_dir}/cluster-deploy/packages/${module_binary_package} download_uri=${fate_cos_address}/${module_binary_package} if [[ -f ${copy_path} ]];then echo "[INFO] Copying ${copy_path}" cp ${copy_path} ./ else echo "[INFO] Downloading ${download_uri}" wget -P ${source_code_dir}/cluster-deploy/packages/ ${download_uri} echo "[INFO] Finish downloading ${download_uri}" echo "[INFO] Copying ${copy_path}" cp ${copy_path} ./ fi echo "[INFO] Finish get ${module_name} package" }
<gh_stars>0 import os import sys # (if_testIsTrue, if_testIsFalse) [test] isFat = True state = ('skinny', 'fat')[isFat] print(state)
<filename>src/vue-grid.ts
import Grid from './components/grid/BasicGrid.vue'
import Cards from './components/grid/BasicCards.vue'
import List from './components/grid/BasicList.vue'
import AjaxGrid from './components/grid/AjaxGrid.vue'
import AjaxList from './components/grid/AjaxList.vue'
import AjaxCards from './components/grid/AjaxCards.vue'
import GraphGrid from './components/grid/GraphGrid.vue'
import GraphList from './components/grid/GraphList.vue'
import GraphCards from './components/grid/GraphCards.vue'
import Pagination from './components/Pagination.vue'

import './assets/scss/_index.scss'

// Vue plugin entry point. Always registers the basic Grid/List/Cards
// components; Ajax and GraphQL variants are registered only when enabled via
// `options.ajax` / `options.graphql`. All resolved options are exposed to
// components through `Vue.prototype.$vgrid`.
const Plugin = {
  install(Vue: any, options: any = {}) {
    Vue.component('VGrid', Grid)
    Vue.component('VList', List)
    Vue.component('VCards', Cards)

    // Options shared by every grid flavour.
    const gridOption = {
      debug: options.debug,
      perPage: options.perPage,
      pageSizes: options.pageSizes,
      routeState: options.routeState
    }
    let graphqlOption = {}
    let ajaxOption = {}

    if (options.graphql) {
      Vue.component('VGraphGrid', GraphGrid)
      Vue.component('VGraphList', GraphList)
      Vue.component('VGraphCards', GraphCards)

      // GraphQL query-shape configuration; `||` supplies the conventional
      // default key names when the caller omits them.
      graphqlOption = {
        filterKey: options.filterKey || 'where',
        limitKey: options.limitKey || 'limit',
        offsetKey: options.offsetKey || 'offset',
        aggregateQuery: options.aggregateQuery || 'aggregate { count }',
        graphqlFilter: options.graphqlFilter,
        graphqlOrder: options.graphqlOrder,
        graphqlDataCounter: options.graphqlDataCounter
      }
    }

    if (options.ajax) {
      Vue.component('VAjaxGrid', AjaxGrid)
      Vue.component('VAjaxList', AjaxList)
      Vue.component('VAjaxCards', AjaxCards)

      // REST request-parameter configuration with conventional defaults.
      ajaxOption = {
        extractData: options.extractData,
        pageKey: options.pageKey || 'page',
        hasSortType: options.hasSortType,
        sortKey: options.sortKey || 'sort',
        sortTypeKey: options.sortTypeKey || 'sort_type',
        perPageKey: options.perPageKey || 'limit',
        fetchData: options.fetchData,
        cancelToken: options.cancelToken,
        // Funcs
        getPageIndex: options.getPageIndex
      }
    }

    // Merge all option groups into a single global config object.
    Vue.prototype.$vgrid = {
      ...gridOption,
      ...ajaxOption,
      ...graphqlOption
    }
  }
}

export {
  Grid,
  Cards,
  List,
  Pagination
}

export default Plugin
# X12 transaction set header (ST segment) persisted per transmitted
# enrollment/payment transaction, with an AASM acknowledgement lifecycle:
# transmitted -> acknowledged/rejected -> retransmitted.
class Protocols::X12::TransactionSetHeader
  include Mongoid::Document
  include Mongoid::Timestamps
  include AASM

  # ST segment fields (aliased to their X12 element names).
  field :st01, as: :ts_id, type: String
  field :st02, as: :ts_control_number, type: String
  field :st03, as: :ts_implementation_convention_reference, type: String

  field :transaction_kind, type: String
  field :aasm_state, type: String
  # When the ACK/NAK for this transaction was processed.
  field :ack_nak_processed_at, type: Time

  # FIX: this should reference self.transmission.isa08
  field :receiver_id, type: String

  index({receiver_id: 1})
  index({st02: 1})
  index({aasm_state: 1})

  # State-based scopes (old-style Mongoid criteria syntax).
  scope :all_transmitted, where({aasm_state: :transmitted})
  scope :all_retransmitted, where({aasm_state: :retransmitted})
  scope :all_acknowledged, where({aasm_state: :acknowledged})
  scope :all_rejected, where({aasm_state: :rejected})

  # Transaction-kind scopes.
  scope :all_initial_enrollments, where({transaction_kind: "initial_enrollment"})
  scope :all_enrollment_maintenances, where({transaction_kind: "maintenance"})
  scope :all_enrollment_effectuations, where({transaction_kind: "effectuation"})
  scope :all_premium_payment_remittances, where({transaction_kind: "remittance"})

  belongs_to :carrier, index: true
  belongs_to :transmission, counter_cache: true, index: true, :class_name => "Protocols::X12::Transmission"

  # Raw EDI payload stored via CarrierWave.
  mount_uploader :body, EdiBody

  validates_presence_of :ts_id, :ts_control_number, :ts_implementation_convention_reference, :transaction_kind
  validates_inclusion_of :transaction_kind, in: ["initial_enrollment", "maintenance", "effectuation", "remittance"]

  aasm do
    state :transmitted, initial: true
    state :retransmitted
    state :acknowledged
    state :rejected

    event :nack do
      transitions from: :transmitted, to: :rejected
    end

    event :retransmit do
      transitions from: :rejected, to: :retransmitted
    end

    event :ack do
      transitions from: :transmitted, to: :acknowledged
    end
  end

  # NOTE(review): the three bang methods below set aasm_state directly and
  # save, bypassing the AASM event guards declared above (e.g. retransmit!
  # does not require the record to currently be :rejected). Presumably
  # intentional for replaying ACK/NAK files - confirm before changing.

  # Record a timestamped acknowledgement.
  def acknowledge!(the_date)
    self.ack_nak_processed_at = the_date
    self.aasm_state = 'acknowledged'
    self.save!
  end

  # Record a timestamped retransmission.
  def retransmit!(the_date)
    self.ack_nak_processed_at = the_date
    self.aasm_state = 'retransmitted'
    self.save!
  end

  # Record a timestamped rejection.
  def reject!(the_date)
    self.ack_nak_processed_at = the_date
    self.aasm_state = 'rejected'
    self.save!
  end
end
<filename>app/containers/BottomNavigator.js import { connect } from 'react-redux'; import BottomNav from '../components/BottomNav'; import { select } from '../actions/navigation'; import { getSelectedIndex } from '../reducers/navigation'; const mapStateToProps = state => ({ index: getSelectedIndex(state), }); const mapDispatchToProps = dispatch => ({ onSelect: (index) => { dispatch(select(index)); }, }); const BottomNavigator = connect( mapStateToProps, mapDispatchToProps )(BottomNav); export default BottomNavigator;
PROJECT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" docker run --rm -it -v "${PROJECT_DIR}:/work" -w /work --user "$(id -u):$(id -g)" \ -e USER="${USER}" -e CARGO_HOME=/work/.cargo_home \ libmw "${@}"
package stack.implementation; import array.Vector; import array.vector.VectorImplementation; import stack.Stack; public class ArrayStack<T> implements Stack<T>{ private Vector<T> elements; private int size; private static final int DEFAULT_CAPACITY = 100; public ArrayStack() { this.size = 0; this.elements = new VectorImplementation<>(DEFAULT_CAPACITY); } @Override public boolean empty() { return elements.isEmpty(); } @Override public T peek() { T element = elements.at(elements.size()-1); return element; } @Override public T pop() throws Exception { T element = elements.pop(); size = elements.size(); return element; } @Override public void push(T item) { elements.push(item); size = elements.size(); } @Override public int getSize() { return size; } @Override public String toString() { String desc = "["; if(!elements.isEmpty()) { desc = "[ "+elements.at(0); } for(int i=1;i<size;i++) { desc= desc +" <- "+elements.at(i); } desc += " ]"; return desc; } }
get_current_httpd_version () { get_readable_current_choice "HTTPD" "$HTTPD_CONFIG" } get_all_httpd_versions () { get_readable_all_choices "HTTPD" "$HTTPD_CONFIG" } set_httpd_version () { local new=$1 set_readable_choice "HTTPD" "$HTTPD_CONFIG" "$new" }
<filename>src/templates/index.js
import React from 'react'
import { graphql } from 'gatsby'
import styled from 'styled-components'

import { media } from '~src/components/variable/mixin'
import Seo from '~src/components/seo'
import Structured from '~src/components/structured/index'
import Layout from '~src/components/layout'
import Header from '~src/components/organisms/header'
import Footer from '~src/components/organisms/footer'
import Splash from '~src/components/atoms/splash'
import BestList from '~src/components/organisms/list'
import Loop from '~src/components/organisms/loop'
import Pagination from '~src/components/molecules/pagination'
import Ads from '~src/components/atoms/ads'

// Responsive main column: widens with each breakpoint up to 960px.
const Main = styled.main`
  margin: 32px auto 0;
  max-width: 640px;
  width: calc(100% - 16px);

  ${media.xs} {
    width: calc(100% - 32px);
  }

  ${media.s} {
    width: calc(100% - 48px);
  }

  ${media.ms} {
    max-width: 690px;
    width: calc(100% - 64px);
  }

  ${media.ls} {
    margin: 48px auto 0;
  }

  ${media.l} {
    max-width: 960px;
  }
`

// Paginated blog index page. The Splash overlay is shown only on page 1
// and only when navigation state does not flag it as already shown.
export default function Index({ data, pageContext, location }) {
  const { state = {} } = location
  const { splash } = state || false
  const edges = data.allContentfulBlog.edges
  const Image = pageContext.number === 1 && !splash ? <Splash /> : ''
  return (
    <Layout>
      <Seo />
      <Structured edges={edges} page={pageContext} />
      <Header index />
      <Main>
        <BestList edges={data.allContentfulBest.edges} />
        <Loop edges={edges} />
        <Pagination page={pageContext} />
      </Main>
      <Ads />
      <Footer tag={data.allContentfulTag.edges} />
      {Image}
    </Layout>
  )
}

// Page query: paginated blog posts (skip/limit injected by gatsby-node),
// plus "best" picks and the tag list for the footer.
export const query = graphql`
  query Index($skip: Int!, $limit: Int!) {
    allContentfulBlog(
      filter: { node_locale: { eq: "ja-JP" } }
      sort: { fields: [createdAt], order: DESC }
      limit: $limit
      skip: $skip
    ) {
      edges {
        node {
          slug
          title
          createdAt
          thumbnail {
            localFile {
              childImageSharp {
                fluid {
                  src
                  srcSet
                  srcWebp
                  srcSetWebp
                  presentationHeight
                  presentationWidth
                }
              }
            }
          }
        }
      }
    }
    allContentfulBest(filter: { node_locale: { eq: "ja-JP" } }, sort: { fields: [updatedAt], order: DESC }) {
      edges {
        node {
          title
          slug
          icon
        }
      }
    }
    allContentfulTag(filter: { node_locale: { eq: "ja-JP" } }) {
      edges {
        node {
          name
          slug
        }
      }
    }
  }
`
# Function to configure PHP cli build. configure_cli() { # Remove libtool files. find "${INSTALL_ROOT:?}" -name '*.la' -delete } # Function to build PHP cli sapi. build_cli() { mkdir -p "${INSTALL_ROOT:?}" chmod -R 777 "$INSTALL_ROOT" configure_sapi_options cli build_php cli configure_cli package_sapi cli }
def multiplication_table(number): for i in range(1, 11): print(number, 'x', i, '=', number*i) multiplication_table(number)
<gh_stars>0 package infrusture.entity; import java.util.ArrayList; import java.util.Date; import java.util.List; public class TempDataExample { /** * This field was generated by MyBatis Generator. * This field corresponds to the database table temp_data * * @mbg.generated */ protected String orderByClause; /** * This field was generated by MyBatis Generator. * This field corresponds to the database table temp_data * * @mbg.generated */ protected boolean distinct; /** * This field was generated by MyBatis Generator. * This field corresponds to the database table temp_data * * @mbg.generated */ protected List<Criteria> oredCriteria; /** * This method was generated by MyBatis Generator. * This method corresponds to the database table temp_data * * @mbg.generated */ public TempDataExample() { oredCriteria = new ArrayList<Criteria>(); } /** * This method was generated by MyBatis Generator. * This method corresponds to the database table temp_data * * @mbg.generated */ public void setOrderByClause(String orderByClause) { this.orderByClause = orderByClause; } /** * This method was generated by MyBatis Generator. * This method corresponds to the database table temp_data * * @mbg.generated */ public String getOrderByClause() { return orderByClause; } /** * This method was generated by MyBatis Generator. * This method corresponds to the database table temp_data * * @mbg.generated */ public void setDistinct(boolean distinct) { this.distinct = distinct; } /** * This method was generated by MyBatis Generator. * This method corresponds to the database table temp_data * * @mbg.generated */ public boolean isDistinct() { return distinct; } /** * This method was generated by MyBatis Generator. * This method corresponds to the database table temp_data * * @mbg.generated */ public List<Criteria> getOredCriteria() { return oredCriteria; } /** * This method was generated by MyBatis Generator. 
* This method corresponds to the database table temp_data * * @mbg.generated */ public void or(Criteria criteria) { oredCriteria.add(criteria); } /** * This method was generated by MyBatis Generator. * This method corresponds to the database table temp_data * * @mbg.generated */ public Criteria or() { Criteria criteria = createCriteriaInternal(); oredCriteria.add(criteria); return criteria; } /** * This method was generated by MyBatis Generator. * This method corresponds to the database table temp_data * * @mbg.generated */ public Criteria createCriteria() { Criteria criteria = createCriteriaInternal(); if (oredCriteria.size() == 0) { oredCriteria.add(criteria); } return criteria; } /** * This method was generated by MyBatis Generator. * This method corresponds to the database table temp_data * * @mbg.generated */ protected Criteria createCriteriaInternal() { Criteria criteria = new Criteria(); return criteria; } /** * This method was generated by MyBatis Generator. * This method corresponds to the database table temp_data * * @mbg.generated */ public void clear() { oredCriteria.clear(); orderByClause = null; distinct = false; } /** * This class was generated by MyBatis Generator. 
* This class corresponds to the database table temp_data * * @mbg.generated */ protected abstract static class GeneratedCriteria { protected List<Criterion> criteria; protected GeneratedCriteria() { super(); criteria = new ArrayList<Criterion>(); } public boolean isValid() { return criteria.size() > 0; } public List<Criterion> getAllCriteria() { return criteria; } public List<Criterion> getCriteria() { return criteria; } protected void addCriterion(String condition) { if (condition == null) { throw new RuntimeException("Value for condition cannot be null"); } criteria.add(new Criterion(condition)); } protected void addCriterion(String condition, Object value, String property) { if (value == null) { throw new RuntimeException("Value for " + property + " cannot be null"); } criteria.add(new Criterion(condition, value)); } protected void addCriterion(String condition, Object value1, Object value2, String property) { if (value1 == null || value2 == null) { throw new RuntimeException("Between values for " + property + " cannot be null"); } criteria.add(new Criterion(condition, value1, value2)); } public Criteria andIdIsNull() { addCriterion("id is null"); return (Criteria) this; } public Criteria andIdIsNotNull() { addCriterion("id is not null"); return (Criteria) this; } public Criteria andIdEqualTo(Long value) { addCriterion("id =", value, "id"); return (Criteria) this; } public Criteria andIdNotEqualTo(Long value) { addCriterion("id <>", value, "id"); return (Criteria) this; } public Criteria andIdGreaterThan(Long value) { addCriterion("id >", value, "id"); return (Criteria) this; } public Criteria andIdGreaterThanOrEqualTo(Long value) { addCriterion("id >=", value, "id"); return (Criteria) this; } public Criteria andIdLessThan(Long value) { addCriterion("id <", value, "id"); return (Criteria) this; } public Criteria andIdLessThanOrEqualTo(Long value) { addCriterion("id <=", value, "id"); return (Criteria) this; } public Criteria andIdIn(List<Long> values) { 
addCriterion("id in", values, "id"); return (Criteria) this; } public Criteria andIdNotIn(List<Long> values) { addCriterion("id not in", values, "id"); return (Criteria) this; } public Criteria andIdBetween(Long value1, Long value2) { addCriterion("id between", value1, value2, "id"); return (Criteria) this; } public Criteria andIdNotBetween(Long value1, Long value2) { addCriterion("id not between", value1, value2, "id"); return (Criteria) this; } public Criteria andNameIsNull() { addCriterion("NAME is null"); return (Criteria) this; } public Criteria andNameIsNotNull() { addCriterion("NAME is not null"); return (Criteria) this; } public Criteria andNameEqualTo(String value) { addCriterion("NAME =", value, "name"); return (Criteria) this; } public Criteria andNameNotEqualTo(String value) { addCriterion("NAME <>", value, "name"); return (Criteria) this; } public Criteria andNameGreaterThan(String value) { addCriterion("NAME >", value, "name"); return (Criteria) this; } public Criteria andNameGreaterThanOrEqualTo(String value) { addCriterion("NAME >=", value, "name"); return (Criteria) this; } public Criteria andNameLessThan(String value) { addCriterion("NAME <", value, "name"); return (Criteria) this; } public Criteria andNameLessThanOrEqualTo(String value) { addCriterion("NAME <=", value, "name"); return (Criteria) this; } public Criteria andNameLike(String value) { addCriterion("NAME like", value, "name"); return (Criteria) this; } public Criteria andNameNotLike(String value) { addCriterion("NAME not like", value, "name"); return (Criteria) this; } public Criteria andNameIn(List<String> values) { addCriterion("NAME in", values, "name"); return (Criteria) this; } public Criteria andNameNotIn(List<String> values) { addCriterion("NAME not in", values, "name"); return (Criteria) this; } public Criteria andNameBetween(String value1, String value2) { addCriterion("NAME between", value1, value2, "name"); return (Criteria) this; } public Criteria andNameNotBetween(String 
value1, String value2) { addCriterion("NAME not between", value1, value2, "name"); return (Criteria) this; } public Criteria andOtherIsNull() { addCriterion("OTHER is null"); return (Criteria) this; } public Criteria andOtherIsNotNull() { addCriterion("OTHER is not null"); return (Criteria) this; } public Criteria andOtherEqualTo(String value) { addCriterion("OTHER =", value, "other"); return (Criteria) this; } public Criteria andOtherNotEqualTo(String value) { addCriterion("OTHER <>", value, "other"); return (Criteria) this; } public Criteria andOtherGreaterThan(String value) { addCriterion("OTHER >", value, "other"); return (Criteria) this; } public Criteria andOtherGreaterThanOrEqualTo(String value) { addCriterion("OTHER >=", value, "other"); return (Criteria) this; } public Criteria andOtherLessThan(String value) { addCriterion("OTHER <", value, "other"); return (Criteria) this; } public Criteria andOtherLessThanOrEqualTo(String value) { addCriterion("OTHER <=", value, "other"); return (Criteria) this; } public Criteria andOtherLike(String value) { addCriterion("OTHER like", value, "other"); return (Criteria) this; } public Criteria andOtherNotLike(String value) { addCriterion("OTHER not like", value, "other"); return (Criteria) this; } public Criteria andOtherIn(List<String> values) { addCriterion("OTHER in", values, "other"); return (Criteria) this; } public Criteria andOtherNotIn(List<String> values) { addCriterion("OTHER not in", values, "other"); return (Criteria) this; } public Criteria andOtherBetween(String value1, String value2) { addCriterion("OTHER between", value1, value2, "other"); return (Criteria) this; } public Criteria andOtherNotBetween(String value1, String value2) { addCriterion("OTHER not between", value1, value2, "other"); return (Criteria) this; } public Criteria andRemarkIsNull() { addCriterion("REMARK is null"); return (Criteria) this; } public Criteria andRemarkIsNotNull() { addCriterion("REMARK is not null"); return (Criteria) this; } 
public Criteria andRemarkEqualTo(String value) { addCriterion("REMARK =", value, "remark"); return (Criteria) this; } public Criteria andRemarkNotEqualTo(String value) { addCriterion("REMARK <>", value, "remark"); return (Criteria) this; } public Criteria andRemarkGreaterThan(String value) { addCriterion("REMARK >", value, "remark"); return (Criteria) this; } public Criteria andRemarkGreaterThanOrEqualTo(String value) { addCriterion("REMARK >=", value, "remark"); return (Criteria) this; } public Criteria andRemarkLessThan(String value) { addCriterion("REMARK <", value, "remark"); return (Criteria) this; } public Criteria andRemarkLessThanOrEqualTo(String value) { addCriterion("REMARK <=", value, "remark"); return (Criteria) this; } public Criteria andRemarkLike(String value) { addCriterion("REMARK like", value, "remark"); return (Criteria) this; } public Criteria andRemarkNotLike(String value) { addCriterion("REMARK not like", value, "remark"); return (Criteria) this; } public Criteria andRemarkIn(List<String> values) { addCriterion("REMARK in", values, "remark"); return (Criteria) this; } public Criteria andRemarkNotIn(List<String> values) { addCriterion("REMARK not in", values, "remark"); return (Criteria) this; } public Criteria andRemarkBetween(String value1, String value2) { addCriterion("REMARK between", value1, value2, "remark"); return (Criteria) this; } public Criteria andRemarkNotBetween(String value1, String value2) { addCriterion("REMARK not between", value1, value2, "remark"); return (Criteria) this; } public Criteria andCreateTimeIsNull() { addCriterion("CREATE_TIME is null"); return (Criteria) this; } public Criteria andCreateTimeIsNotNull() { addCriterion("CREATE_TIME is not null"); return (Criteria) this; } public Criteria andCreateTimeEqualTo(Date value) { addCriterion("CREATE_TIME =", value, "createTime"); return (Criteria) this; } public Criteria andCreateTimeNotEqualTo(Date value) { addCriterion("CREATE_TIME <>", value, "createTime"); return 
(Criteria) this; } public Criteria andCreateTimeGreaterThan(Date value) { addCriterion("CREATE_TIME >", value, "createTime"); return (Criteria) this; } public Criteria andCreateTimeGreaterThanOrEqualTo(Date value) { addCriterion("CREATE_TIME >=", value, "createTime"); return (Criteria) this; } public Criteria andCreateTimeLessThan(Date value) { addCriterion("CREATE_TIME <", value, "createTime"); return (Criteria) this; } public Criteria andCreateTimeLessThanOrEqualTo(Date value) { addCriterion("CREATE_TIME <=", value, "createTime"); return (Criteria) this; } public Criteria andCreateTimeIn(List<Date> values) { addCriterion("CREATE_TIME in", values, "createTime"); return (Criteria) this; } public Criteria andCreateTimeNotIn(List<Date> values) { addCriterion("CREATE_TIME not in", values, "createTime"); return (Criteria) this; } public Criteria andCreateTimeBetween(Date value1, Date value2) { addCriterion("CREATE_TIME between", value1, value2, "createTime"); return (Criteria) this; } public Criteria andCreateTimeNotBetween(Date value1, Date value2) { addCriterion("CREATE_TIME not between", value1, value2, "createTime"); return (Criteria) this; } public Criteria andUpdateTimeIsNull() { addCriterion("UPDATE_TIME is null"); return (Criteria) this; } public Criteria andUpdateTimeIsNotNull() { addCriterion("UPDATE_TIME is not null"); return (Criteria) this; } public Criteria andUpdateTimeEqualTo(Date value) { addCriterion("UPDATE_TIME =", value, "updateTime"); return (Criteria) this; } public Criteria andUpdateTimeNotEqualTo(Date value) { addCriterion("UPDATE_TIME <>", value, "updateTime"); return (Criteria) this; } public Criteria andUpdateTimeGreaterThan(Date value) { addCriterion("UPDATE_TIME >", value, "updateTime"); return (Criteria) this; } public Criteria andUpdateTimeGreaterThanOrEqualTo(Date value) { addCriterion("UPDATE_TIME >=", value, "updateTime"); return (Criteria) this; } public Criteria andUpdateTimeLessThan(Date value) { addCriterion("UPDATE_TIME <", 
value, "updateTime"); return (Criteria) this; } public Criteria andUpdateTimeLessThanOrEqualTo(Date value) { addCriterion("UPDATE_TIME <=", value, "updateTime"); return (Criteria) this; } public Criteria andUpdateTimeIn(List<Date> values) { addCriterion("UPDATE_TIME in", values, "updateTime"); return (Criteria) this; } public Criteria andUpdateTimeNotIn(List<Date> values) { addCriterion("UPDATE_TIME not in", values, "updateTime"); return (Criteria) this; } public Criteria andUpdateTimeBetween(Date value1, Date value2) { addCriterion("UPDATE_TIME between", value1, value2, "updateTime"); return (Criteria) this; } public Criteria andUpdateTimeNotBetween(Date value1, Date value2) { addCriterion("UPDATE_TIME not between", value1, value2, "updateTime"); return (Criteria) this; } } /** * This class was generated by MyBatis Generator. * This class corresponds to the database table temp_data * * @mbg.generated do_not_delete_during_merge */ public static class Criteria extends GeneratedCriteria { protected Criteria() { super(); } } /** * This class was generated by MyBatis Generator. 
 * This class corresponds to the database table temp_data
 *
 * @mbg.generated
 */
// One SQL predicate. Exactly one of the noValue/singleValue/betweenValue/
// listValue flags is set by the constructors; the SQL builder dispatches on
// them when rendering the WHERE clause.
public static class Criterion {
    private String condition;     // SQL fragment, e.g. "id =" or "id is null"

    private Object value;         // first (or only) operand

    private Object secondValue;   // second operand, used only for BETWEEN

    private boolean noValue;      // predicate has no operand (is null / is not null)

    private boolean singleValue;  // predicate has exactly one scalar operand

    private boolean betweenValue; // predicate has two operands (BETWEEN)

    private boolean listValue;    // operand is a List (IN / NOT IN)

    private String typeHandler;   // optional MyBatis type handler class name

    public String getCondition() {
        return condition;
    }

    public Object getValue() {
        return value;
    }

    public Object getSecondValue() {
        return secondValue;
    }

    public boolean isNoValue() {
        return noValue;
    }

    public boolean isSingleValue() {
        return singleValue;
    }

    public boolean isBetweenValue() {
        return betweenValue;
    }

    public boolean isListValue() {
        return listValue;
    }

    public String getTypeHandler() {
        return typeHandler;
    }

    // Operand-less predicate ("... is null" / "... is not null").
    protected Criterion(String condition) {
        super();
        this.condition = condition;
        this.typeHandler = null;
        this.noValue = true;
    }

    // Single-operand predicate; a List value flips it into IN/NOT IN mode.
    protected Criterion(String condition, Object value, String typeHandler) {
        super();
        this.condition = condition;
        this.value = value;
        this.typeHandler = typeHandler;
        if (value instanceof List<?>) {
            this.listValue = true;
        } else {
            this.singleValue = true;
        }
    }

    protected Criterion(String condition, Object value) {
        this(condition, value, null);
    }

    // Two-operand predicate (BETWEEN / NOT BETWEEN).
    protected Criterion(String condition, Object value, Object secondValue, String typeHandler) {
        super();
        this.condition = condition;
        this.value = value;
        this.secondValue = secondValue;
        this.typeHandler = typeHandler;
        this.betweenValue = true;
    }

    protected Criterion(String condition, Object value, Object secondValue) {
        this(condition, value, secondValue, null);
    }
}
}
#!/usr/bin/env bash
# NAME
#     zeppelin.sh - controls snorkel-zeppelin docker image
#
# SYNOPSIS
#     zeppelin.sh [-]
#
# DESCRIPTION
#     zeppelin.sh controls the zeppelin docker image in snorkel
#     the script takes exactly one argument
#
# ARGUMENTS
#     -h/-? | --help      Displays help
#     -r    | --start     Starts zeppelin
#     -s    | --stop      Stops zeppelin
#     -f    | --refresh   Refresh dependencies

# Brings the compose stack up (detached) and prints usage hints.
function start {
    echo "Starting snorkel..."
    docker-compose up -d
    echo
    echo "========== Sqooba Snorkeling Toolset =========="
    echo
    echo "Zeppelin and Spark are starting ... might take some time ..."
    echo
    echo "Zeppelin: http://localhost:${ZEPPELIN_PORT}"
    echo "Spark: http://localhost:${SPARK_UI_PORT} -- after you run your first notebook."
    echo
    echo "Upload your data in ${ZEPPELIN_ROOT_DIR}/data"
    echo "Spark logs are stored in ${ZEPPELIN_ROOT_DIR}/logs"
    echo "Your notebooks are stored in ${ZEPPELIN_ROOT_DIR}/notebooks"
    echo
    echo "Run $(dirname $0)/refresh.sh to update js/css/python dependencies"
    echo
    echo "========== Happy Snorkeling ! =========="
}

# Stops the compose stack without removing containers.
function stop {
    echo "Stopping snorkel..."
    docker-compose stop
    echo "Snorkel stopped"
}

# Re-runs the dependency installers inside the running container.
function refresh {
    echo "Refreshing javascript dependencies"
    docker exec -it zeppelinstarter_zeppelin-starter_1 install-js.sh
    echo "Refreshing python dependencies"
    docker exec -it zeppelinstarter_zeppelin-starter_1 install-python.sh
    echo "Finished!!!"
}

# Prints usage.
function help {
    echo "Usage: ${0} arg"
    echo "  Where arg is exactly one argument."
    echo
    echo "Arguments:"
    echo "  -h/-? | --help      Displays help"
    echo "  -r    | --start     Starts zeppelin"
    echo "  -s    | --stop      Stops zeppelin "
    echo "  -f    | --refresh   Refresh dependencies"
}

# Require exactly one argument.
if [ ! $# -eq 1 ]; then
    echo "${0} takes exactly 1 argument."
    help
    exit 1
fi

# NOTE(review): env.sh is sourced relative to the caller's working directory,
# not the script's own directory -- confirm this is intentional.
source ./env.sh

# Translate long options into their short equivalents so getopts can parse them.
for arg in "$@"; do
    shift
    case "$arg" in
        "--help") set -- "$@" "-h" ;;
        "--start") set -- "$@" "-r" ;;   # FIX: added missing space after the ")" pattern
        "--stop") set -- "$@" "-s" ;;
        "--refresh") set -- "$@" "-f" ;;
        "?") set -- "$@" "-?" ;;
        *) set -- "$@" "$arg" ;;
    esac
done

# Parse short options. Note: getopts also maps any invalid option to "?",
# which intentionally falls into the help case below.
OPTIND=1
while getopts "fhrs?" opt; do
    case "$opt" in
        "h") help; exit 0 ;;
        "r") start ;;
        "s") stop ;;
        "f") refresh ;;
        "?") help
             exit 0 ;;
    esac
done
# FIX: use bash arithmetic expansion instead of the legacy external `expr`.
shift $((OPTIND - 1)) # remove options from positional parameters
# Referral-program reward tier. Tiers are ordered by the number of referrals
# required and partitioned by the kind of account that can acquire them.
class ReferableReward < ApplicationRecord
  # NOTE(review): the enum is declared with an empty value list; presumably the
  # values are supplied elsewhere (migration / metaprogramming) -- confirm.
  enum acquireable_by: [], _prefix: :acquireable_by

  # Each (referrals_required, acquireable_by) pair identifies one tier.
  # FIX: corrected typo in the validation message ("aleady" -> "already").
  validates :referrals_required,
            uniqueness: { scope: :acquireable_by,
                          message: 'Target already present for this kind of reward' }

  # Highest-threshold reward for the given audience, or nil when none exist.
  def self.best_reward(acquireable_by)
    where(acquireable_by: acquireable_by).order(referrals_required: :desc).first
  end

  # Returns the zero-referral placeholder tier, creating it on first use.
  def self.zeroth_tier(acquireable_by)
    zeroth = find_by(referrals_required: 0, acquireable_by: acquireable_by)
    return zeroth if zeroth

    create!(reward: 'none', referrals_required: 0, acquireable_by: acquireable_by)
  end

  # Next tier strictly above this one (and at least `above`); returns self
  # when this is already the top tier.
  def next_reward(above: 0)
    query = self.class
                .where(acquireable_by: acquireable_by)
                .where('referrals_required > ?', referrals_required)
                .where('referrals_required >= ?', above)
                .order(referrals_required: :asc)
                .limit(1)
    query.any? ? query.first : self
  end

  # Previous tier strictly below this one (and at most `below`); returns self
  # when this is already the bottom tier.
  def previous_reward(below: 10e5)
    query = self.class
                .where(acquireable_by: acquireable_by)
                .where('referrals_required < ?', referrals_required)
                .where('referrals_required <= ?', below)
                .order(referrals_required: :desc)
                .limit(1)
    query.any? ? query.first : self
  end
end
#!/usr/bin/env bash
# Copyright 2019 curoky(cccuroky@gmail.com).
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Fail fast and trace: -x echo commands, -e exit on error, -u error on unset
# variables, -o pipefail propagate failures through pipelines.
set -xeuo pipefail

# Open an interactive shell in the prebuilt Bazel image (Ubuntu 21.04,
# Bazel 4.0.0); the container is removed when the shell exits (--rm).
docker run --rm -it curoky/bazel:ubuntu21.04-4.0.0 bash
# Execute the pre-generated job list for the exp_readc experiment (run-0) with
# GNU parallel, at most 5 jobs concurrently. The directory name presumably
# encodes the experiment parameters (6 nodes / 6 targets / 6 dirs / 1000 files /
# 617m / 1 iteration) -- confirm against the experiment generator.
parallel --jobs 5 < ./results/exp_readc/run-0/lustre_6n_6t_6d_1000f_617m_1i/jobs/jobs_n4.txt
// Package fusehooks implements the FUSE fs interfaces for Google Drive. package fusehooks import ( "context" "fmt" "io" "os" "syscall" "time" "bazil.org/fuse" "bazil.org/fuse/fs" "github.com/althk/drivefs/driveapi" "google.golang.org/api/drive/v3" ) type FS struct { Ctx context.Context DriveSvc *drive.Service } var _ fs.FS = (*FS)(nil) func (f *FS) Root() (fs.Node, error) { root, err := driveapi.RootFolder(f.Ctx, f.DriveSvc) if root == nil { return nil, err } return &Dir{ root, }, nil } type Dir struct { driveapi.File } var _ fs.Node = (*Dir)(nil) func (d *Dir) Attr(_ context.Context, attr *fuse.Attr) error { return mapAttr(d.File, attr) } func mapAttr(f driveapi.File, a *fuse.Attr) error { a.Size = uint64(f.Size()) a.Mtime = time.Now() a.Ctime = time.Now() if f.IsDir() { a.Mode = os.ModeDir | 0500 } else { a.Mode = 0400 } return nil } var _ = fs.HandleReadDirAller(&Dir{}) func (d *Dir) ReadDirAll(ctx context.Context) ([]fuse.Dirent, error) { files, err := d.ListFiles(ctx) if err != nil { return nil, err } var res []fuse.Dirent for _, f := range files { var e fuse.Dirent e.Name = f.Name() if f.IsDir() { e.Type = fuse.DT_Dir } else { e.Type = fuse.DT_File } res = append(res, e) } return res, nil } var _ = fs.NodeRequestLookuper(&Dir{}) func (d *Dir) Lookup( _ context.Context, req *fuse.LookupRequest, _ *fuse.LookupResponse) (fs.Node, error) { name := req.Name for _, f := range d.Files() { if f.Name() == name { if f.IsDir() { return &Dir{ f, }, nil } return &File{ f, }, nil } } return nil, fuse.ToErrno(syscall.ENOENT) } type File struct { file driveapi.File } var _ fs.Node = (*File)(nil) func (f *File) Attr(_ context.Context, attr *fuse.Attr) error { return mapAttr(f.file, attr) } var _ = fs.NodeOpener(&File{}) func (f *File) Open(ctx context.Context, req *fuse.OpenRequest, resp *fuse.OpenResponse) (fs.Handle, error) { r, err := f.file.Download(ctx) if err != nil { return nil, err } resp.Flags |= fuse.OpenKeepCache return &FileHandle{r}, nil } type FileHandle struct 
{ r io.ReadCloser } var _ fs.Handle = (*FileHandle)(nil) var _ fs.HandleReleaser = (*FileHandle)(nil) func (fh *FileHandle) Release(_ context.Context, req *fuse.ReleaseRequest) error { fmt.Println("file handle closed") return fh.r.Close() } var _ = fs.HandleReader(&FileHandle{}) func (fh *FileHandle) Read(ctx context.Context, req *fuse.ReadRequest, resp *fuse.ReadResponse) error { buf := make([]byte, req.Size) n, err := fh.r.Read(buf) fmt.Printf("read %d of %d bytes\n", n, req.Size) resp.Data = buf[:n] return err }
<reponame>Zsombi55/Parking-Sharing var express = require('express'); var mysql = require('mysql'); var fs = require("fs"); // fs - file system var router = express.Router(); /* DB: "parking" connection settings. */ const pool = mysql.createPool({ host : 'localhost', user : 'root', password : '', database : 'parking', timezone: '+00:00' //set to "neutral". }); /* JSON: GET spots listing. */ router.get('/', function(req, res, next) { res.send('Respond with a resource of JSON data.'); }); // DB: POST (GET /READ) entire "spots" listed. http://localhost:3000/get // Takes into consideration the search inputs too. router.post('/', function(req, res, next) { // Route '/' changed to '/s' as a hack to make demo JSON GET to work. const city = req.body.city; const area = req.body.area; const address = req.body.address; const personId = req.body.personId; // internal queries, for sql. const cityQ = city ? ` AND city LIKE "${city}%"` : ``; const areaQ = area ? ` AND area LIKE "${area}%"` : ``; const addressQ = address ? ` AND address LIKE "%${address}%"` : ``; pool.getConnection((err, connection) => { /* Select those records from the "spots" table whose "id" value * has no reference records in the "reservations" table * where those records' "ending" variable IS NULL (has no value), * and --^^--IF either a "city" or "area" column is asked * include those records that have the asked values in them--^^--, * AND arrange the result of all this so that the * "address" values will be in alphanumerical ASCending order. */ const sql = `SELECT spots.*, reservations.person_id, reservations.start, reservations.ending FROM spots LEFT JOIN reservations ON (spots.id = reservations.spot_id) WHERE ending IS NULL AND ((person_id IS NULL ${cityQ} ${areaQ} ${addressQ}) OR person_id = ${personId}) ORDER BY person_id DESC, address ASC`; connection.query(sql, (err, results) => { if(!!err){ console.log(err); } else { console.log("DB \"parking\" Connected! 
:)"); res.json(results); connection.release(); } }); }); }); // http://localhost:3000/booking/add router.post('/book', function(req, res, next) { const personId = req.body.personId; const spotId = req.body.spotId; console.warn("Update: ", personId, spotId); pool.getConnection((err, connection) => { const sql = `INSERT INTO reservations (person_id, spot_id) VALUES (${personId}, ${spotId})`; console.log(sql); connection.query(sql, (err, results) => { res.json({ success: true, message: "Done!" }); connection.release(); }); }); }); module.exports = router;
<gh_stars>0 FactoryGirl.define do factory :waitlist do name "MyString" email "" instragram "MyString" px500 "MyString" facebook "MyString" phone "MyString" website "MyString" smugmug "MyString" end end
/* * Copyright (c) 2002-2008 LWJGL Project * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * * Neither the name of 'LWJGL' nor the names of * its contributors may be used to endorse or promote products derived * from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 */

/**
 * $Id: org_lwjgl_input_Keyboard.c 2385 2006-06-23 16:45:21Z elias_naur $
 *
 * @author elias_naue <<EMAIL>>
 * @version $Revision: 2385 $
 */

#include "Window.h"
#include <jni.h>
#include "org_lwjgl_opengl_WindowsKeyboard.h"

/* Thin JNI pass-through to Win32 GetKeyState for a virtual-key code. */
JNIEXPORT jint JNICALL Java_org_lwjgl_opengl_WindowsKeyboard_GetKeyState(JNIEnv *env, jclass unused, jint virt_key) {
	return GetKeyState(virt_key);
}

/* Thin JNI pass-through to Win32 MapVirtualKey (scan-code <-> virtual-key mapping). */
JNIEXPORT jint JNICALL Java_org_lwjgl_opengl_WindowsKeyboard_MapVirtualKey(JNIEnv *env, jclass unused, jint uCode, jint uMapType) {
	return MapVirtualKey(uCode, uMapType);
}

/* Reports whether the OS is from the NT family, via GetVersionEx platform id. */
JNIEXPORT jboolean JNICALL Java_org_lwjgl_opengl_WindowsKeyboard_isWindowsNT(JNIEnv *env, jclass unused) {
	OSVERSIONINFO osvi;
	osvi.dwOSVersionInfoSize = sizeof(osvi);
	GetVersionEx(&osvi);
	return osvi.dwPlatformId == VER_PLATFORM_WIN32_NT ? JNI_TRUE : JNI_FALSE;
}

/* Translates a virtual key + scan code to Unicode characters.
 * lpKeyState_obj and pwszBuff_obj must be direct NIO buffers: their backing
 * memory is handed straight to the Win32 ToUnicode call. */
JNIEXPORT jint JNICALL Java_org_lwjgl_opengl_WindowsKeyboard_ToUnicode(JNIEnv *env, jclass unused, jint wVirtKey, jint wScanCode, jobject lpKeyState_obj, jobject pwszBuff_obj, jint cchBuff, jint flags) {
	const PBYTE lpKeyState = (*env)->GetDirectBufferAddress(env, lpKeyState_obj);
	LPWSTR pwszBuff = (*env)->GetDirectBufferAddress(env, pwszBuff_obj);
	return ToUnicode(wVirtKey, wScanCode, lpKeyState, pwszBuff, cchBuff, flags);
}

/* Translates a virtual key + scan code to an ANSI character; both jobject
 * arguments must be direct NIO buffers (see ToUnicode above). */
JNIEXPORT jint JNICALL Java_org_lwjgl_opengl_WindowsKeyboard_ToAscii(JNIEnv *env, jclass unused, jint wVirtKey, jint wScanCode, jobject lpKeyState_obj, jobject lpChar_obj, jint flags) {
	const PBYTE lpKeyState = (*env)->GetDirectBufferAddress(env, lpKeyState_obj);
	LPWORD lpChar = (*env)->GetDirectBufferAddress(env, lpChar_obj);
	return ToAscii(wVirtKey, wScanCode, lpKeyState, lpChar, flags);
}

/* Copies the 256-byte keyboard state into the caller-supplied direct buffer. */
JNIEXPORT jint JNICALL Java_org_lwjgl_opengl_WindowsKeyboard_GetKeyboardState(JNIEnv *env, jclass unused, jobject lpKeyState_obj) {
	PBYTE lpKeyState = (*env)->GetDirectBufferAddress(env, lpKeyState_obj);
	return GetKeyboardState(lpKeyState);
}
#!/bin/bash
# Builds Ruby 2.7.4 for the heroku-20 stack inside the hone/ruby-builder image.
# Expects common.sh (one directory up) to define OUTPUT_DIR and CACHE_DIR.

# FIX: quote expansions so paths containing spaces survive word-splitting, and
# use $(...) instead of legacy backticks.
source "$(dirname "$0")/../common.sh"

docker run \
  -v "$OUTPUT_DIR:/tmp/output" \
  -v "$CACHE_DIR:/tmp/cache" \
  -e VERSION=2.7.4 \
  -e STACK=heroku-20 \
  hone/ruby-builder:heroku-20
<reponame>das-praktische-schreinerlein/mycms-server-commons<filename>src/server-commons/serverlog.utils.ts export class ServerLogUtils { public static sanitizeLogMsg(msg: any): string { if (msg === undefined) { return undefined; } return (msg + '').replace(/[^-A-Za-z0-9äöüßÄÖÜ/+;,:._*]*/gi, ''); } }
<gh_stars>0
package cyclops.container.control.ior;

import cyclops.container.foldable.OrElseValue;
import cyclops.container.control.AbstractOrElseValueTest;
import cyclops.container.control.Ior;

/**
 * Runs the shared {@code OrElseValue} contract tests against {@link Ior}:
 * a right holds a value, a left plays the role of "empty".
 */
public class IorOrElseValueTest extends AbstractOrElseValueTest {

    /** Fresh "empty" fixture: a left Ior carrying a new Exception per call. */
    private OrElseValue<Integer, OrElseValue<Integer, ?>> leftFixture() {
        return (OrElseValue) Ior.left(new Exception());
    }

    @Override
    public OrElseValue<Integer, OrElseValue<Integer, ?>> of(int value) {
        return (OrElseValue) Ior.right(value);
    }

    @Override
    public OrElseValue<Integer, OrElseValue<Integer, ?>> empty1() {
        return leftFixture();
    }

    @Override
    public OrElseValue<Integer, OrElseValue<Integer, ?>> empty2() {
        return leftFixture();
    }

    @Override
    public OrElseValue<Integer, OrElseValue<Integer, ?>> empty3() {
        return leftFixture();
    }

    @Override
    public OrElseValue<Integer, OrElseValue<Integer, ?>> empty4() {
        return leftFixture();
    }

    /** Ior is an eager structure, so the contract's laziness checks are skipped. */
    @Override
    public boolean isLazy() {
        return false;
    }
}
package services

import sttp.client._
import io.circe.generic.auto._

/**
 * Client for the World Weather Online climate-averages API.
 *
 * Self-typed on `CachingBehavior`, so the concrete instance must mix in a
 * caching strategy that supplies `cachable`.
 *
 * @param key API key sent as the `key` query parameter
 */
class WorldWeatherOnline(key: String) extends ThirdPartyService { this: CachingBehavior =>

  /**
   * Returns a copy of the request with the API key appended as a `key`
   * query-string segment.
   *
   * NOTE(review): `withAuth` is not referenced in this file -- presumably
   * `fetch` (from `ThirdPartyService`) applies it; confirm.
   */
  def withAuth[T, S](
      request: RequestT[sttp.client.Identity, T, S]
  ): RequestT[sttp.client.Identity, T, S] = {
    request.copy[Identity, T, S](
      uri = request.uri
        .querySegment(sttp.model.Uri.QuerySegment.KeyValue("key", key))
    )
  }

  // Cached lookup of monthly climate averages by city name (mca=yes).
  val weatherForCity = cachable((cityName: String) => {
    type Response = WWOResponse[WWOClimateAverages]
    fetch[Response](
      uri"http://api.worldweatheronline.com/premium/v1/weather.ashx?q=$cityName&format=json&mca=yes"
    )
  })

  // Cached lookup of monthly climate averages by (latitude, longitude) pair.
  val weatherForLatLong = cachable((coords: (Double, Double)) => {
    type Response = WWOResponse[WWOClimateAverages]
    fetch[Response](
      uri"http://api.worldweatheronline.com/premium/v1/weather.ashx?q=${coords._1},${coords._2}&format=json&mca=yes"
    )
  })
}

// JSON envelope: the API nests the payload under a top-level "data" field.
case class WWOResponse[A](data: A)

// Payload: list of month holders under the "ClimateAverages" key.
case class WWOClimateAverages(`ClimateAverages`: List[WWOMonthHolder])

// One holder wraps the per-month entries.
case class WWOMonthHolder(month: List[WWOClimateMonth])

// Per-month climate figures; only name and average daily rainfall are mapped.
case class WWOClimateMonth(name: String, avgDailyRainfall: Float)
#!/bin/bash
# Installs chromium-browser and unclutter on a Raspberry Pi and registers an
# LXDE autostart entry that launches HABPanel in kiosk mode. The backed-up
# autostart file (.org) doubles as the "already installed" marker.

if [ -f "/etc/xdg/lxsession/LXDE-pi/autostart.org" ]
then
    echo ""
    echo "++++++++++"
    echo "+ chromium browser is already installed"
else
    echo ""
    echo "++++++++++"
    echo "+ install chromium browser "
    sudo apt-get install chromium-browser -y
    echo "+ install unclutter "
    sudo apt-get install unclutter -y
    echo "+ update autostart "

    # Build the kiosk launcher script.
    # FIX: the first redirect truncates (>) instead of appending (>>) so a
    # leftover temp file from an aborted previous run cannot duplicate content.
    echo "#!/bin/bash" > start_browser.sh.tmp
    echo "sleep 90" >> start_browser.sh.tmp
    echo "chromium-browser --kiosk --noerrdialogs --check-for-update-interval=31536000 --disable-infobars http://0.0.0.0:8080/habpanel/index.html#/view/home" >> start_browser.sh.tmp
    # FIX: mkdir -p so a pre-existing directory does not produce an error.
    sudo mkdir -p /etc/openhab/exec
    sudo mv start_browser.sh.tmp /etc/openhab/exec/start_browser.sh
    sudo chmod +x /etc/openhab/exec/start_browser.sh

    # Build the autostart extension snippet (same truncate-first fix).
    echo ' ' > autostart_extension.tmp
    echo '# disable curser' >> autostart_extension.tmp
    echo '@unclutter' >> autostart_extension.tmp
    echo ' ' >> autostart_extension.tmp
    echo '# start Chromium in full mode' >> autostart_extension.tmp
    echo '@/etc/openhab/exec/start_browser.sh' >> autostart_extension.tmp

    # Back up the original autostart, append the extension, and install it.
    sudo cp /etc/xdg/lxsession/LXDE-pi/autostart autostart.org
    cat autostart.org > autostart.extended
    cat autostart_extension.tmp >> autostart.extended
    sudo mv autostart.extended /etc/xdg/lxsession/LXDE-pi/autostart
    sudo mv autostart.org /etc/xdg/lxsession/LXDE-pi/autostart.org
fi
package io.opensphere.myplaces.util;

import java.awt.Color;
import java.awt.Font;

import de.micromata.opengis.kml.v_2_2_0.ExtendedData;
import de.micromata.opengis.kml.v_2_2_0.Placemark;
import io.opensphere.core.Toolbox;
import io.opensphere.core.preferences.Preferences;
import io.opensphere.core.preferences.PreferencesRegistry;
import io.opensphere.myplaces.constants.Constants;

/**
 * Migrates the map point options into the my places options.
 */
public class OptionsMigrator
{
    /**
     * Migrates the options from the old point options to the new my places
     * options. Does nothing when a "my places" default has already been
     * saved. Otherwise it either carries forward the legacy point default
     * (if one exists) or builds and saves a brand-new default placemark.
     *
     * @param accessor Used to save my places.
     * @param toolbox Used to get the preferences.
     */
    public void migrateIfNeedBe(OptionsAccessor accessor, Toolbox toolbox)
    {
        PreferencesRegistry registry = toolbox.getPreferencesRegistry();
        Preferences placesPreferences = registry.getPreferences(OptionsAccessor.class);
        String defaultPlace = placesPreferences.getString(OptionsAccessor.DEFAULT_PLACES_PROP, null);
        // Only migrate when no my-places default exists yet.
        if (defaultPlace == null)
        {
            // Preferences topic used by the legacy point-specific editor.
            Preferences pointPreferences = registry
                    .getPreferences("io.opensphere.myplaces.specific.points.editor.OptionsAccessor");
            String defaultPoint = pointPreferences.getString("defaultPoint", null);
            if (defaultPoint == null)
            {
                // No legacy value either: build a fresh default placemark
                // (white text on gray, title/distance/heading flags on,
                // default font) and persist it through the accessor.
                Placemark defaultPlacemark = new Placemark();
                PlacemarkUtils.setPlacemarkTextColor(defaultPlacemark, Color.white);
                PlacemarkUtils.setPlacemarkColor(defaultPlacemark, Color.gray);

                ExtendedData extendedData = defaultPlacemark.createAndSetExtendedData();
                ExtendedDataUtils.putBoolean(extendedData, Constants.IS_TITLE, true);
                ExtendedDataUtils.putBoolean(extendedData, Constants.IS_DISTANCE_ID, true);
                ExtendedDataUtils.putBoolean(extendedData, Constants.IS_HEADING_ID, true);

                Font defaultFont = PlacemarkUtils.DEFAULT_FONT;
                PlacemarkUtils.setPlacemarkFont(defaultPlacemark, defaultFont);

                accessor.saveDefaultPlacemark(defaultPlacemark);
            }
            else
            {
                // Carry the legacy point default forward as-is.
                defaultPlace = defaultPoint;
            }

            if (defaultPlace != null)
            {
                // Persist the migrated legacy value under the new key.
                placesPreferences.putString(OptionsAccessor.DEFAULT_PLACES_PROP, defaultPlace, this);
            }
        }
    }
}
#!/bin/bash
# audio-assault-setup/install.sh
# Rolando Ramos Torres (@rolodoom) - http://rolandoramostorres.com
# Install script for Audio Assault software.
# Tested on Kubuntu 20.04 LTS
#
# Expects the vendor installer .zip files in the current directory.
# Extracts them into @temp, copies the VST .so files into
# /usr/lib/vst/Audio Assault, the standalone binaries into
# /opt/Audio Assault, and links them under /usr/local/bin.

# test for required commands
command -v unzip >/dev/null 2>&1 || { echo >&2 "I require unzip but it's not installed. Aborting."; exit 1; }

TEMPDIR=$(readlink -f "@temp")
VST=$(readlink -f "/usr/lib/vst")
AUDIOASSAULT=$(readlink -f "/usr/lib/vst/Audio Assault")
AUDIOASSAULTBIN=$(readlink -f "/opt/Audio Assault")

# install subrutine
aa_install () {
    echo "installing..."

    # check if @temp exists; only unzip on a fresh run
    if [ ! -d "$TEMPDIR" ]; then
        # FIX: mkdir -p and use the resolved path, not the literal "@temp"
        mkdir -p "$TEMPDIR"
        # Unzip Audio Assult Sofware
        unzip -q 'Duality Bass Studio 1.2.51 Installer.zip' -d "$TEMPDIR"/Duality
        unzip -q 'Sigma v2 upd103.zip' -d "$TEMPDIR"/Sigma
        unzip -q 'RVXX v2 upd103.zip' -d "$TEMPDIR"/RVXX
        unzip -q 'Sigma 102.zip' -d "$TEMPDIR"/Sigma
        unzip -q 'RVXX Installer 1.0.1.zip' -d "$TEMPDIR"/RVXX
    fi

    # clean mac Files from installers
    find "$TEMPDIR" -name '.DS_Store' -exec rm -rf {} \;

    # check if /usr/lib/vst/Audio Assault exists
    if [ ! -d "$AUDIOASSAULT" ]; then
        # FIX: -p also creates /usr/lib/vst when it does not exist yet
        sudo mkdir -p "$AUDIOASSAULT"
    fi

    # copy VST files (paths below, including the trailing space in
    # "RVXX Linux ", match the layout inside the vendor zips)
    sudo cp -r "$TEMPDIR/Duality/Linux 1.2.5/Duality Bass Studio.so" "$AUDIOASSAULT"
    sudo cp -r "$TEMPDIR/Sigma/Sigma v2/Sigma v2 Linux/Sigma v2.so" "$AUDIOASSAULT"
    sudo cp -r "$TEMPDIR/RVXX/RVXX v2/RVXX v2 Linux/RVXX v2.so" "$AUDIOASSAULT"
    #1
    sudo cp -r "$TEMPDIR/Sigma/SigmaVST 102.so" "$AUDIOASSAULT"
    sudo cp -r "$TEMPDIR/RVXX/RVXX Installer 1.0.1/RVXX Linux /RVXX VST 101.so" "$AUDIOASSAULT"

    # check if the standalone install dir exists
    if [ ! -d "$AUDIOASSAULTBIN" ]; then
        # FIX: -p so nested creation cannot fail on a partial re-run
        sudo mkdir -p "$AUDIOASSAULTBIN"
        sudo mkdir -p "$AUDIOASSAULTBIN/RVXX"
        sudo mkdir -p "$AUDIOASSAULTBIN/Sigma"
    fi

    # copy Stand Alone
    #v2
    sudo cp -r "$TEMPDIR/RVXX/RVXX v2/RVXX v2 Linux/RVXX" "$AUDIOASSAULTBIN"
    sudo cp -r "$TEMPDIR/RVXX/RVXX v2/RVXX v2 Linux/RVXX v2 Standalone" "$AUDIOASSAULTBIN/RVXX"
    sudo cp -r "$TEMPDIR/Sigma/Sigma v2/Sigma v2 Linux/Sigma" "$AUDIOASSAULTBIN"
    sudo cp -r "$TEMPDIR/Sigma/Sigma v2/Sigma v2 Linux/Sigma v2 Standalone" "$AUDIOASSAULTBIN/Sigma"
    # v1
    sudo cp -r "$TEMPDIR/Duality/Linux 1.2.5/Duality" "$AUDIOASSAULTBIN"
    sudo cp -r "$TEMPDIR/Duality/Linux 1.2.5/Duality Bass Studio" "$AUDIOASSAULTBIN/Duality"
    sudo cp -r "$TEMPDIR/RVXX/RVXX Installer 1.0.1/RVXX Linux /RVXX STANDALONE 101" "$AUDIOASSAULTBIN/RVXX"
    sudo cp -r "$TEMPDIR/Sigma/Sigma Standalone 102" "$AUDIOASSAULTBIN/Sigma"

    #chmod 777
    sudo chmod -R 777 "$AUDIOASSAULTBIN/Duality/"
    sudo chmod -R 777 "$AUDIOASSAULTBIN/RVXX/"
    sudo chmod -R 777 "$AUDIOASSAULTBIN/Sigma/"

    # chmod
    #v2
    sudo chmod +x "$AUDIOASSAULTBIN/RVXX/RVXX v2 Standalone"
    sudo chmod +x "$AUDIOASSAULTBIN/Sigma/Sigma v2 Standalone"
    #v1
    sudo chmod +x "$AUDIOASSAULTBIN/Duality/Duality Bass Studio"
    sudo chmod +x "$AUDIOASSAULTBIN/RVXX/RVXX STANDALONE 101"
    sudo chmod +x "$AUDIOASSAULTBIN/Sigma/Sigma Standalone 102"

    # usr/local/bin links
    sudo ln -s "$AUDIOASSAULTBIN/RVXX/RVXX v2 Standalone" "/usr/local/bin/rvxx_v2"
    sudo ln -s "$AUDIOASSAULTBIN/Sigma/Sigma v2 Standalone" "/usr/local/bin/sigma_v2"
    #v1
    sudo ln -s "$AUDIOASSAULTBIN/Duality/Duality Bass Studio" "/usr/local/bin/duality-bass-studio_v1"
    sudo ln -s "$AUDIOASSAULTBIN/RVXX/RVXX STANDALONE 101" "/usr/local/bin/rvxx_v1"
    sudo ln -s "$AUDIOASSAULTBIN/Sigma/Sigma Standalone 102" "/usr/local/bin/sigma_v1"

    # copy .local files (desktop entries + icons shipped alongside)
    sudo cp -r usr/share /usr

    # Remove @temp
    sudo rm -rf "$TEMPDIR"

    echo "Done!!!"
}

# uninstall subrutine: removes everything aa_install created
aa_uninstall () {
    echo "Uninstalling..."
    # @temp
    sudo rm -rf "$TEMPDIR"
    # desktop files
    sudo rm -rf /usr/share/applications/sigma_v2.desktop
    sudo rm -rf /usr/share/applications/rvxx_v2.desktop
    sudo rm -rf /usr/share/applications/duality-bass-studio_v1.desktop
    sudo rm -rf /usr/share/applications/rvxx_v1.desktop
    sudo rm -rf /usr/share/applications/sigma_v1.desktop
    # icons
    sudo rm -rf /usr/share/icons/sigma.png
    sudo rm -rf /usr/share/icons/rvxx.png
    sudo rm -rf /usr/share/icons/duality-bass-studio.png
    # vst
    sudo rm -rf "$AUDIOASSAULT"
    # bin
    sudo rm -rf "$AUDIOASSAULTBIN"
    sudo rm -rf /usr/local/bin/rvxx_v2
    sudo rm -rf /usr/local/bin/sigma_v2
    sudo rm -rf /usr/local/bin/duality-bass-studio_v1
    sudo rm -rf /usr/local/bin/rvxx_v1
    sudo rm -rf /usr/local/bin/sigma_v1
    echo "Done!!!"
}

# Start: simple interactive menu
echo ""
echo "**************************************"
echo "*      Audio Assault Installer       *"
echo "**************************************"
echo "Available commands:"
echo ""
echo " [I]nstall"
echo " [U]ninstall"
echo " [Q]uit"
echo ""
echo -n "Command [I,U,Q]:"
# FIX: -r keeps backslashes in the input literal
read -r input
echo ""

#
case "$input" in
    install|INSTALL|I|i)
        aa_install
        ;;
    uninstall|UNINSTALL|U|u)
        aa_uninstall
        ;;
    quit|QUIT|Q|q|exit|EXIT|x|X)
        echo "Bye!!!"
        ;;
    # NOT IMPLEMENTED
    *)
        echo "Choose an available command!!!"
        ;;
esac
/**
 * Chunking using generators and array splice to be efficient on memory.
 * Generator makes sure that we create a chunk only when requested.
 * Array splice makes sure that chunk processed gets freed from memory —
 * NOTE: this means the caller's array is consumed (mutated) as chunks
 * are pulled; that is intentional to keep peak memory low.
 * @param {Array} arr Array of objects (mutated as chunks are consumed)
 * @param {Number} batchSize Batch size (finite number >= 1)
 * @yields {Array} the next chunk of up to `batchSize` items
 * @throws {TypeError} when `arr` is not an array
 */
function* chunk(arr = [], batchSize) {
  // This is a check if object, map, etc is passed
  if (!Array.isArray(arr)) throw new TypeError("Pass a valid array");

  // Invalid batch size: fall back to yielding the whole array once.
  // FIX: the original `if (isNaN(batchSize)) yield arr;` had no
  // `return`, and a batchSize of 0 (or a fraction < 1) made
  // Math.ceil(arr.length / batchSize) blow up, yielding empty chunks
  // forever. Guarding `< 1` and returning closes both holes.
  if (typeof batchSize !== "number" || isNaN(batchSize) || batchSize < 1) {
    yield arr;
    return;
  }

  // Number of chunks is fixed up front; splice shrinks arr as we go,
  // so the loop must not re-read arr.length.
  const batchCount = Math.ceil(arr.length / batchSize);
  for (let i = 0; i < batchCount; i++) {
    yield arr.splice(0, batchSize);
  }
}

module.exports = chunk;
// tests/testsuite/CUDASamples/5_Simulations_particles_reorderDataAndFindCellStateD/common.h
// particles_kernel.cuh

// When USE_TEX is 1, particle arrays are read through 1-D textures
// (tex1Dfetch on the *Tex objects declared below); otherwise FETCH is a
// plain array load. The kernels below work unchanged either way.
#define USE_TEX 0

#if USE_TEX
#define FETCH(t, i) tex1Dfetch(t##Tex, i)
#else
#define FETCH(t, i) t[i]
#endif

//#include "vector_types.h"
typedef unsigned int uint;

// simulation parameters
struct SimParams
{
    float3 colliderPos;
    float colliderRadius;

    float3 gravity;
    float globalDamping;
    float particleRadius;      // used as both radii in collideSpheres

    uint3 gridSize;            // cells per axis; calcGridHash assumes powers of 2
    uint numCells;
    float3 worldOrigin;        // world-space corner the grid is anchored to
    float3 cellSize;

    uint numBodies;
    uint maxParticlesPerCell;

    float spring;              // DEM force coefficients — see collideSpheres
    float damping;
    float shear;
    float attraction;
    float boundaryDamping;
};

// particles_kernel_impl.cuh
#if USE_TEX
// textures for particle position and velocity
texture<float4, 1, cudaReadModeElementType> oldPosTex;
texture<float4, 1, cudaReadModeElementType> oldVelTex;
texture<uint, 1, cudaReadModeElementType> gridParticleHashTex;
texture<uint, 1, cudaReadModeElementType> cellStartTex;
texture<uint, 1, cudaReadModeElementType> cellEndTex;
#endif

// simulation parameters in constant memory
__constant__ SimParams params;

// calculate position in uniform grid: integer cell coordinates of a
// world-space point. May fall outside [0, gridSize) — calcGridHash
// wraps them.
static __attribute__((always_inline)) __device__ int3 calcGridPos(float3 p)
{
    int3 gridPos;
    gridPos.x = floor((p.x - params.worldOrigin.x) / params.cellSize.x);
    gridPos.y = floor((p.y - params.worldOrigin.y) / params.cellSize.y);
    gridPos.z = floor((p.z - params.worldOrigin.z) / params.cellSize.z);
    return gridPos;
}

// calculate address in grid from position (clamping to edges)
static __attribute__((always_inline)) __device__ uint calcGridHash(int3 gridPos)
{
    // Masking with (size - 1) wraps each coordinate into range; this is
    // only correct when every gridSize component is a power of 2.
    gridPos.x = gridPos.x & (params.gridSize.x-1);  // wrap grid, assumes size is power of 2
    gridPos.y = gridPos.y & (params.gridSize.y-1);
    gridPos.z = gridPos.z & (params.gridSize.z-1);
    // Linearize as z * (sizeY * sizeX) + y * sizeX + x using 24-bit multiplies.
    return __umul24(__umul24(gridPos.z, params.gridSize.y), params.gridSize.x) + __umul24(gridPos.y, params.gridSize.x) + gridPos.x;
}

// collide two spheres using DEM method
// Returns the force on sphere A; zero when the spheres do not overlap.
// (float3 arithmetic operators/length/dot come from a CUDA helper-math
// header included elsewhere — not visible in this file.)
static __attribute__((always_inline)) __device__ float3
collideSpheres(float3 posA, float3 posB,
               float3 velA, float3 velB,
               float radiusA, float radiusB,
               float attraction)
{
    // calculate relative position
    float3 relPos = posB - posA;

    float dist = length(relPos);
    float collideDist = radiusA + radiusB;

    float3 force = make_float3(0.0f);

    if (dist < collideDist)
    {
        // NOTE(review): dist == 0 (coincident centers) would divide by
        // zero here — presumably prevented upstream; confirm.
        float3 norm = relPos / dist;

        // relative velocity
        float3 relVel = velB - velA;

        // relative tangential velocity
        float3 tanVel = relVel - (dot(relVel, norm) * norm);

        // spring force
        force = -params.spring*(collideDist - dist) * norm;
        // dashpot (damping) force
        force += params.damping*relVel;
        // tangential shear force
        force += params.shear*tanVel;
        // attraction
        force += attraction*relPos;
    }

    return force;
}

// collide a particle against all other particles in a given cell
// cellStart/cellEnd give each cell's [start, end) index range in the
// sorted particle arrays; 0xffffffff in cellStart marks an empty cell.
static __attribute__((always_inline)) __device__ float3
collideCell(int3 gridPos, uint index, float3 pos, float3 vel,
            float4 *oldPos, float4 *oldVel, uint *cellStart, uint *cellEnd)
{
    uint gridHash = calcGridHash(gridPos);

    // get start of bucket for this cell
    uint startIndex = FETCH(cellStart, gridHash);

    float3 force = make_float3(0.0f);

    if (startIndex != 0xffffffff)          // cell is not empty
    {
        // iterate over particles in this cell
        uint endIndex = FETCH(cellEnd, gridHash);

        for (uint j=startIndex; j<endIndex; j++)
        {
            if (j != index)                // check not colliding with self
            {
                float3 pos2 = make_float3(FETCH(oldPos, j));
                float3 vel2 = make_float3(FETCH(oldVel, j));

                // collide two spheres; forces from all neighbors accumulate
                force += collideSpheres(pos, pos2, vel, vel2, params.particleRadius, params.particleRadius, params.attraction);
            }
        }
    }

    return force;
}
/**
 * A static rectangular platform: a Matter.js body (never moves under
 * physics) paired with a p5.js draw routine.
 *
 * Relies on globals from the sketch: Bodies, World, world (Matter.js)
 * and push, fill, rect, pop (p5.js).
 */
class Base {
  // x, y: body center; w, h: rectangle dimensions (pixels)
  constructor(x, y, w, h) {
    // isStatic keeps the platform fixed in the physics simulation.
    var options = { isStatic: true };
    this.body = Bodies.rectangle(x, y, w, h, options);
    this.w = w;
    this.h = h;
    World.add(world, this.body);
  }

  // Draws the platform.
  show() {
    push();
    fill("#8d6e63");
    // NOTE(review): draws at (0, 0) rather than this.body.position —
    // presumably the caller translates to the body's position before
    // calling show(); confirm, otherwise the base renders at the origin.
    rect(0, 0, this.w, this.h);
    pop();
  }
}
import useLocalStorage from "./useLocalStorage";
import React, { useState, useEffect } from "react";

/**
 * Hook that persists a dark-mode flag in localStorage (under the
 * "dark-mode" key, via useLocalStorage) and mirrors it onto <body>
 * as the "dark-mode" CSS class.
 *
 * @param {*} initialValue value used when nothing is stored yet
 * @returns {[*, Function]} [isDark, setIsDark] — same shape as useState
 */
const useDarkMode = initialValue => {
  const [isDark, setIsDark] = useLocalStorage("dark-mode", initialValue);

  useEffect(() => {
    // classList.toggle with a force argument adds the class when the
    // flag is truthy and removes it otherwise — equivalent to the old
    // add/remove branches, using document.body directly instead of
    // getElementsByTagName("BODY")[0].
    document.body.classList.toggle("dark-mode", Boolean(isDark));
  }, [isDark]); // FIX: dependency array was missing, so the DOM was touched on every render

  return [isDark, setIsDark];
};

export default useDarkMode;