text stringlengths 1 1.05M |
|---|
/*
* Copyright (c) 2004-2021, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hisp.dhis.android.core.arch.repositories.collection.internal;
import org.hisp.dhis.android.core.arch.db.stores.internal.IdentifiableObjectStore;
import org.hisp.dhis.android.core.arch.repositories.children.internal.ChildrenAppender;
import org.hisp.dhis.android.core.arch.repositories.collection.ReadOnlyCollectionRepository;
import org.hisp.dhis.android.core.arch.repositories.collection.ReadOnlyIdentifiableCollectionRepository;
import org.hisp.dhis.android.core.arch.repositories.filters.internal.DateFilterConnector;
import org.hisp.dhis.android.core.arch.repositories.filters.internal.FilterConnectorFactory;
import org.hisp.dhis.android.core.arch.repositories.filters.internal.StringFilterConnector;
import org.hisp.dhis.android.core.arch.repositories.scope.RepositoryScope;
import org.hisp.dhis.android.core.common.CoreObject;
import org.hisp.dhis.android.core.common.IdentifiableColumns;
import org.hisp.dhis.android.core.common.IdentifiableObject;
import java.util.Map;
public class ReadOnlyIdentifiableCollectionRepositoryImpl<M extends CoreObject & IdentifiableObject,
R extends ReadOnlyCollectionRepository<M>>
extends ReadOnlyWithUidCollectionRepositoryImpl<M, R>
implements ReadOnlyIdentifiableCollectionRepository<M, R> {
public ReadOnlyIdentifiableCollectionRepositoryImpl(final IdentifiableObjectStore<M> store,
final Map<String, ChildrenAppender<M>> childrenAppenders,
final RepositoryScope scope,
final FilterConnectorFactory<R> cf) {
super(store, childrenAppenders, scope, cf);
}
@Override
public StringFilterConnector<R> byUid() {
return cf.string(IdentifiableColumns.UID);
}
@Override
public StringFilterConnector<R> byCode() {
return cf.string(IdentifiableColumns.CODE);
}
@Override
public StringFilterConnector<R> byName() {
return cf.string(IdentifiableColumns.NAME);
}
@Override
public StringFilterConnector<R> byDisplayName() {
return cf.string(IdentifiableColumns.DISPLAY_NAME);
}
@Override
public DateFilterConnector<R> byCreated() {
return cf.date(IdentifiableColumns.CREATED);
}
@Override
public DateFilterConnector<R> byLastUpdated() {
return cf.date(IdentifiableColumns.LAST_UPDATED);
}
public R orderByUid(RepositoryScope.OrderByDirection direction) {
return cf.withOrderBy(IdentifiableColumns.UID, direction);
}
public R orderByCode(RepositoryScope.OrderByDirection direction) {
return cf.withOrderBy(IdentifiableColumns.CODE, direction);
}
public R orderByName(RepositoryScope.OrderByDirection direction) {
return cf.withOrderBy(IdentifiableColumns.NAME, direction);
}
public R orderByDisplayName(RepositoryScope.OrderByDirection direction) {
return cf.withOrderBy(IdentifiableColumns.DISPLAY_NAME, direction);
}
public R orderByCreated(RepositoryScope.OrderByDirection direction) {
return cf.withOrderBy(IdentifiableColumns.CREATED, direction);
}
public R orderByLastUpdated(RepositoryScope.OrderByDirection direction) {
return cf.withOrderBy(IdentifiableColumns.LAST_UPDATED, direction);
}
} |
// Copyright 2021 Saferwall. All rights reserved.
// Use of this source code is governed by Apache v2 license
// license that can be found in the LICENSE file.
package trendmicro
import (
"path"
"testing"
"github.com/saferwall/saferwall/pkg/utils"
)
// filePathTest couples a sample file path with the scan Result the
// TrendMicro engine is expected to report for it.
type filePathTest struct {
	filepath string
	want     Result
}
// getAbsoluteFilePath resolves a repository-relative test file path
// against the project root directory.
func getAbsoluteFilePath(testfile string) string {
	root := utils.GetRootProjectDir()
	return path.Join(root, testfile)
}
// filepathScanTest lists the scan fixtures and their expected results.
// NOTE(review): the fixture lives under test/multiav/clean/ yet expects
// Infected: true — EICAR is a detection test file, but confirm the
// directory name is intentional.
var filepathScanTest = []filePathTest{
	{getAbsoluteFilePath("test/multiav/clean/eicar.com"),
		Result{Infected: true, Output: "Eicar_test_file"}},
}
// TestScanFilePath scans every fixture path and compares the outcome
// with the expected Result.
func TestScanFilePath(t *testing.T) {
	for _, tc := range filepathScanTest {
		tc := tc
		t.Run(tc.filepath, func(t *testing.T) {
			res, err := ScanFile(tc.filepath)
			if err != nil {
				t.Fatalf("TestScanFilePath(%s) failed, err: %s",
					tc.filepath, err)
			}
			if res != tc.want {
				t.Errorf("TestScanFilePath(%s) got %v, want %v",
					tc.filepath, res, tc.want)
			}
		})
	}
}
|
#!/bin/bash
# CI bootstrap: install Miniconda, create the "fletcher" test environment,
# run the test suite (with and without the numba JIT), upload coverage,
# and build the documentation on one designated job.
#
# Usage: script.sh <python-version> [use-dev-wheels]
set +x
set -eo pipefail

export LC_ALL=C.UTF-8
export LANG=C.UTF-8

export PYTHON_VERSION=$1
export USE_DEV_WHEELS=$2

export CONDA_PKGS_DIRS="$HOME/.conda_packages"
export MINICONDA="$HOME/miniconda"
export MINICONDA_URL="https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh"

# Install Miniconda. PATH is exported once, after the install prefix exists
# (the original script exported it twice, before and after the install).
wget --no-verbose -O miniconda.sh "$MINICONDA_URL"
bash miniconda.sh -b -p "$MINICONDA"
export PATH="$MINICONDA/bin:$PATH"

conda update -y -q conda
conda config --set auto_update_conda false
# NOTE(review): repo.continuum.io/pkgs/free is a legacy channel — confirm it
# is still required for these pinned packages.
conda config --add channels https://repo.continuum.io/pkgs/free
conda config --add channels conda-forge
conda create -y -q -n fletcher python=${PYTHON_VERSION} \
      pandas pyarrow=0.14 pytest pytest-cov pytest-flake8 \
      hypothesis \
      flake8 \
      setuptools_scm \
      pip \
      numba \
      codecov \
      six \
      sphinx \
      -c conda-forge
source activate fletcher

# Formatting check only on the 3.6 job (black is pinned for reproducibility).
if [ "${PYTHON_VERSION}" = "3.6" ]; then
  conda install -y -q black=19.3b0 -c conda-forge
  black --check .
fi

# Optionally replace released NumPy/Pandas with nightly dev wheels.
if [[ ${USE_DEV_WHEELS} ]]; then
  echo "Installing NumPy and Pandas dev"
  conda uninstall -y --force numpy pandas
  PRE_WHEELS="https://7933911d6844c6c53a7d-47bd50c35cd79bd838daf386af554a83.ssl.cf2.rackcdn.com"
  pip install --pre --no-deps --upgrade --timeout=60 -f "$PRE_WHEELS" numpy pandas
fi

pip install --no-deps -e .
py.test --junitxml=test-reports/junit.xml --cov=./

# Do a second run with JIT disabled to produce coverage and check that the
# code works also as expected in pure Python (only on the 3.6 job).
if [ "${PYTHON_VERSION}" = "3.6" ]; then
  NUMBA_DISABLE_JIT=1 py.test --junitxml=test-reports/junit.xml --cov=./
fi

# Upload coverage in each build, codecov.io merges the reports
codecov

# Check documentation build only in one job
if [ "${PYTHON_VERSION}" = "3.6" ]; then
  pushd docs
  make html
  popd
fi
|
// Generated by script, don't edit it please.
import createSvgIcon from '../../createSvgIcon';
import EnvelopeOpenSvg from '@rsuite/icon-font/lib/legacy/EnvelopeOpen';
// Icon component wrapping the legacy EnvelopeOpen SVG glyph.
// Generated file — regenerate via the icon script rather than hand-editing.
const EnvelopeOpen = createSvgIcon({
  as: EnvelopeOpenSvg,
  ariaLabel: 'envelope open',
  category: 'legacy',
  displayName: 'EnvelopeOpen'
});
export default EnvelopeOpen;
|
package week1.controllers;
import week1.exeptions.InvalidLoginException;
import week1.interfaces.IAppDb;
import week1.interfaces.ITerminalController;
import week1.models.Bill;
import week1.models.Product;
import week1.models.Salesman;
import java.util.Comparator;
import java.util.List;
/**
* Created by ENIAC on 24.11.2017.
*/
/**
 * Logging proxy for {@link ITerminalController}: prints a log line for
 * selected operations and delegates every call to the wrapped controller.
 */
public class ProxyTerminalControllerImpl implements ITerminalController {

    /** Wrapped controller that performs the real work. */
    private final ITerminalController terminalController;

    public ProxyTerminalControllerImpl(ITerminalController terminalController) {
        this.terminalController = terminalController;
    }

    @Override
    public boolean login(String login, String pass) throws InvalidLoginException {
        System.out.println("Logg: login attempt");
        // Bug fix: the proxy previously discarded the delegate's result and
        // always returned false; propagate the real login outcome instead.
        return terminalController.login(login, pass);
    }

    @Override
    public Bill createBill() {
        System.out.println("Logg: create bill attempt");
        return terminalController.createBill();
    }

    @Override
    public IAppDb getDb() {
        return terminalController.getDb();
    }

    @Override
    public List<Bill> getAllBills() {
        return terminalController.getAllBills();
    }

    @Override
    public Bill addProduct(int billId, Product product) {
        System.out.println("Logg: add product attempt");
        return terminalController.addProduct(billId, product);
    }

    @Override
    public Bill closeBill(int id) {
        System.out.println("Logg: close bill attempt");
        return terminalController.closeBill(id);
    }

    @Override
    public Bill findBillById(int billId) {
        System.out.println("Logg: find bill by id attempt");
        return terminalController.findBillById(billId);
    }

    @Override
    public List<Bill> filterForBills(String start, String end, Comparator<Bill> comparator) {
        System.out.println("Logg: filter for bills attempt");
        return terminalController.filterForBills(start, end, comparator);
    }

    @Override
    public List<Product> getAllProducts() {
        return terminalController.getAllProducts();
    }

    @Override
    public Salesman findSalesmanByLogin(String login) {
        System.out.println("Logg: find salesman by login attempt");
        return terminalController.findSalesmanByLogin(login);
    }

    @Override
    public Salesman getTopOfSalesmans() {
        System.out.println("Logg: get top of salesmans attempt");
        return terminalController.getTopOfSalesmans();
    }

    @Override
    public int getCurrentSalesmanIndex() {
        return terminalController.getCurrentSalesmanIndex();
    }

    @Override
    public void setCurrentSalesmanIndex(int i) {
        terminalController.setCurrentSalesmanIndex(i);
    }
}
|
# Introspect the OAuth access token given as $1 against the local auth server.
# WARNING(review): client credentials are hardcoded below — move them to
# environment variables or a secret store before sharing this script.
curl -X POST -vu kiditz:rioters7 http://localhost:5000/oauth/check_token -d "access_token=$1"
|
#
# Ripgrep configuration module for zsh.
#
# Abort if requirements are not met.
if (( ! $+commands[rg] )); then
  return 1
fi

# Path to configuration file.
export RIPGREP_CONFIG_PATH="$XDG_CONFIG_HOME/ripgrep/config"

# Check dependencies for interactive searches.
if (( ! $+commands[fzf] )); then
  return 2
fi

# Default rg commands for integration with fzf.
# 'noglob' keeps zsh from expanding the pattern before rg sees it.
FZF_RG_COMMAND='noglob rg --files-with-matches --no-messages'
FZF_RG_PREVIEW='noglob rg --pretty --context=10 2>/dev/null'
# Search file contents for the given pattern and preview matches.
# Selected entries are opened with the default opener.
#
# Usage: search <pattern> [rg-options...]
function search {
  # Require a pattern as first argument; anything option-like is rejected.
  [[ ! $1 || $1 == -* ]] && echo "fs: missing rg pattern" && return 1
  local pat="$1" && shift
  # rg lists files containing the pattern; fzf lets the user pick from them
  # with a context preview. Remaining arguments are forwarded to rg in both
  # the listing command and the preview.
  local selected=($( \
    FZF_HEIGHT=${FZF_HEIGHT:-90%} \
    FZF_DEFAULT_COMMAND="$FZF_RG_COMMAND $* '$pat'" \
    fzf \
      --multi \
      --preview "$FZF_RG_PREVIEW $* '$pat' {}" \
      --preview-window=wrap
  ))
  # Open selected files.
  [[ $selected ]] && for f in ${selected}; do open $f || return 1; done
}
# Search files interactively and preview matches.
# Selected entries are opened with the default opener.
# NOTE: The optional directory MUST be given as first argument,
# otherwise the behavior is undefined.
#
# Usage: search-interactive [dir] [rg-options...]
function search-interactive {
  local dir
  # Optional first argument: directory to search (must not look like an option).
  [[ $1 && $1 != -* ]] && dir=$1 && shift
  # Start from a plain file listing; every keystroke re-runs rg via the
  # change:reload binding ('--phony' stops fzf filtering on its own).
  # cut keeps only the first two ':'-separated fields of each selection.
  local selected=($( \
    FZF_HEIGHT=${FZF_HEIGHT:-90%} \
    FZF_DEFAULT_COMMAND="rg --files $* $dir" \
    fzf \
      --multi \
      --phony \
      --bind "change:reload:$FZF_RG_COMMAND {q} $* $dir || true" \
      --preview "$FZF_RG_PREVIEW {q} {} $*" \
      --preview-window=wrap \
      | cut -d":" -f1,2
  ))
  # Open selected files.
  # NOTE(review): selections may carry a ":line" suffix after cut — confirm
  # the 'open' helper accepts such arguments.
  [[ $selected ]] && for f in ${selected}; do open $f || return 1; done
}
# Interactive search variant that sorts the results.
# This disables parallelism and makes searches much slower.
# Interactive search variant that sorts the results by path.
# Sorting disables rg's parallelism and makes searches much slower.
function search-interactive-sorted {
  local -a fwd
  fwd=("$@" --sort=path)
  search-interactive "${fwd[@]}"
}
# Usability aliases.
alias fs='search'
alias ff='search-interactive'
alias ffs='search-interactive-sorted'

# Add function dir to fpath for completions.
# NOTE(review): autoload_dir is expected to be provided by the surrounding
# zsh framework — confirm it is defined before this module is sourced.
autoload_dir ${0:h}/function
|
<gh_stars>0
package main
import (
"log"
"github.com/okuuva/nvd-search-cli/nvd-search"
"github.com/docopt/docopt-go"
"github.com/mitchellh/go-homedir"
)
// main parses the command line with docopt and dispatches an NVD search.
func main() {
	usage := `Usage: nvd-search [-c CVE | -k KEY] [-v VENDOR] [-p PRODUCT] [-n NVD]
Options:
-h --help show this
-c CVE --cve CVE CVE-ID of the vulnerability [default: ]
-k KEY --key KEY keyword search [default: ]
-v VENDOR --vendor VENDOR CPE vendor name [default: ]
-p PRODUCT --product PRODUCT CPE product name [default: ]
-n NVD --nvd NVD Location of the local NVD [default: ~/.config/nvd-cli/db]
`
	// Bug fix: the parse error was previously discarded with `_`; a failed
	// parse would then panic on the type assertions below.
	args, err := docopt.Parse(usage, nil, true, "nvd-cli 0.1", false)
	if err != nil {
		log.Fatal(err)
	}
	// All options carry string defaults, so the assertions below are safe
	// once parsing succeeded; --nvd is checked explicitly as it feeds a path.
	nvdArg, ok := args["--nvd"].(string)
	if !ok {
		log.Fatal("missing --nvd argument")
	}
	path, err := homedir.Expand(nvdArg)
	if err != nil {
		log.Fatal(err)
	}
	nvd_search.Search(args["--cve"].(string), args["--key"].(string), args["--vendor"].(string), args["--product"].(string), path)
}
|
import React from "react"
import { Link } from "gatsby"
import Layout from "../components/layout"
import productpageStyles from "../styles/productpage.module.css"
import { FaChartLine, FaChartPie, FaMobileAlt, FaDatabase } from 'react-icons/fa';
import { IoIosArrowUp } from 'react-icons/io';
const ProductPage = () => {
return (
<Layout>
<div className={productpageStyles.section1}>
<h1>Tuotteet</h1>
<p><Link className={productpageStyles.link} to="/frontpage">Etusivu</Link> / Tuotteet</p>
</div>
<div className={productpageStyles.section2}>
<div className={productpageStyles.text2}>
<p>Kaikkien tuotteidemme lähtökohtana on helppokäyttöisyys ja osuvuus. Palvelut on mietitty siten, että ne tuovat helpotusta päivittäiseen tekemiseen ja tehostavat siten prosesseja.</p>
<p>Tuotteita käytetään digitaalisesti ja pääosin lisenssipohjaisesti. Kun lisenssi on käytössä, voi tuotteita hyödyntää täysimääräisesti ja esimerkiksi sisältöjä voi vaihtaa tarvittaessa useinkin.</p>
</div>
<div className={productpageStyles.container2}>
<div className={productpageStyles.productpagebox}>
<div className={productpageStyles.row}>
<FaChartLine size='70px' color='rgb(68, 27, 66)' />
<h2><NAME></h2>
</div>
<p><NAME> on työkalu matkailun vaikutuksien realiaiakaiseen seurantaan. Matkailutulon, motiivin, demografiset tiedot sekä tyytyväisyys.</p>
<button className={productpageStyles.boxbtn}>Lue lisää</button>
</div>
<div className={productpageStyles.productpagebox}>
<div className={productpageStyles.row}>
<FaChartPie size='70px' color='rgb(68, 27, 66)' />
<h2>Salmi Eventlizer</h2>
</div>
<p>Salmi Eventlizer on tehty tapahtumien, kokousten ja kongressien talousvaikutusten, kävijäprofiilien ja tyytyväisyyden analysointiin.</p>
<button className={productpageStyles.boxbtn}>Lue lisää</button>
</div>
<div className={productpageStyles.productpagebox}>
<div className={productpageStyles.row}>
<FaMobileAlt size='70px' color='rgb(68, 27, 66)' />
<h2>Salmi AR</h2>
</div>
<p>Salmi AR tekee lisätyn todellisuuden eli AR:n käyttöönoton helpoksi. Erityisesti Salmi AR soveltuu erilaisten virtuaalisten opaskierrosten toteuttamiseen.</p>
<button className={productpageStyles.boxbtn}>Lue lisää</button>
</div>
<div className={productpageStyles.productpagebox}>
<div className={productpageStyles.row}>
<FaDatabase size='70px' color='rgb(68, 27, 66)' />
<h2>Salmi Data</h2>
</div>
<p>Salmi Data on tietokantaratkaisu, jolla hallitaan suuria matkailun tietokokonaisuuksia. Tuo tietoa muualta ja vie sitä myös toisiin järjestelmiin.</p>
<button className={productpageStyles.boxbtn}>Lue lisää</button>
</div>
</div>
</div>
<div className={productpageStyles.section3}>
<img src="/ota_yhteytta.png" height='270px' width='370px'></img>
<div className={productpageStyles.form}>
<form>
Nimi (pakollinen)<br />
<input type="text" id="name" name="name" /><br />
Sähköposti (pakollinen)<br />
<input type="text" id="email" name="email" /><br />
Puhelinnumero<br />
<input type="text" id="puh" name="puh" /><br />
Aihe<br />
<input type="text" id="topic" name="topic" /><br />
Viesti<br />
<textarea name="message" rows="10" cols="100"></textarea><br />
<input type="submit" value="Lähetä" className={productpageStyles.rndbtn} />
</form>
</div>
</div>
<a href="#" className={productpageStyles.topbtn}><IoIosArrowUp /></a>
</Layout>
)
}
export default ProductPage |
package de.ids_mannheim.korap.web.controller;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.SecurityContext;
import javax.ws.rs.core.UriInfo;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import com.fasterxml.jackson.databind.JsonNode;
import com.sun.jersey.spi.container.ResourceFilters;
import de.ids_mannheim.korap.authentication.AuthenticationManager;
import de.ids_mannheim.korap.config.Attributes;
import de.ids_mannheim.korap.config.Scopes;
import de.ids_mannheim.korap.exceptions.KustvaktException;
import de.ids_mannheim.korap.security.context.TokenContext;
import de.ids_mannheim.korap.user.KorAPUser;
import de.ids_mannheim.korap.user.User;
import de.ids_mannheim.korap.user.UserDetails;
import de.ids_mannheim.korap.user.UserQuery;
import de.ids_mannheim.korap.user.UserSettingProcessor;
import de.ids_mannheim.korap.user.Userdata;
import de.ids_mannheim.korap.utils.JsonUtils;
import de.ids_mannheim.korap.utils.StringUtils;
import de.ids_mannheim.korap.web.KustvaktResponseHandler;
import de.ids_mannheim.korap.web.filter.APIVersionFilter;
import de.ids_mannheim.korap.web.filter.AuthenticationFilter;
import de.ids_mannheim.korap.web.filter.BlockingFilter;
import de.ids_mannheim.korap.web.filter.DemoUserFilter;
import de.ids_mannheim.korap.web.filter.PiwikFilter;
/**
 * User account endpoints for Shibboleth-authenticated users: account update,
 * info/settings/details retrieval and update, stored queries, and account
 * deletion. Most write paths are partially disabled (commented out) pending
 * refactoring — see the per-method notes.
 *
 * @author hanl, margaretha
 */
@Controller
@Path("{version}/shibboleth/user")
@Produces(MediaType.APPLICATION_JSON + ";charset=utf-8")
@ResourceFilters({APIVersionFilter.class, PiwikFilter.class })
public class ShibbolethUserController {

    @Autowired
    private KustvaktResponseHandler kustvaktResponseHandler;
    private static Logger jlog = LogManager.getLogger(ShibbolethUserController.class);
    @Autowired
    private AuthenticationManager controller;
    private @Context UriInfo info;

    // EM: may be used for managing shib users
    //todo: password update in special function? --> password reset only!
    @POST
    @Path("update")
    @Consumes(MediaType.APPLICATION_JSON)
    @ResourceFilters({ AuthenticationFilter.class, PiwikFilter.class,
            BlockingFilter.class })
    public Response updateAccount (@Context SecurityContext ctx, String json) {
        TokenContext context = (TokenContext) ctx.getUserPrincipal();
        try {
            User user = controller.getUser(context.getUsername());
            JsonNode node = JsonUtils.readTree(json);
            KorAPUser ident = (KorAPUser) user;
            KorAPUser values = User.UserFactory.toUser(json);
            // NOTE(review): the actual account update is commented out below,
            // so this endpoint currently parses the input but changes nothing.
            // user = controller
            // .checkPasswordAllowance(ident, values.getPassword(),
            // node.path("new_password").asText());
            // controller.updateAccount(user);
        }
        catch (KustvaktException e) {
            throw kustvaktResponseHandler.throwit(e);
        }
        return Response.ok().build();
    }

    // todo: refactor and make something out of if --> needs to give some sort of feedback!
    // Returns the user data fields permitted by the requested scopes.
    @GET
    @Path("info")
    @ResourceFilters({ AuthenticationFilter.class, PiwikFilter.class,
            BlockingFilter.class })
    public Response getStatus (@Context SecurityContext context,
            @QueryParam("scopes") String scopes) {
        TokenContext ctx = (TokenContext) context.getUserPrincipal();
        Scopes m;
        try {
            User user = controller.getUser(ctx.getUsername());
            Userdata data = controller.getUserData(user, UserDetails.class);
            // NOTE(review): toSet(scopes, " ") runs before the null check on
            // the next line — confirm it tolerates a null argument.
            Set<String> base_scope = StringUtils.toSet(scopes, " ");
            if (scopes != null) base_scope.retainAll(StringUtils.toSet(scopes));
            scopes = StringUtils.toString(base_scope);
            m = Scopes.mapScopes(scopes, data);
            return Response.ok(m.toEntity()).build();
        }
        catch (KustvaktException e) {
            throw kustvaktResponseHandler.throwit(e);
        }
    }

    // Returns the authenticated user's settings, serialized as JSON.
    @GET
    @Path("settings")
    @ResourceFilters({ AuthenticationFilter.class, DemoUserFilter.class,
            PiwikFilter.class, BlockingFilter.class })
    public Response getUserSettings (@Context SecurityContext context,
            @Context Locale locale) {
        TokenContext ctx = (TokenContext) context.getUserPrincipal();
        String result;
        try {
            User user = controller.getUser(ctx.getUsername());
            Userdata data = controller.getUserData(user, UserSettingProcessor.class);
            data.setField(Attributes.USERNAME, ctx.getUsername());
            result = data.serialize();
        }
        catch (KustvaktException e) {
            jlog.error("Exception encountered!", e);
            throw kustvaktResponseHandler.throwit(e);
        }
        return Response.ok(result).build();
    }

    // Merges the posted settings map into the user's stored settings.
    // Demo users are rejected with 304 Not Modified.
    @Deprecated
    @POST
    @Path("settings")
    @Consumes({ MediaType.APPLICATION_JSON })
    @ResourceFilters({ AuthenticationFilter.class, PiwikFilter.class,
            BlockingFilter.class })
    public Response updateSettings (@Context SecurityContext context,
            @Context Locale locale, Map settings) {
        TokenContext ctx = (TokenContext) context.getUserPrincipal();
        if (settings == null) return Response.notModified().build();
        try {
            User user = controller.getUser(ctx.getUsername());
            if (User.UserFactory.isDemo(ctx.getUsername()))
                return Response.notModified().build();
            Userdata data = controller.getUserData(user, UserSettingProcessor.class);
            // todo: check setting only within the scope of user settings permissions; not foundry range. Latter is part of
            // frontend which only displays available foundries and
            // SecurityManager.findbyId(us.getDefaultConstfoundry(), user, Foundry.class);
            // SecurityManager.findbyId(us.getDefaultLemmafoundry(), user, Foundry.class);
            // SecurityManager.findbyId(us.getDefaultPOSfoundry(), user, Foundry.class);
            // SecurityManager.findbyId(us.getDefaultRelfoundry(), user, Foundry.class);
            Userdata new_data = new UserSettingProcessor(user.getId());
            new_data.readQuietly((Map<String, Object>) settings, false);
            data.update(new_data);
            controller.updateUserData(data);
        }
        catch (KustvaktException e) {
            jlog.error("Exception encountered!", e);
            throw kustvaktResponseHandler.throwit(e);
        }
        return Response.ok().build();
    }

    // Returns the user's details, optionally narrowed to a single field
    // addressed by the "pointer" query parameter.
    @GET
    @Path("details")
    @ResourceFilters({ AuthenticationFilter.class, DemoUserFilter.class,
            PiwikFilter.class, BlockingFilter.class })
    public Response getDetails (@Context SecurityContext context,
            @Context Locale locale, @QueryParam("pointer") String pointer) {
        TokenContext ctx = (TokenContext) context.getUserPrincipal();
        String result;
        try {
            User user = controller.getUser(ctx.getUsername());
            Userdata data = controller.getUserData(user, UserDetails.class);
            data.setField(Attributes.USERNAME, ctx.getUsername());
            if (pointer != null)
                result = data.get(pointer).toString();
            else
                result = data.serialize();
        }
        catch (KustvaktException e) {
            jlog.error("Exception encountered: "+ e.string());
            throw kustvaktResponseHandler.throwit(e);
        }
        return Response.ok(result).build();
    }

    // EM: may be used for managing shib users
    // Merges the posted details map into the user's stored details.
    // Demo users are rejected with 304 Not Modified.
    @POST
    @Path("details")
    @Consumes({ MediaType.APPLICATION_JSON })
    @ResourceFilters({ AuthenticationFilter.class, PiwikFilter.class,
            BlockingFilter.class })
    public Response updateDetails (@Context SecurityContext context,
            @Context Locale locale, Map details) {
        TokenContext ctx = (TokenContext) context.getUserPrincipal();
        if (details == null) return Response.notModified().build();
        try {
            User user = controller.getUser(ctx.getUsername());
            if (User.UserFactory.isDemo(ctx.getUsername()))
                return Response.notModified().build();
            UserDetails new_data = new UserDetails(user.getId());
            new_data.readQuietly((Map<String, Object>) details, false);
            UserDetails det = controller.getUserData(user, UserDetails.class);
            det.update(new_data);
            controller.updateUserData(det);
        }
        catch (KustvaktException e) {
            jlog.error("Exception encountered!", e);
            throw kustvaktResponseHandler.throwit(e);
        }
        return Response.ok().build();
    }

    //fixme: if policy allows, foreign user might be allowed to change search!
    // NOTE(review): the persistence section below is entirely commented out,
    // so this endpoint parses the posted queries and returns an empty set.
    @POST
    @Path("queries")
    @Consumes(MediaType.APPLICATION_JSON)
    @ResourceFilters({ AuthenticationFilter.class, PiwikFilter.class,
            BlockingFilter.class })
    public Response updateQueries (@Context SecurityContext context,
            String json) {
        TokenContext ctx = (TokenContext) context.getUserPrincipal();
        Collection<UserQuery> add = new HashSet<>();
        try {
            User user = controller.getUser(ctx.getUsername());
            List<UserQuery> userQuieres = new ArrayList<>();
            JsonNode nodes = JsonUtils.readTree(json);
            Iterator<JsonNode> node = nodes.elements();
            while (node.hasNext()) {
                JsonNode cursor = node.next();
                UserQuery query =
                        new UserQuery(cursor.path("id").asInt(), user.getId());
                query.setQueryLanguage(cursor.path("queryLanguage").asText());
                query.setQuery(cursor.path("query").asText());
                query.setDescription(cursor.path("description").asText());
                userQuieres.add(query);
            }
            //1: add all that are new, update all that are retained, delete the rest
            // Set<UserQuery> resources = ResourceFinder
            // .search(user, UserQuery.class);
            //
            // add.addAll(userQuieres);
            // add.removeAll(resources);
            // Collection<UserQuery> update = new HashSet<>(userQuieres);
            // update.retainAll(resources);
            // resources.removeAll(userQuieres);
            //
            // if (!update.isEmpty()) {
            // resourceHandler.updateResources(user,
            // update.toArray(new UserQuery[update.size()]));
            // }
            // if (!add.isEmpty()) {
            // resourceHandler.storeResources(user,
            // add.toArray(new UserQuery[add.size()]));
            // }
            // if (!resources.isEmpty()) {
            // resourceHandler.deleteResources(user,
            // resources.toArray(new UserQuery[resources.size()]));
            // }
        }
        catch (KustvaktException e) {
            jlog.error("Exception encountered!", e);
            throw kustvaktResponseHandler.throwit(e);
        }
        try {
            return Response.ok(JsonUtils.toJSON(add)).build();
        }
        catch (KustvaktException e) {
            throw kustvaktResponseHandler.throwit(e);
        }
    }

    // EM: may be used for managing shib users
    // Deletes the authenticated user's account.
    @DELETE
    @ResourceFilters({ AuthenticationFilter.class, PiwikFilter.class,
            BlockingFilter.class })
    public Response deleteUser (@Context SecurityContext context) {
        TokenContext ctx = (TokenContext) context.getUserPrincipal();
        try {
            User user = controller.getUser(ctx.getUsername());
            //todo: test that demo user cannot be deleted!
            controller.deleteAccount(user);
        }
        catch (KustvaktException e) {
            jlog.error("Exception encountered!", e);
            throw kustvaktResponseHandler.throwit(e);
        }
        return Response.ok().build();
    }

    // NOTE(review): query retrieval is commented out below, so this endpoint
    // always returns an empty body on success.
    @GET
    @Path("queries")
    @ResourceFilters({ AuthenticationFilter.class, DemoUserFilter.class,
            PiwikFilter.class, BlockingFilter.class })
    public Response getQueries (@Context SecurityContext context,
            @Context Locale locale) {
        TokenContext ctx = (TokenContext) context.getUserPrincipal();
        String queryStr;
        try {
            User user = controller.getUser(ctx.getUsername());
            // Set<UserQuery> queries = ResourceFinder
            // .search(user, UserQuery.class);
            // queryStr = JsonUtils.toJSON(queries);
            //todo:
            queryStr = "";
        }
        catch (KustvaktException e) {
            jlog.error("Exception encountered!", e);
            throw kustvaktResponseHandler.throwit(e);
        }
        return Response.ok(queryStr).build();
    }
}
|
package service
import (
"context"
"errors"
"github.com/container-storage-interface/spec/lib/go/csi"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
"io/ioutil"
"os"
"regexp"
"strconv"
)
var (
	// ephemeralStagingMountPath is the kubelet-side staging root for inline
	// ephemeral volumes; it also hosts the per-volume lock files that map a
	// request volume ID to the array volume ID.
	ephemeralStagingMountPath = "/var/lib/kubelet/plugins/kubernetes.io/csi/pv/ephemeral/"
)
// fileExist reports whether filename exists on the local filesystem.
// Note: any Stat error other than "not exist" (e.g. permission denied)
// is treated as the file existing, matching the original behavior.
func (s *service) fileExist(filename string) bool {
	if _, err := os.Stat(filename); err != nil && os.IsNotExist(err) {
		return false
	}
	return true
}
// parseSize converts a Kubernetes-style size string ending in "Gi"
// (e.g. "8 Gi" or "16Gi") into a byte count. It returns an error when the
// string does not match that form.
func parseSize(size string) (int64, error) {
	// \d+ (rather than the original \d*) rejects a bare "Gi" up front
	// instead of failing later inside ParseInt on an empty capture.
	pathMetadata := regexp.MustCompile(`(\d+) ?Gi$`)
	matches := pathMetadata.FindStringSubmatch(size)
	if len(matches) == 2 {
		gib, err := strconv.ParseInt(matches[1], 10, 64)
		if err != nil {
			return 0, errors.New("Failed to parse bytes")
		}
		// 1 GiB = 1073741824 bytes.
		return gib * 1073741824, nil
	}
	message := "failed to parse bytes for string: " + size
	return 0, errors.New(message)
}
// ephemeralNodePublish provisions and publishes an inline ephemeral volume in
// a single call. Complete stack: systemProbe, CreateVolume,
// ControllerPublishVolume, and NodePublishVolume. A lock directory under
// ephemeralStagingMountPath records the array volume ID and marks the volume
// as ephemeral for NodeUnpublishVolume.
func (s *service) ephemeralNodePublish(
	ctx context.Context,
	req *csi.NodePublishVolumeRequest) (
	*csi.NodePublishVolumeResponse, error) {
	// Ensure the staging root for ephemeral volumes exists.
	if _, err := os.Stat(ephemeralStagingMountPath); os.IsNotExist(err) {
		Log.Debug("path does not exist, will attempt to create it")
		err = os.MkdirAll(ephemeralStagingMountPath, 0750)
		if err != nil {
			Log.Errorf("ephemeralNodePublish: %s", err.Error())
			return nil, status.Error(codes.Internal, "Unable to create directory for mounting ephemeral volumes")
		}
	}
	volID := req.GetVolumeId()
	volName := req.VolumeContext["volumeName"]
	// NOTE(review): the check rejects names longer than 31 characters while
	// the message says "over 32" — confirm the intended limit.
	if len(volName) > 31 {
		Log.Errorf("Volume name: %s is over 32 characters, too long.", volName)
		return nil, status.Error(codes.Internal, "Volume name too long")
	}
	if volName == "" {
		Log.Errorf("Missing Parameter: volumeName must be specified in volume attributes section for ephemeral volumes")
		return nil, status.Error(codes.Internal, "Volume name not specified")
	}
	// Size comes from the volume context as a "<n>Gi" string.
	volSize, err := parseSize(req.VolumeContext["size"])
	if err != nil {
		Log.Errorf("Parse size failed %s", err.Error())
		return nil, status.Error(codes.Internal, "inline ephemeral parse size failed")
	}
	systemName := req.VolumeContext["systemID"]
	if systemName == "" {
		Log.Info("systemName not specified, using default array")
		systemName = s.opts.defaultSystemID
	}
	array := s.opts.arrays[systemName]
	if array == nil {
		//to get inside this if block, req has name, but secret has ID, need to convert from name -> ID
		if id, ok := s.connectedSystemNameToID[systemName]; ok {
			//systemName was sent in req, but secret used ID. Change to ID.
			Log.Debug("systemName set to id")
			array = s.opts.arrays[id]
		} else {
			err = status.Errorf(codes.Internal, "systemID: %s not recgonized", systemName)
			Log.Errorf("Error from ephemeralNodePublish: %v ", err)
			return nil, err
		}
	}
	// Verify connectivity to the target array before provisioning.
	err = s.systemProbe(ctx, array)
	if err != nil {
		Log.Errorf("systemProb Ephemeral %s", err.Error())
		return nil, status.Error(codes.Internal, "inline ephemeral system prob failed: "+err.Error())
	}
	// Step 1: create the backing volume on the array.
	crvolresp, err := s.CreateVolume(ctx, &csi.CreateVolumeRequest{
		Name: volName,
		CapacityRange: &csi.CapacityRange{
			RequiredBytes: volSize,
			LimitBytes:    0,
		},
		VolumeCapabilities: []*csi.VolumeCapability{req.VolumeCapability},
		Parameters:         req.VolumeContext,
		Secrets:            req.Secrets,
	})
	if err != nil {
		Log.Errorf("CreateVolume Ephemeral %s", err.Error())
		return nil, status.Error(codes.Internal, "inline ephemeral create volume failed: "+err.Error())
	}
	Log.Infof("volume ID returned from CreateVolume is: %s ", crvolresp.Volume.VolumeId)
	//Create lockfile to map vol ID from request to volID returned by CreateVolume
	// will also be used to determine if volume is ephemeral in NodeUnpublish
	errLock := os.MkdirAll(ephemeralStagingMountPath+volID, 0750)
	if errLock != nil {
		return nil, errLock
	}
	f, errLock := os.Create(ephemeralStagingMountPath + volID + "/id")
	if errLock != nil {
		return nil, errLock
	}
	defer f.Close() //#nosec
	_, errLock = f.WriteString(crvolresp.Volume.VolumeId)
	if errLock != nil {
		return nil, errLock
	}
	volumeID := crvolresp.Volume.VolumeId
	//in case systemName was not given with volume context
	systemName = s.getSystemIDFromCsiVolumeID(volumeID)
	if systemName == "" {
		Log.Errorf("getSystemIDFromCsiVolumeID was not able to determine systemName from volID: %s", volumeID)
		return nil, status.Error(codes.Internal, "inline ephemeral getSystemIDFromCsiVolumeID failed ")
	}
	NodeID := s.opts.SdcGUID
	// Step 2: map the new volume to this node; roll back via NodeUnpublish on failure.
	cpubresp, err := s.ControllerPublishVolume(ctx, &csi.ControllerPublishVolumeRequest{
		NodeId:           NodeID,
		VolumeId:         volumeID,
		VolumeCapability: req.VolumeCapability,
		Readonly:         req.Readonly,
		Secrets:          req.Secrets,
		VolumeContext:    crvolresp.Volume.VolumeContext,
	})
	if err != nil {
		Log.Infof("Rolling back and calling unpublish ephemeral volumes with VolId %s", crvolresp.Volume.VolumeId)
		_, _ = s.NodeUnpublishVolume(ctx, &csi.NodeUnpublishVolumeRequest{
			VolumeId:   volID,
			TargetPath: req.TargetPath,
		})
		return nil, status.Error(codes.Internal, "inline ephemeral controller publish failed: "+err.Error())
	}
	// Step 3: mount the volume at the requested target path; roll back on failure.
	_, err = s.NodePublishVolume(ctx, &csi.NodePublishVolumeRequest{
		VolumeId:          volumeID,
		PublishContext:    cpubresp.PublishContext,
		StagingTargetPath: ephemeralStagingMountPath,
		TargetPath:        req.TargetPath,
		VolumeCapability:  req.VolumeCapability,
		Readonly:          req.Readonly,
		Secrets:           req.Secrets,
		VolumeContext:     crvolresp.Volume.VolumeContext,
	})
	if err != nil {
		Log.Errorf("NodePublishErrEph %s", err.Error())
		_, _ = s.NodeUnpublishVolume(ctx, &csi.NodeUnpublishVolumeRequest{
			VolumeId:   volID,
			TargetPath: req.TargetPath,
		})
		return nil, status.Error(codes.Internal, "inline ephemeral node publish failed: "+err.Error())
	}
	return &csi.NodePublishVolumeResponse{}, nil
}
// ephemeralNodeUnpublish tears down an inline ephemeral volume.
//
// Call stack: ControllerUnpublishVolume, DeleteVolume (NodeUnpublish will
// already have been called by the time this method is called). Finally the
// per-volume lockfile directory created during ephemeral publish is removed,
// so the volume is no longer identified as ephemeral.
func (s *service) ephemeralNodeUnpublish(
	ctx context.Context,
	req *csi.NodeUnpublishVolumeRequest) error {
	Log.Infof("Called ephemeral Node unpublish")
	volID := req.GetVolumeId()
	if volID == "" {
		return status.Error(codes.InvalidArgument, "volume ID is required")
	}
	// The lockfile holds the real CSI volume ID returned by CreateVolume,
	// keyed by the volume ID from the request.
	lockFile := ephemeralStagingMountPath + volID + "/id"
	//while a file is being read from, it's a file determined by volID and is written by the driver
	/* #nosec G304 */
	dat, err := ioutil.ReadFile(lockFile)
	// NOTE(review): only a missing file is reported as an error here; any other
	// read failure falls through with empty dat, which would make the unpublish
	// below target an empty volume ID — confirm this is intentional.
	if err != nil && os.IsNotExist(err) {
		return status.Error(codes.Internal, "Inline ephemeral. Was unable to read lockfile")
	}
	goodVolid := string(dat)
	NodeID := s.opts.SdcGUID
	_, err = s.ControllerUnpublishVolume(ctx, &csi.ControllerUnpublishVolumeRequest{
		VolumeId: goodVolid,
		NodeId:   NodeID,
	})
	if err != nil {
		return errors.New("Inline ephemeral controller unpublish failed")
	}
	_, err = s.DeleteVolume(ctx, &csi.DeleteVolumeRequest{
		VolumeId: goodVolid,
	})
	if err != nil {
		return err
	}
	// Remove the lockfile directory so later calls no longer treat this
	// volume as ephemeral.
	err = os.RemoveAll(ephemeralStagingMountPath + volID)
	if err != nil {
		return errors.New("failed to cleanup lock files")
	}
	return nil
}
|
# Outputs current branch info in prompt format.
# Suppressed entirely when the red-pill.hide-status git config value is 1.
git_prompt_info()
{
  if [[ "$(command git config --get red-pill.hide-status 2>/dev/null)" != "1" ]]; then
    local ref
    # Prefer the symbolic branch name; fall back to the short SHA for a
    # detached HEAD. Outside a git repo both fail and nothing is printed.
    ref=$(command git symbolic-ref HEAD 2> /dev/null) || \
    ref=$(command git rev-parse --short HEAD 2> /dev/null) || return 0
    echo "$ZSH_THEME_GIT_PROMPT_PREFIX${ref#refs/heads/}$(parse_git_dirty)$ZSH_THEME_GIT_PROMPT_SUFFIX"
  fi
}
# Checks if working tree is dirty.
# Echoes $ZSH_THEME_GIT_PROMPT_DIRTY or $ZSH_THEME_GIT_PROMPT_CLEAN.
parse_git_dirty()
{
  local STATUS=''
  local -a FLAGS
  FLAGS=('--porcelain')
  # When red-pill.hide-dirty is 1 the status check is skipped entirely, so
  # STATUS stays empty and the tree always reports as clean.
  if [[ "$(command git config --get red-pill.hide-dirty)" != "1" ]]; then
    # --ignore-submodules=dirty requires git newer than 1.7.2
    # (POST_1_7_2_GIT is computed at the bottom of this file).
    if [[ $POST_1_7_2_GIT -gt 0 ]]; then
      FLAGS+='--ignore-submodules=dirty'
    fi
    if [[ "$DISABLE_UNTRACKED_FILES_DIRTY" == "true" ]]; then
      FLAGS+='--untracked-files=no'
    fi
    # Any porcelain output means the tree is dirty; tail -n1 keeps the
    # captured value to one line.
    STATUS=$(command git status ${FLAGS} 2> /dev/null | tail -n1)
  fi
  if [[ -n $STATUS ]]; then
    echo "$ZSH_THEME_GIT_PROMPT_DIRTY"
  else
    echo "$ZSH_THEME_GIT_PROMPT_CLEAN"
  fi
}
# Gets the difference between the local and remote branches.
# Echoes the EQUAL/AHEAD/BEHIND/DIVERGED theme marker; when
# ZSH_THEME_GIT_PROMPT_REMOTE_STATUS_DETAILED is set, a detailed form with
# the upstream name and commit counts is emitted instead.
git_remote_status()
{
  local remote ahead behind git_remote_status git_remote_status_detailed
  # Resolve the upstream ref of the current branch and strip the leading
  # 'refs/remotes/' using zsh ${.../pattern/} substitution.
  remote=${$(command git rev-parse --verify ${hook_com[branch]}@{upstream} --symbolic-full-name 2>/dev/null)/refs\/remotes\/}
  if [[ -n ${remote} ]]; then
    # Commits only on our side / only on the upstream side.
    ahead=$(command git rev-list ${hook_com[branch]}@{upstream}..HEAD 2>/dev/null | wc -l)
    behind=$(command git rev-list HEAD..${hook_com[branch]}@{upstream} 2>/dev/null | wc -l)
    if [[ $ahead -eq 0 ]] && [[ $behind -eq 0 ]]; then
      git_remote_status="$ZSH_THEME_GIT_PROMPT_EQUAL_REMOTE"
    elif [[ $ahead -gt 0 ]] && [[ $behind -eq 0 ]]; then
      git_remote_status="$ZSH_THEME_GIT_PROMPT_AHEAD_REMOTE"
      git_remote_status_detailed="$ZSH_THEME_GIT_PROMPT_AHEAD_REMOTE_COLOR$ZSH_THEME_GIT_PROMPT_AHEAD_REMOTE$((ahead))%{$reset_color%}"
    elif [[ $behind -gt 0 ]] && [[ $ahead -eq 0 ]]; then
      git_remote_status="$ZSH_THEME_GIT_PROMPT_BEHIND_REMOTE"
      git_remote_status_detailed="$ZSH_THEME_GIT_PROMPT_BEHIND_REMOTE_COLOR$ZSH_THEME_GIT_PROMPT_BEHIND_REMOTE$((behind))%{$reset_color%}"
    elif [[ $ahead -gt 0 ]] && [[ $behind -gt 0 ]]; then
      git_remote_status="$ZSH_THEME_GIT_PROMPT_DIVERGED_REMOTE"
      git_remote_status_detailed="$ZSH_THEME_GIT_PROMPT_AHEAD_REMOTE_COLOR$ZSH_THEME_GIT_PROMPT_AHEAD_REMOTE$((ahead))%{$reset_color%}$ZSH_THEME_GIT_PROMPT_BEHIND_REMOTE_COLOR$ZSH_THEME_GIT_PROMPT_BEHIND_REMOTE$((behind))%{$reset_color%}"
    fi
    if [[ -n $ZSH_THEME_GIT_PROMPT_REMOTE_STATUS_DETAILED ]]; then
      git_remote_status="$ZSH_THEME_GIT_PROMPT_REMOTE_STATUS_PREFIX$remote$git_remote_status_detailed$ZSH_THEME_GIT_PROMPT_REMOTE_STATUS_SUFFIX"
    fi
    echo $git_remote_status
  fi
}
# Outputs the name of the current branch
# Usage example: git pull origin $(git_current_branch)
# Using '--quiet' with 'symbolic-ref' will not cause a fatal error (128) if
# it's not a symbolic ref, but in a Git repo.
git_current_branch()
{
  local ref
  ref=$(command git symbolic-ref --quiet HEAD 2> /dev/null)
  local ret=$?
  if [[ $ret != 0 ]]; then
    [[ $ret == 128 ]] && return # no git repo.
    # Detached HEAD: fall back to the short commit SHA.
    ref=$(command git rev-parse --short HEAD 2> /dev/null) || return
  fi
  # Strip the refs/heads/ prefix so only the bare branch name is printed.
  echo ${ref#refs/heads/}
}
# Gets the number of commits ahead from remote.
# Emits nothing when not in a git repo, when there is no upstream, or when
# the branch is not ahead.
git_commits_ahead()
{
  if command git rev-parse --git-dir &>/dev/null; then
    # Use 'command git' and silence stderr for consistency with the rest of
    # this file: without an upstream, rev-list prints an error and yields an
    # empty count, which the previous string comparison ("" != 0) treated as
    # "ahead" and printed an empty marker.
    local commits="$(command git rev-list --count @{upstream}..HEAD 2>/dev/null)"
    if [[ -n "$commits" && "$commits" != 0 ]]; then
      echo "$ZSH_THEME_GIT_COMMITS_AHEAD_PREFIX$commits$ZSH_THEME_GIT_COMMITS_AHEAD_SUFFIX"
    fi
  fi
}
# Gets the number of commits behind remote.
# Emits nothing when not in a git repo, when there is no upstream, or when
# the branch is not behind.
git_commits_behind()
{
  if command git rev-parse --git-dir &>/dev/null; then
    # Use 'command git' and silence stderr for consistency with the rest of
    # this file: without an upstream, rev-list prints an error and yields an
    # empty count, which the previous string comparison ("" != 0) treated as
    # "behind" and printed an empty marker.
    local commits="$(command git rev-list --count HEAD..@{upstream} 2>/dev/null)"
    if [[ -n "$commits" && "$commits" != 0 ]]; then
      echo "$ZSH_THEME_GIT_COMMITS_BEHIND_PREFIX$commits$ZSH_THEME_GIT_COMMITS_BEHIND_SUFFIX"
    fi
  fi
}
# Outputs if current branch is ahead of remote.
# NOTE(review): hardcodes the 'origin' remote rather than the configured
# upstream — verify this matches how the theme is meant to be used.
git_prompt_ahead()
{
  if [[ -n "$(command git rev-list origin/$(git_current_branch)..HEAD 2> /dev/null)" ]]; then
    echo "$ZSH_THEME_GIT_PROMPT_AHEAD"
  fi
}
# Outputs if current branch is behind remote.
# NOTE(review): hardcodes the 'origin' remote rather than the configured
# upstream — verify this matches how the theme is meant to be used.
git_prompt_behind()
{
  if [[ -n "$(command git rev-list HEAD..origin/$(git_current_branch) 2> /dev/null)" ]]; then
    echo "$ZSH_THEME_GIT_PROMPT_BEHIND"
  fi
}
# Outputs if current branch exists on remote or not.
# Checks for a ref named origin/<branch>; echoes the EXISTS or MISSING
# theme marker accordingly.
git_prompt_remote()
{
  if [[ -n "$(command git show-ref origin/$(git_current_branch) 2> /dev/null)" ]]; then
    echo "$ZSH_THEME_GIT_PROMPT_REMOTE_EXISTS"
  else
    echo "$ZSH_THEME_GIT_PROMPT_REMOTE_MISSING"
  fi
}
# Formats prompt string for current git commit short SHA.
# Prints nothing when not inside a git repo (rev-parse fails).
git_prompt_short_sha()
{
  local SHA
  SHA=$(command git rev-parse --short HEAD 2> /dev/null) && echo "$ZSH_THEME_GIT_PROMPT_SHA_BEFORE$SHA$ZSH_THEME_GIT_PROMPT_SHA_AFTER"
}
# Formats prompt string for current git commit long SHA.
# Prints nothing when not inside a git repo (rev-parse fails).
git_prompt_long_sha()
{
  local SHA
  SHA=$(command git rev-parse HEAD 2> /dev/null) && echo "$ZSH_THEME_GIT_PROMPT_SHA_BEFORE$SHA$ZSH_THEME_GIT_PROMPT_SHA_AFTER"
}
# Get the status of the working tree.
# Builds a string of theme markers (untracked/added/modified/renamed/deleted/
# stashed/unmerged/ahead/behind/diverged) from 'git status --porcelain -b'.
git_prompt_status()
{
  local INDEX STATUS
  INDEX=$(command git status --porcelain -b 2> /dev/null)
  STATUS=""
  # Run each pipeline directly rather than the previous 'if $(...)' form,
  # which executed the (discarded) command output and only worked by accident
  # because that output was redirected away; also use 'command grep'
  # consistently so user aliases/functions cannot interfere.
  if echo "$INDEX" | command grep -E '^\?\? ' &> /dev/null; then
    STATUS="$ZSH_THEME_GIT_PROMPT_UNTRACKED$STATUS"
  fi
  if echo "$INDEX" | command grep '^A ' &> /dev/null; then
    STATUS="$ZSH_THEME_GIT_PROMPT_ADDED$STATUS"
  elif echo "$INDEX" | command grep '^M ' &> /dev/null; then
    STATUS="$ZSH_THEME_GIT_PROMPT_ADDED$STATUS"
  elif echo "$INDEX" | command grep '^MM ' &> /dev/null; then
    STATUS="$ZSH_THEME_GIT_PROMPT_ADDED$STATUS"
  fi
  if echo "$INDEX" | command grep '^ M ' &> /dev/null; then
    STATUS="$ZSH_THEME_GIT_PROMPT_MODIFIED$STATUS"
  elif echo "$INDEX" | command grep '^AM ' &> /dev/null; then
    STATUS="$ZSH_THEME_GIT_PROMPT_MODIFIED$STATUS"
  elif echo "$INDEX" | command grep '^MM ' &> /dev/null; then
    STATUS="$ZSH_THEME_GIT_PROMPT_MODIFIED$STATUS"
  elif echo "$INDEX" | command grep '^ T ' &> /dev/null; then
    STATUS="$ZSH_THEME_GIT_PROMPT_MODIFIED$STATUS"
  fi
  if echo "$INDEX" | command grep '^R ' &> /dev/null; then
    STATUS="$ZSH_THEME_GIT_PROMPT_RENAMED$STATUS"
  fi
  if echo "$INDEX" | command grep '^ D ' &> /dev/null; then
    STATUS="$ZSH_THEME_GIT_PROMPT_DELETED$STATUS"
  elif echo "$INDEX" | command grep '^D ' &> /dev/null; then
    STATUS="$ZSH_THEME_GIT_PROMPT_DELETED$STATUS"
  elif echo "$INDEX" | command grep '^AD ' &> /dev/null; then
    STATUS="$ZSH_THEME_GIT_PROMPT_DELETED$STATUS"
  fi
  if command git rev-parse --verify refs/stash >/dev/null 2>&1; then
    STATUS="$ZSH_THEME_GIT_PROMPT_STASHED$STATUS"
  fi
  if echo "$INDEX" | command grep '^UU ' &> /dev/null; then
    STATUS="$ZSH_THEME_GIT_PROMPT_UNMERGED$STATUS"
  fi
  # The '## branch...upstream [ahead N, behind M]' header line from -b drives
  # the ahead/behind/diverged markers.
  if echo "$INDEX" | command grep '^## [^ ]\+ .*ahead' &> /dev/null; then
    STATUS="$ZSH_THEME_GIT_PROMPT_AHEAD$STATUS"
  fi
  if echo "$INDEX" | command grep '^## [^ ]\+ .*behind' &> /dev/null; then
    STATUS="$ZSH_THEME_GIT_PROMPT_BEHIND$STATUS"
  fi
  if echo "$INDEX" | command grep '^## [^ ]\+ .*diverged' &> /dev/null; then
    STATUS="$ZSH_THEME_GIT_PROMPT_DIVERGED$STATUS"
  fi
  echo $STATUS
}
# Compares the provided version of git to the version installed and on path
# Outputs -1, 0, or 1 if the installed version is less than, equal to, or
# greater than the input version, respectively.
git_compare_version()
{
  local INPUT_GIT_VERSION INSTALLED_GIT_VERSION
  # ${(s/./)...} splits on '.'; note zsh arrays are 1-indexed below.
  INPUT_GIT_VERSION=(${(s/./)1})
  # 'git --version' prints e.g. "git version 2.39.2"; word 3 is the version.
  INSTALLED_GIT_VERSION=($(command git --version 2>/dev/null))
  INSTALLED_GIT_VERSION=(${(s/./)INSTALLED_GIT_VERSION[3]})
  # Compare major, minor, patch in order; the first difference decides.
  for i in {1..3}; do
    if [[ $INSTALLED_GIT_VERSION[$i] -gt $INPUT_GIT_VERSION[$i] ]]; then
      echo 1
      return 0
    fi
    if [[ $INSTALLED_GIT_VERSION[$i] -lt $INPUT_GIT_VERSION[$i] ]]; then
      echo -1
      return 0
    fi
  done
  echo 0
}
# Outputs the name of the current user (git config user.name).
# Usage example: $(git_current_user_name)
function git_current_user_name() {
  command git config user.name 2>/dev/null
}
# Outputs the email of the current user (git config user.email).
# Usage example: $(git_current_user_email)
function git_current_user_email() {
  command git config user.email 2>/dev/null
}
# This is unlikely to change within a session so make it all statically
# assigned: 1/0/-1 depending on whether the installed git is newer than /
# equal to / older than 1.7.2 (used by parse_git_dirty above).
POST_1_7_2_GIT=$(git_compare_version "1.7.2")
# Clean up the namespace slightly by removing the checker function
unfunction git_compare_version
|
def compare_sum(a, b):
    """Return True if a + b equals 10, otherwise False.

    Args:
        a: First addend.
        b: Second addend.

    Returns:
        bool: whether the two values sum to exactly 10.
    """
    # The comparison already yields a bool; no if/else branching needed.
    return a + b == 10
class GCSProcessor:
    """Collects Google Cloud Storage span tags into self.data["gcs"]."""

    # Optional GCS tag suffixes: each is popped with a default of None.
    _OPTIONAL_TAGS = ("bucket", "object", "entity", "range", "sourceBucket")

    def __init__(self):
        # Nested dict that accumulates the extracted tag values.
        self.data = {"gcs": {}}

    def process_gcs_tags(self, span):
        """Move the 'gcs.*' entries out of span.tags into self.data["gcs"]."""
        gcs = self.data["gcs"]
        # 'gcs.op' is mandatory: popping without a default raises KeyError
        # when the tag is absent, before any other tag is consumed.
        gcs["op"] = span.tags.pop('gcs.op')
        for suffix in self._OPTIONAL_TAGS:
            gcs[suffix] = span.tags.pop('gcs.' + suffix, None)
import React from 'react';
import { css } from 'styled-components';
import StyledIcon from 'sharedUI/Icon/StyledIcon';
// Shared horizontal gap applied between the header icons.
const spacing = css`
  margin-right: 12px;
`;

// Right-hand icon cluster for the album header: filter, heart and
// overflow-menu (vertical dots) icons. Purely presentational; takes no props.
const AlbumHeaderRight = () => (
  <>
    <StyledIcon type="FILTER" additionalStyle={spacing} />
    {/* Heart is rendered as an <img> (useImg) with explicit dimensions and
        nudged 1px down to visually align with the other icons. */}
    <StyledIcon
      useImg
      type="HEART"
      width={31}
      height={28}
      additionalStyle={`${spacing} ${css`
        top: 1px;
      `}`}
    />
    <StyledIcon
      type="DOTS_VERTICAL"
      additionalStyle={css`
        top: -1px;
      `}
    />
  </>
);

export default AlbumHeaderRight;
|
/////////////////////////////////////////////////////////////
// CollaboratorRepositoryHibernate.java
// gooru-api
// Created by Gooru on 2014
// Copyright (c) 2014 Gooru. All rights reserved.
// http://www.goorulearning.org/
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
/////////////////////////////////////////////////////////////
package org.ednovo.gooru.infrastructure.persistence.hibernate.collaborator;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.ednovo.gooru.core.api.model.Identity;
import org.ednovo.gooru.core.api.model.Organization;
import org.ednovo.gooru.core.api.model.User;
import org.ednovo.gooru.core.api.model.UserContentAssoc;
import org.ednovo.gooru.infrastructure.persistence.hibernate.BaseRepositoryHibernate;
import org.hibernate.Query;
import org.hibernate.Session;
import org.hibernate.type.StandardBasicTypes;
import org.springframework.stereotype.Repository;
@Repository
public class CollaboratorRepositoryHibernate extends BaseRepositoryHibernate implements CollaboratorRepository {

	/**
	 * Returns the most recent collaborator association between the given
	 * content item and user, or null when no association exists.
	 */
	@Override
	public UserContentAssoc findCollaboratorById(String gooruOid, String gooruUid) {
		String hql = "from UserContentAssoc uc where uc.content.gooruOid=:gooruOid and uc.user.partyUid=:gooruUid order by uc.associationDate desc";
		Query query = getSession().createQuery(hql);
		query.setParameter("gooruOid", gooruOid);
		query.setParameter("gooruUid", gooruUid);
		// Execute the query once; the previous code called query.list() twice,
		// running the query two times.
		List<UserContentAssoc> results = list(query);
		return results.isEmpty() ? null : results.get(0);
	}

	/**
	 * Suggests collaborator email ids starting with the given prefix, limited
	 * to identities previously associated by the given user.
	 */
	@Override
	public List<String> collaboratorSuggest(String text, String gooruUid) {
		// Bind the prefix as a named parameter; the previous string
		// concatenation (with a broken quote "escape") was vulnerable to SQL
		// injection through the user-supplied text.
		String sql = "select external_id as mailId from identity i inner join user_content_assoc uc on uc.user_uid = i.user_uid where uc.associated_by_uid=:gooruUid and i.external_id like :externalIdPrefix";
		Query query = getSession().createSQLQuery(sql).addScalar("mailId", StandardBasicTypes.STRING);
		query.setParameter("gooruUid", gooruUid);
		query.setParameter("externalIdPrefix", text + "%");
		return list(query);
	}

	/**
	 * Returns all collaborator associations for a content item, or null when
	 * there are none (kept for backward compatibility with existing callers).
	 */
	@Override
	public List<UserContentAssoc> getCollaboratorsById(String gooruOid) {
		String hql = "from UserContentAssoc uc where uc.content.gooruOid=:gooruOid ";
		Query query = getSession().createQuery(hql);
		query.setParameter("gooruOid", gooruOid);
		List<UserContentAssoc> userContentAssoc = list(query);
		return userContentAssoc.size() > 0 ? userContentAssoc : null;
	}

	/** Counts collaborator associations for a content item. */
	public Long getCollaboratorsCountById(String gooruOid) {
		String hql = "select count(*) from UserContentAssoc uc where uc.content.gooruOid=:gooruOid ";
		Query query = getSession().createQuery(hql);
		query.setParameter("gooruOid", gooruOid);
		return (Long) query.list().get(0);
	}

	/**
	 * Returns the users holding 'edit' permission on the given content item,
	 * optionally restricted to a single user uid. Each result row is mapped to
	 * a detached User with its organization and primary identity populated.
	 */
	@Override
	public List<User> findCollaborators(String gooruContentId, String userUid) {
		List<User> userList = new ArrayList<User>();
		// Bind identifiers as named parameters; the previous concatenation of
		// gooruContentId/userUid into the SQL string allowed SQL injection.
		String findCollaborators = "Select u.user_id, u.gooru_uid, u.firstname, u.lastname, i.external_id,u.username, u.organization_uid, u.primary_organization_uid from user u, content c , content_permission p, identity i where gooru_oid = :gooruOid and p.permission = 'edit' and u.gooru_uid = i.user_uid and c.content_id = p.content_id and u.gooru_uid = p.party_uid ";
		if (userUid != null) {
			findCollaborators += " and p.party_uid = :userUid";
		}
		Session session = getSession();
		Query query = session.createSQLQuery(findCollaborators).addScalar("user_id", StandardBasicTypes.INTEGER).addScalar("gooru_uid", StandardBasicTypes.STRING).addScalar("firstname", StandardBasicTypes.STRING).addScalar("lastname", StandardBasicTypes.STRING)
				.addScalar("external_id", StandardBasicTypes.STRING).addScalar("username", StandardBasicTypes.STRING).addScalar("organization_uid", StandardBasicTypes.STRING).addScalar("primary_organization_uid", StandardBasicTypes.STRING);
		query.setParameter("gooruOid", gooruContentId);
		if (userUid != null) {
			query.setParameter("userUid", userUid);
		}
		List<Object[]> results = arrayList(query);
		for (Object[] object : results) {
			Set<Identity> idSet = new HashSet<Identity>();
			User user = new User();
			Identity id = new Identity();
			user.setPartyUid((String) object[1]);
			user.setUserId((Integer) object[0]);
			user.setGooruUId((String) object[1]);
			user.setFirstName((String) object[2]);
			user.setLastName((String) object[3]);
			id.setExternalId((String) object[4]);
			user.setUsername((String) object[5]);
			// Fall back to the primary organization when no direct one is set.
			String organizationUid = (String) object[6];
			if (organizationUid == null) {
				organizationUid = (String) object[7];
			}
			Organization organization = new Organization();
			organization.setPartyUid(organizationUid);
			user.setOrganization(organization);
			idSet.add(id);
			user.setIdentities(idSet);
			user.setEmailId(id.getExternalId());
			userList.add(user);
		}
		return userList;
	}
}
|
<reponame>maxgrabenhorst/messenger4j
package com.github.messenger4j.send.message.template;
import static java.util.Optional.empty;
import com.github.messenger4j.common.ImageAspectRatio;
import com.github.messenger4j.internal.Lists;
import com.github.messenger4j.send.message.template.common.Element;
import java.util.List;
import java.util.Optional;
import lombok.EqualsAndHashCode;
import lombok.NonNull;
import lombok.ToString;
/**
* @author <NAME>
* @since 1.0.0
*/
@ToString(callSuper = true)
@EqualsAndHashCode(callSuper = true)
public final class GenericTemplate extends Template {

    // Defensive immutable copy of the elements passed at construction.
    private final List<Element> elements;
    private final Optional<ImageAspectRatio> imageAspectRatio;
    private final Optional<Boolean> sharable;

    // Private: instances are created through the static create(...) factories.
    private GenericTemplate(
            List<Element> elements,
            Optional<ImageAspectRatio> imageAspectRatio,
            Optional<Boolean> sharable) {
        super(Type.GENERIC);
        this.elements = Lists.immutableList(elements);
        this.imageAspectRatio = imageAspectRatio;
        this.sharable = sharable;
    }

    /**
     * Creates a generic template with empty aspect-ratio and sharable options.
     */
    public static GenericTemplate create(@NonNull List<Element> elements) {
        return create(elements, empty(), empty());
    }

    /**
     * Creates a generic template with explicit optional aspect-ratio and
     * sharable settings.
     */
    public static GenericTemplate create(
            @NonNull List<Element> elements,
            @NonNull Optional<ImageAspectRatio> imageAspectRatio,
            @NonNull Optional<Boolean> sharable) {
        return new GenericTemplate(elements, imageAspectRatio, sharable);
    }

    public List<Element> elements() {
        return elements;
    }

    public Optional<ImageAspectRatio> imageAspectRatio() {
        return imageAspectRatio;
    }

    public Optional<Boolean> sharable() {
        return sharable;
    }
}
|
<reponame>huang054/web
package com.springMvc.servlet;
import java.util.Map;
/**
 * Simple holder pairing a view name with the model values to render it with
 * (a minimal analogue of Spring MVC's ModelAndView).
 */
public class ModelAndView {

    // Name of the view template to render.
    private String view;

    /** Model Map */
    // Values to expose to the view during rendering.
    private Map<String,Object> model;

    // Convenience constructor for a view with no model data.
    public ModelAndView(String view){
        this.view = view;
    }

    public ModelAndView(String view, Map<String,Object> model){
        this.view = view;
        this.model = model;
    }

    public String getView() {
        return view;
    }

    public void setView(String view) {
        this.view = view;
    }

    public Map<String, Object> getModel() {
        return model;
    }

    public void setModel(Map<String, Object> model) {
        this.model = model;
    }
}
|
<reponame>bsutton/IrrigationForPi
package au.org.noojee.irrigation.dao;
import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.Query;
import com.pi4j.io.gpio.Pin;
import au.org.noojee.irrigation.entities.EndPoint;
import au.org.noojee.irrigation.entities.GardenBed;
import au.org.noojee.irrigation.entities.GardenFeature;
/**
 * JPA data-access object for GardenFeature entities (and their EndPoint /
 * GardenBed subtypes). All operations use the shared EntityManager supplied
 * by EntityManagerProvider.
 */
public class GardenFeatureDao
{
	/** Looks up a single feature by primary key, or null when absent. */
	public GardenFeature getById(long id)
	{
		EntityManager em = EntityManagerProvider.getEntityManager();
		return em.find(GardenFeature.class, id);
	}

	/**
	 * Returns every GardenFeature row.
	 * NOTE(review): the query selects GardenFeature but the result is cast to
	 * List of GardenBed — a non-GardenBed feature in the result would cause a
	 * ClassCastException when accessed. Confirm whether the query should
	 * target GardenBed instead.
	 */
	@SuppressWarnings("unchecked")
	public List<GardenBed> getAll()
	{
		EntityManager em = EntityManagerProvider.getEntityManager();
		Query query = em.createQuery("SELECT e FROM GardenFeature e");
		return (List<GardenBed>) query.getResultList();
	}

	/**
	 * Returns the end points attached to the given Raspberry Pi GPIO pin,
	 * ordered case-insensitively by name.
	 */
	@SuppressWarnings("unchecked")
	public List<EndPoint> getByPin(Pin piPin)
	{
		EntityManager em = EntityManagerProvider.getEntityManager();
		Query query = em
				.createQuery("SELECT e FROM EndPoint e where e.pinNo = :pinNo order by LOWER(e.endPointName)");
		query.setParameter("pinNo", piPin.getAddress());
		return (List<EndPoint>) query.getResultList();
	}

	/** Bulk-deletes every GardenFeature via a JPQL delete statement. */
	public void deleteAll()
	{
		EntityManager em = EntityManagerProvider.getEntityManager();
		Query q2 = em.createQuery("DELETE FROM GardenFeature e");
		q2.executeUpdate();
	}

	public void persist(GardenFeature gardenFeature)
	{
		EntityManager em = EntityManagerProvider.getEntityManager();
		em.persist(gardenFeature);
	}

	/** Re-fetches the entity in the current context before removing it. */
	public void delete(GardenFeature gardenFeature)
	{
		EntityManager em = EntityManagerProvider.getEntityManager();
		gardenFeature = em.find(GardenFeature.class, gardenFeature.getId());
		em.remove(gardenFeature);
	}

	public void merge(GardenFeature gardenFeature)
	{
		EntityManager em = EntityManagerProvider.getEntityManager();
		em.merge(gardenFeature);
	}
}
|
package calc
/**
* Assume that you are playing a Tic Tac Toe game with an opponent. You are playing as x while your opponent plays as o.
*
* The game is played as follows:
*
* | x| | x|o| x|o| x|o| x|o|a
* | ----- ----- ----- ----- -----
* | | | | | | | o| | o|b|c
* | ----- ----- ----- ----- -----
* | | | | | x| | x| | x|d|e
*
* Find all the possible grid tiles where putting an x-mark will help you guarantee a win under best play.
*
* (The answer is b or e; the question is, can we demonstrate this algorithmically?)
* It looks to me like c is a very good move as well; however, I don't see how o can win if I play perfectly from
* initial position (c).
*
*/
object Calc34 extends App {
  //noinspection ZeroIndexToHead
  // Immutable 3x3 board; rows(y)(x) holds 'x', 'o' or ' ' (empty).
  case class Board(rows: Seq[Seq[Char]] = for (y <- 1 to 3) yield Seq(' ', ' ', ' ')) {
    // Returns a new board with `piece` placed at pos = (x, y).
    def set(piece: Char, pos: (Int, Int)): Board = {
      require(piece == 'x' || piece == 'o')
      Board(rows.updated(pos._2, rows(pos._2).updated(pos._1, piece)))
    }
    // True when the cell at pos = (x, y) is empty.
    def free(pos: (Int, Int)): Boolean = rows(pos._2)(pos._1) == ' '
    // All currently empty cells as (x, y) pairs.
    def frees: Seq[(Int, Int)] = for {
      y <- 0 until 3
      x <- 0 until 3 if free((x, y))
    } yield (x, y)
    // Game over: somebody won, or no empty cell remains (draw).
    def done: Boolean = win.nonEmpty || !rows.exists(row => row.contains(' '))
    // The winning piece, if any row/column/diagonal of equal non-blank marks exists.
    def win: Option[Char] = {
      // NOTE(review): `[Char]` here declares a type parameter that shadows
      // scala.Char; the comparisons still compile via universal equality, but
      // the parameter is misleading and could simply be removed.
      def equal3[Char](a: Char, b: Char, c: Char): (Boolean, Char) = (a == b && b == c && a != ' ', a)
      val acrossTop = equal3(rows(0)(0), rows(0)(1), rows(0)(2))
      val acrossMiddle = equal3(rows(1)(0), rows(1)(1), rows(1)(2))
      val acrossBottom = equal3(rows(2)(0), rows(2)(1), rows(2)(2))
      val downLeft = equal3(rows(0)(0), rows(1)(0), rows(2)(0))
      val downMiddle = equal3(rows(0)(1), rows(1)(1), rows(2)(1))
      val downRight = equal3(rows(0)(2), rows(1)(2), rows(2)(2))
      val diagLeft = equal3(rows(0)(0), rows(1)(1), rows(2)(2))
      val diagRight = equal3(rows(0)(2), rows(1)(1), rows(2)(0))
      val checks = Seq(acrossTop, acrossMiddle, acrossBottom, downLeft, downMiddle, downRight, diagLeft, diagRight)
      checks.find(_._1).map(_._2)
    }
    // Every empty cell that would complete three-in-a-row for `piece`
    // (rows, then columns, then diagonals), deduplicated.
    def winningMoves(piece: Char): Seq[(Int, Int)] = {
      val moves = new scala.collection.mutable.ListBuffer[(Int, Int)]
      // Rows: one blank plus two of `piece`.
      if (rows(0)(0) == ' ' && rows(0)(1) == piece && rows(0)(2) == piece) moves.append((0, 0))
      if (rows(1)(0) == ' ' && rows(1)(1) == piece && rows(1)(2) == piece) moves.append((0, 1))
      if (rows(2)(0) == ' ' && rows(2)(1) == piece && rows(2)(2) == piece) moves.append((0, 2))
      if (rows(0)(0) == piece && rows(0)(1) == ' ' && rows(0)(2) == piece) moves.append((1, 0))
      if (rows(1)(0) == piece && rows(1)(1) == ' ' && rows(1)(2) == piece) moves.append((1, 1))
      if (rows(2)(0) == piece && rows(2)(1) == ' ' && rows(2)(2) == piece) moves.append((1, 2))
      if (rows(0)(0) == piece && rows(0)(1) == piece && rows(0)(2) == ' ') moves.append((2, 0))
      if (rows(1)(0) == piece && rows(1)(1) == piece && rows(1)(2) == ' ') moves.append((2, 1))
      if (rows(2)(0) == piece && rows(2)(1) == piece && rows(2)(2) == ' ') moves.append((2, 2))
      // Columns.
      if (rows(0)(0) == ' ' && rows(1)(0) == piece && rows(2)(0) == piece) moves.append((0, 0))
      if (rows(0)(1) == ' ' && rows(1)(1) == piece && rows(2)(1) == piece) moves.append((1, 0))
      if (rows(0)(2) == ' ' && rows(1)(2) == piece && rows(2)(2) == piece) moves.append((2, 0))
      if (rows(0)(0) == piece && rows(1)(0) == ' ' && rows(2)(0) == piece) moves.append((0, 1))
      if (rows(0)(1) == piece && rows(1)(1) == ' ' && rows(2)(1) == piece) moves.append((1, 1))
      if (rows(0)(2) == piece && rows(1)(2) == ' ' && rows(2)(2) == piece) moves.append((2, 1))
      if (rows(0)(0) == piece && rows(1)(0) == piece && rows(2)(0) == ' ') moves.append((0, 2))
      if (rows(0)(1) == piece && rows(1)(1) == piece && rows(2)(1) == ' ') moves.append((1, 2))
      if (rows(0)(2) == piece && rows(1)(2) == piece && rows(2)(2) == ' ') moves.append((2, 2))
      // Diagonals.
      if (rows(0)(0) == ' ' && rows(1)(1) == piece && rows(2)(2) == piece) moves.append((0, 0))
      if (rows(0)(0) == piece && rows(1)(1) == ' ' && rows(2)(2) == piece) moves.append((1, 1))
      if (rows(0)(0) == piece && rows(1)(1) == piece && rows(2)(2) == ' ') moves.append((2, 2))
      if (rows(2)(0) == ' ' && rows(1)(1) == piece && rows(0)(2) == piece) moves.append((0, 2))
      if (rows(2)(0) == piece && rows(1)(1) == ' ' && rows(0)(2) == piece) moves.append((1, 1))
      if (rows(2)(0) == piece && rows(1)(1) == piece && rows(0)(2) == ' ') moves.append((2, 0))
      moves.toSet.toSeq
    }
    // Cells the opponent could win on next move (i.e. their winning moves).
    def blockingMoves(piece: Char): Seq[(Int, Int)] = winningMoves(nextPiece(piece))
    // Heuristic move choice: win now > block opponent > the free cell whose
    // full game-tree expansion yields the most wins for `piece`.
    def bestMove(piece: Char): (Int, Int) = {
      val wins = winningMoves(piece)
      if (wins.nonEmpty) wins.head
      else {
        val blocks = blockingMoves(piece)
        if (blocks.nonEmpty) blocks.head
        else {
          // Enumerates all terminal boards reachable from `board` with `piece`
          // to move (draws are dropped, only won boards are returned).
          def recurse(piece: Char, board: Board): Seq[Board] = {
            if (board.win.nonEmpty) Seq(board)
            else if (board.done) Seq.empty
            else {
              board.frees.flatMap { case (x, y) =>
                recurse(nextPiece(piece), board.set(piece, (x, y)))
              }
            }
          }
          // Rank each candidate cell by how many terminal boards `piece` wins.
          val outcomes: Seq[((Int, Int), Seq[Board])] = frees.map(pos => pos -> recurse(piece, this.set(piece, pos)))
          val wins = outcomes.map { case (pos, games) => (pos, games.count(_.win.contains(piece)))}.sortBy(_._2)
          wins.last._1
        }
      }
    }
    // ASCII rendering of the grid; stripMargin removes text before each '|'.
    override def toString = {
      s"""
         ||${rows(0)(0)}|${rows(0)(1)}|${rows(0)(2)}|
         ||-----|
         ||${rows(1)(0)}|${rows(1)(1)}|${rows(1)(2)}|
         ||-----|
         ||${rows(2)(0)}|${rows(2)(1)}|${rows(2)(2)}|
      """.stripMargin
    }
  }
  // Alternates turns. NOTE(review): non-exhaustive match — any char other
  // than 'x'/'o' throws MatchError (callers only pass those two).
  def nextPiece(piece: Char): Char = piece match {
    case 'x' => 'o'
    case 'o' => 'x'
  }
  // The puzzle's starting position from the header comment diagram.
  val initial = Board().set('x', (0, 0)).set('o', (1, 0)).set('o', (0, 1)).set('x', (0, 2))
  // Plays out a game to completion, both sides using bestMove, returning the
  // sequence of boards visited.
  @scala.annotation.tailrec
  def play(boards: Seq[Board], piece: Char): Seq[Board] = {
    if (boards.last.done) boards
    else {
      val curBoard = boards.last
      play(boards :+ curBoard.set(piece, curBoard.bestMove(piece)), nextPiece(piece))
    }
  }
  // def allPossibleGames(board: Board, piece: Char): Seq[Seq[Board]] = {
  //   for (free <- board.frees)
  // }
  // Try each candidate x-move from the puzzle, then let 'o' respond.
  val result = for {
    init <- Seq((2, 0), (1, 1), (2, 1), (1, 2), (2, 2))
  } yield init -> play(Seq(initial.set('x', init)), 'o')
  result.foreach(println)
  result.map(x => x._1 -> x._2.map(_.win)).foreach(println)
}
|
package me.minidigger.minicraft.model;
import com.google.common.base.MoreObjects;
import com.google.common.base.Objects;
/**
 * Integer position of a block in the world (x, y, z).
 *
 * Instances are immutable: the coordinates are assigned once in the
 * constructor and there are no setters.
 */
public class BlockPosition {

    // final makes the existing immutability explicit and compiler-enforced;
    // no setter existed, so this is interface-compatible.
    private final int x, y, z;

    public BlockPosition(int x, int y, int z) {
        this.x = x;
        this.y = y;
        this.z = z;
    }

    public int getX() {
        return x;
    }

    public int getY() {
        return y;
    }

    public int getZ() {
        return z;
    }

    /** Two positions are equal iff all three coordinates match. */
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (!(o instanceof BlockPosition)) return false;
        BlockPosition that = (BlockPosition) o;
        return x == that.x &&
                y == that.y &&
                z == that.z;
    }

    @Override
    public int hashCode() {
        // Guava's Objects.hashCode keeps the hash consistent with equals().
        return Objects.hashCode(x, y, z);
    }

    @Override
    public String toString() {
        return MoreObjects.toStringHelper(this)
                .add("x", x)
                .add("y", y)
                .add("z", z)
                .toString();
    }
}
|
<reponame>ferzerkerx/repo-stats
import { GithubCollectorConfig } from './Types';
import { GithubConfig, GithubEntry, GithubService } from '../Types';
import { GithubCollectorService } from './GithubCollectorService';
/**
 * Builds the fixed GithubEntry fixture consumed by the tests below.
 */
function testEntry(): GithubEntry {
  const fixture: GithubEntry = {
    sha: 'someSha',
    createdAt: new Date('2018-12-06'),
    additions: 30,
    deletions: 6,
    author: 'someAuthor',
    filename: 'someFileName.txt'
  };
  return fixture;
}
describe('GithubCollectorsService', () => {
  // Stub GithubService whose commits() always resolves to one fixture entry,
  // so the collector can be exercised without network access.
  // NOTE(review): the githubConfig parameter is intentionally unused here.
  const githubService: GithubService = {
    commits: async (githubConfig: GithubConfig): Promise<GithubEntry[]> => {
      return [testEntry()];
    }
  };
  const githubCollectorsService: GithubCollectorService = new GithubCollectorService(
    githubService
  );

  it('should fetch githubMetrics', async () => {
    const githubCollectorConfig: GithubCollectorConfig = new GithubCollectorConfig(
      {
        repositoryName: 'someRepoName',
        orgName: 'someOrgName',
        since: '2018-11-20',
        until: '2020-11-20'
      }
    );
    // Snapshot pins the collector's full output shape for the stubbed entry.
    const data = await githubCollectorsService.fetch(githubCollectorConfig);
    expect(data).toMatchSnapshot();
  });
});
|
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.InputMismatchException;
import java.util.PriorityQueue;
/**
 * Baekjoon Online Judge problem 1626.
 *
 * Builds a minimum spanning tree with Kruskal's algorithm, roots it with a
 * DFS and fills binary-lifting ancestor tables.
 * NOTE(review): getResult only reports -1 for a disconnected graph and 0
 * otherwise, and the non-tree edges collected in `nontouch` are never
 * consumed — the solution appears unfinished; confirm against the intended
 * algorithm (problem 1626 is the second-smallest spanning tree).
 */
public class Boj1626 {
    // Candidate edges ordered by ascending cost (Kruskal input).
    private static PriorityQueue<Path> pq = new PriorityQueue<>();
    // Edges rejected by Kruskal (they would form a cycle), kept aside.
    private static PriorityQueue<Path> nontouch = new PriorityQueue<>();
    // Adjacency list of the resulting MST.
    private static ArrayList<Node>[] tree;
    // parent[v][p] = the 2^p-th ancestor of v (binary lifting table).
    private static int[][] parent;
    // Depth of each vertex in the MST rooted at vertex 0.
    private static int[] deep;
    private static boolean[] visit;
    // Union-find parent array; a negative value at a root is -(set size).
    private static int[] set;
    // Number of vertices.
    private static int V;

    // MST adjacency entry: neighbouring vertex plus edge cost.
    private static class Node {
        int node;
        int cost;
        public Node(int node, int cost){
            this.node = node;
            this.cost = cost;
        }
    }

    // Weighted undirected edge, ordered by cost for the priority queues.
    private static class Path implements Comparable<Path>{
        int from;
        int to;
        int cost;
        public Path(int from, int to, int cost) {
            this.from = from;
            this.to = to;
            this.cost = cost;
        }
        // NOTE(review): never returns 0, so equal-cost edges compare as
        // greater — fine for PriorityQueue ordering, but inconsistent with
        // equals().
        @Override
        public int compareTo(Path p) {
            return this.cost < p.cost ? -1: 1;
        }
    }

    public static void main(String[] args) {
        InputReader in = new InputReader(System.in);
        V = in.readInt();
        int E = in.readInt();
        init(V);
        // Edges arrive 1-indexed; store them 0-indexed.
        while(E-- > 0) {
            int x = in.readInt() - 1;
            int y = in.readInt() - 1;
            int cost = in.readInt();
            pq.offer(new Path(x, y, cost));
        }
        MST();
        dfs(0, 0);
        connecting();
        System.out.println(getResult());
    }

    // Allocates all per-vertex structures; set[i] = -1 marks singleton sets.
    private static void init(int N) {
        tree = new ArrayList[N];
        parent = new int[N][21];
        deep = new int[N];
        set = new int[N];
        visit = new boolean[N];
        for(int i = 0; i < N; i++) {
            tree[i] = new ArrayList<>();
            set[i] = -1;
        }
    }

    // Union-find root lookup with path compression.
    private static int find(int x) {
        if(set[x] < 0) return x;
        return set[x] = find(set[x]);
    }

    // Unions the two sets (smaller onto larger by size); returns true when
    // x and y were already in the same set (i.e. the edge closes a cycle).
    private static boolean merged(int x, int y) {
        x = find(x);
        y = find(y);
        if(x == y) return true;
        if(set[x] < set[y]){
            set[x] += set[y];
            set[y] = x;
        }
        else{
            set[y] += set[x];
            set[x] = y;
        }
        return false;
    }

    // Kruskal: accepted edges go into `tree`, rejected ones into `nontouch`.
    // Returns the total MST cost.
    private static int MST() {
        int minCost = 0;
        while(!pq.isEmpty()) {
            Path next = pq.poll();
            if(merged(next.from, next.to)){
                nontouch.offer(next);
                continue;
            }
            tree[next.from].add(new Node(next.to, next.cost));
            tree[next.to].add(new Node(next.from, next.cost));
            minCost += next.cost;
        }
        return minCost;
    }

    // Roots the MST at the start vertex, recording depth and direct parent.
    private static void dfs(int current, int depth){
        deep[current] = depth;
        visit[current] = true;
        for(Node next: tree[current]){
            if(visit[next.node]) continue;
            parent[next.node][0] = current;
            dfs(next.node, depth + 1);
        }
    }

    // Fills the binary-lifting table: 2^p ancestor = 2^(p-1) ancestor twice.
    private static void connecting(){
        for(int p = 1; p < 21; p++){
            for(int cur = 0; cur < V; cur++){
                parent[cur][p] = parent[parent[cur][p - 1]][p - 1];
            }
        }
    }

    // -1 when some vertex other than the root kept depth 0 (graph was not
    // connected, so no spanning tree exists); 0 otherwise.
    private static int getResult(){
        for(int i = 1; i < V; i++){
            if(deep[i] == 0) return -1;
        }
        return 0;
    }

    // Fast buffered reader over an InputStream (competitive-programming
    // style): hand-rolled tokenizing of ints/longs/doubles/strings.
    private static class InputReader {
        private InputStream stream;
        private byte[] buf = new byte[1024];
        private int curChar;
        private int numChars;
        private SpaceCharFilter filter;

        public InputReader(InputStream stream) {
            this.stream = stream;
        }

        // Returns the next raw byte, refilling the buffer as needed; -1 at EOF.
        public int read() {
            if (numChars == -1) {
                throw new InputMismatchException();
            }
            if (curChar >= numChars) {
                curChar = 0;
                try {
                    numChars = stream.read(buf);
                } catch (IOException e) {
                    throw new InputMismatchException();
                }
                if (numChars <= 0) {
                    return -1;
                }
            }
            return buf[curChar++];
        }

        // Parses an optionally signed decimal int; skips leading whitespace.
        public int readInt() {
            int c = read();
            while (isSpaceChar(c)) {
                c = read();
            }
            int sgn = 1;
            if (c == '-') {
                sgn = -1;
                c = read();
            }
            int res = 0;
            do {
                if (c < '0' || c > '9') {
                    throw new InputMismatchException();
                }
                res *= 10;
                res += c - '0';
                c = read();
            } while (!isSpaceChar(c));
            return res * sgn;
        }

        // Reads one whitespace-delimited token.
        public String readString() {
            int c = read();
            while (isSpaceChar(c)) {
                c = read();
            }
            StringBuilder res = new StringBuilder();
            do {
                res.appendCodePoint(c);
                c = read();
            } while (!isSpaceChar(c));
            return res.toString();
        }

        // Parses a double, supporting sign, decimal point and e/E exponents.
        public double readDouble() {
            int c = read();
            while (isSpaceChar(c)) {
                c = read();
            }
            int sgn = 1;
            if (c == '-') {
                sgn = -1;
                c = read();
            }
            double res = 0;
            while (!isSpaceChar(c) && c != '.') {
                if (c == 'e' || c == 'E') {
                    return res * Math.pow(10, readInt());
                }
                if (c < '0' || c > '9') {
                    throw new InputMismatchException();
                }
                res *= 10;
                res += c - '0';
                c = read();
            }
            if (c == '.') {
                c = read();
                double m = 1;
                while (!isSpaceChar(c)) {
                    if (c == 'e' || c == 'E') {
                        return res * Math.pow(10, readInt());
                    }
                    if (c < '0' || c > '9') {
                        throw new InputMismatchException();
                    }
                    m /= 10;
                    res += (c - '0') * m;
                    c = read();
                }
            }
            return res * sgn;
        }

        // Parses an optionally signed decimal long; skips leading whitespace.
        public long readLong() {
            int c = read();
            while (isSpaceChar(c)) {
                c = read();
            }
            int sgn = 1;
            if (c == '-') {
                sgn = -1;
                c = read();
            }
            long res = 0;
            do {
                if (c < '0' || c > '9') {
                    throw new InputMismatchException();
                }
                res *= 10;
                res += c - '0';
                c = read();
            } while (!isSpaceChar(c));
            return res * sgn;
        }

        // Whitespace test; delegates to the pluggable filter when set.
        public boolean isSpaceChar(int c) {
            if (filter != null) {
                return filter.isSpaceChar(c);
            }
            return c == ' ' || c == '\n' || c == '\r' || c == '\t' || c == -1;
        }

        public String next() {
            return readString();
        }

        public interface SpaceCharFilter {
            public boolean isSpaceChar(int ch);
        }
    }
}
|
#!/bin/sh
# Description: checks if wicd is running
# Start the wicd tray client in the background if it is installed and not
# already running. Uses POSIX 'command -v' instead of the non-portable
# 'which', and avoids the original's unquoted command substitution inside
# '[ ... ]' (which broke word-splitting and gave '[' stray arguments).
if command -v wicd-client >/dev/null 2>&1; then
    if ! pgrep wicd-client >/dev/null 2>&1; then
        wicd-client -t &
    fi
fi
|
// import React from "react"
// import "./container.scss"
// const Container = ({ size, children }) => <div className={size}>{children}</div>
// export default Container
|
<reponame>Starlord82/Misc-programs
import pygame
import time
import random
pygame.init()

# Basic RGB colour palette.
white = (255, 255, 255)
black = (0, 0, 0)
red = (255,0,0)
blue = (0,0,255)
yellow = (255,255,102)
green = (0,255,0)

# Display surface dimensions (pixels) and window setup.
dis_width = 800
dis_height = 600
dis = pygame.display.set_mode((dis_width,dis_height))
pygame.display.set_caption('Snake game by ZVIKA')

# Frame-rate clock, snake segment size and game speed (frames per second).
clock = pygame.time.Clock()
snake_block = 10
snake_speed = 30

# Fonts for status messages and the score.
# NOTE(review): "comicansms" looks like a typo of "comicsansms"; pygame
# silently falls back to the default font for unknown names — confirm intent.
font_style = pygame.font.SysFont("bahnschrift", 25)
score_font = pygame.font.SysFont("comicansms", 35)
def snake(snake_block, snake_list):
    """Draw every snake segment as a black square of side snake_block."""
    for segment in snake_list:
        pygame.draw.rect(dis, black, [segment[0], segment[1], snake_block, snake_block])
def message(msg, color):
    """Render msg in the given colour and blit it one third down the screen."""
    rendered = font_style.render(msg, True, color)
    dis.blit(rendered, [0, dis_height / 3])
def game_loop():
    """Main game loop: handle input, move the snake, detect collisions.

    Runs until the player quits; on death shows a lose screen offering
    quit (Q) or replay (C).
    NOTE(review): replay re-enters game_loop() recursively, so repeated
    replays grow the call stack — harmless in practice, worth confirming.
    """
    # Snake head position (starts centred) and per-frame movement delta.
    x1 = dis_width/2
    y1 = dis_height/2
    x1_change = 0
    y1_change = 0

    game_over = False   # True -> leave the outer loop and exit
    game_close = False  # True -> show the "You Lost" screen

    # Body segments (oldest first) and current length.
    snake_list = []
    snake_length = 1

    # Food position, snapped to the 10-pixel grid.
    foodx = round(random.randrange(0,dis_width - snake_block)/10)*10
    foody = round(random.randrange(0,dis_height - snake_block)/10)*10

    direction = ""  # last committed direction; blocks 180-degree reversals

    while not game_over:
        # Lose screen: wait for Q (quit) or C (restart).
        while game_close ==True:
            dis.fill(white)
            message("You Lost! Press Q-Quit or C-Play Again", red)
            pygame.display.update()
            for event in pygame.event.get():
                if event.type == pygame.KEYDOWN:
                    if event.key == pygame.K_q:
                        game_over = True
                        game_close = False
                    if event.key == pygame.K_c:
                        game_loop()
        # Input handling: arrow keys steer, but reversing into the body
        # is disallowed via the 'direction' guard.
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                game_over = True
            if event.type == pygame.KEYDOWN:
                if event.key == pygame.K_LEFT:
                    if direction != "right":
                        x1_change = -snake_block
                        y1_change = 0
                        direction = "left"
                elif event.key == pygame.K_RIGHT:
                    if direction != "left":
                        x1_change = snake_block
                        y1_change = 0
                        direction = "right"
                elif event.key == pygame.K_UP:
                    if direction != "down":
                        x1_change = 0
                        y1_change = -snake_block
                        direction = "up"
                elif event.key == pygame.K_DOWN:
                    if direction != "up":
                        x1_change = 0
                        y1_change = snake_block
                        direction = "down"
        # Wall collision ends the round.
        if x1 >= dis_width or x1 < 0 or y1 >= dis_height or y1 < 0:
            game_close = True
        # Advance the head and redraw the frame.
        x1 += x1_change
        y1 += y1_change
        dis.fill(blue)
        pygame.draw.rect(dis, yellow, [foodx, foody,10,10])
        snake_head = []
        snake_head.append(x1)
        snake_head.append(y1)
        snake_list.append(snake_head)
        # Drop the tail segment unless the snake just grew.
        if len(snake_list) > snake_length:
            del snake_list[0]
        # Self-collision: head overlapping any body segment ends the round.
        for x in snake_list[:-1]:
            if x == snake_head:
                game_close = True
        snake(snake_block, snake_list)
        pygame.display.update()
        # Eating food respawns it and lengthens the snake.
        if x1 == foodx and y1 == foody:
            foodx = round(random.randrange(0,dis_width - snake_block)/10)*10
            foody = round(random.randrange(0,dis_height - snake_block)/10)*10
            snake_length += 1
        clock.tick(snake_speed)
    pygame.quit()
    quit()
game_loop() |
#!/usr/bin/env bash
# Copied from here: https://github.com/Carthage/Carthage/issues/3019#issuecomment-665136323
# carthage.sh
# Usage example: ./carthage.sh build --platform iOS
#
# Works around Xcode 12 fat-binary builds by excluding arm simulator
# architectures via a temporary xcconfig, then delegates to carthage.
set -euo pipefail

# Temporary xcconfig, removed on exit/interrupt.
xcconfig=$(mktemp /tmp/static.xcconfig.XXXXXX)
trap 'rm -f "$xcconfig"' INT TERM HUP EXIT

# For Xcode 12 make sure EXCLUDED_ARCHS is set to arm architectures otherwise
# the build will fail on lipo due to duplicate architectures.
# ($xcconfig is quoted here; the original left it unquoted in the redirects.)
echo 'EXCLUDED_ARCHS__EFFECTIVE_PLATFORM_SUFFIX_simulator__NATIVE_ARCH_64_BIT_x86_64__XCODE_1200 = arm64 arm64e armv7 armv7s armv6 armv8' >> "$xcconfig"
echo 'EXCLUDED_ARCHS = $(inherited) $(EXCLUDED_ARCHS__EFFECTIVE_PLATFORM_SUFFIX_$(EFFECTIVE_PLATFORM_SUFFIX)__NATIVE_ARCH_64_BIT_$(NATIVE_ARCH_64_BIT)__XCODE_$(XCODE_VERSION_MAJOR))' >> "$xcconfig"

export XCODE_XCCONFIG_FILE="$xcconfig"
carthage "$@"
|
def is_sorted(arr):
    """Return True when arr is in non-decreasing order, False otherwise.

    Empty and single-element sequences are trivially sorted.
    """
    # Any adjacent pair that descends disproves sortedness.
    return all(left <= right for left, right in zip(arr, arr[1:]))
import type { ISacDigitalResponse } from './api';
type Gender = 'M' | 'F' | 'I';
// export interface ISacDigitalContactFilter extends IContactFilter {
// search?: string;
// email?: string;
// }
/** A single contact as returned by the SacDigital contacts API. */
export interface ISacDigitalContact {
    id: string;
    /** phone number */
    number: string;
    name: string;
    /** image url */
    avatar: string;
    // status: null;
    carrier: string;
    region: string;
    gender: Gender;
    /** Channel (service line) the contact is associated with. */
    channel: {
        id: string;
        /** channel phone number */
        number: string;
    };
    email: string;
    /** Social media profile URLs/handles — empty string when unknown. */
    socialMedias: {
        facebook: string;
        twitter: string;
        instagram: string;
        linkedin: string;
        pinterest: string;
        youtube: string;
        google: string;
    };
    tag: [];
    /** Agent binding info: current assignee plus binding history. */
    binded: {
        current: null;
        comunication: [
            {
                id: string;
                name: string;
                /** format YYYY-MM-DD HH:mm:ss*/
                bindedAt: string;
            },
        ];
    };
    observation: string;
    blocked: boolean;
    /** Number portability information. */
    portable: boolean;
    portableCarrier: string;
    portableDate: string;
    imported: boolean;
    /** format YYYY-MM-DD HH:mm:ss.
     * NOTE(review): field name is misspelled ("impotedAt" vs "importedAt") —
     * presumably it mirrors the upstream API payload, so renaming it would
     * break deserialization; left as-is, confirm against the API. */
    impotedAt: string;
    /** format YYYY-MM-DD HH:mm:ss*/
    createdAt: string;
}
/** Paginated contacts response envelope returned by the contacts endpoint. */
export interface ISacDigitalResponseContacts extends ISacDigitalResponse {
    /** Total number of contacts across all pages. */
    total: number;
    /** Current page index. */
    page: number;
    /** Contacts on this page. */
    list: ISacDigitalContact[];
}
|
<gh_stars>1-10
package com.twelvemonkeys.servlet.cache;
import com.twelvemonkeys.lang.Validate;
import java.net.URI;
/**
* AbstractCacheRequest
*
* @author <a href="mailto:<EMAIL>"><NAME></a>
* @author last modified by $Author: haku $
* @version $Id: AbstractCacheRequest.java#1 $
*/
public abstract class AbstractCacheRequest implements CacheRequest {
    private final URI requestURI;
    private final String method;

    /**
     * Creates a cache request for the given URI and HTTP method.
     * Both arguments are validated to be non-null.
     */
    protected AbstractCacheRequest(final URI pRequestURI, final String pMethod) {
        requestURI = Validate.notNull(pRequestURI, "requestURI");
        method = Validate.notNull(pMethod, "method");
    }

    /** The URI this request addresses. */
    public URI getRequestURI() {
        return requestURI;
    }

    /** The HTTP method of this request. */
    public String getMethod() {
        return method;
    }

    // TODO: Consider overriding equals/hashcode

    @Override
    public String toString() {
        // Same output as the original StringBuilder-based version.
        return getClass().getSimpleName()
                + "[URI=" + requestURI
                + ", parameters=" + getParameters()
                + ", headers=" + getHeaders()
                + "]";
    }
}
|
// Copyright 2019 Wason Technology, LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Author: <NAME> (<EMAIL>)
// Date: 12/10/2019
///<reference path="@babylonjs\babylon.module.d.ts" />
// tsc tesseract_viewer.ts --lib es6,DOM -m es2015 -target es6
import { _BabylonLoaderRegistered, SceneComponentConstants, FreeCameraTouchInput } from "babylonjs";
/**
 * Babylon.js viewer for a Tesseract robotics scene. Loads the scene from
 * "tesseract_scene.babylon", polls it (via HEAD + ETag) for changes and
 * reloads the page when it changes; likewise polls
 * "tesseract_trajectory.json" and (re)starts a JointTrajectoryAnimation
 * when a new trajectory appears.
 */
class TesseractViewer {
    private _canvas: HTMLCanvasElement;
    private _engine: BABYLON.Engine;
    private _scene: BABYLON.Scene;
    private _camera: BABYLON.ArcRotateCamera;
    private _light: BABYLON.Light;
    private _environment: BABYLON.EnvironmentHelper;
    private _root: BABYLON.TransformNode;
    private _joint_trajectory: JointTrajectoryAnimation;
    private _scene_etag = null;       // last seen ETag of the scene file
    private _trajectory_etag= null;   // last seen ETag of the trajectory file

    constructor(canvasElement : string) {
        // Create canvas and engine.
        this._canvas = document.getElementById(canvasElement) as HTMLCanvasElement;
        this._engine = new BABYLON.Engine(this._canvas, true);
    }

    /**
     * Builds the scene: environment, camera, light, root transform, then
     * loads the scene file and schedules the trajectory poller.
     */
    async createScene() : Promise<void> {
        // Create the scene space
        this._scene = new BABYLON.Scene(this._engine);
        //scene.clearColor = new BABYLON.Color3(.4, .4, .4);
        // Right-handed to match the exporter's coordinate convention.
        this._scene.useRightHandedSystem=true;
        this._environment = this._scene.createDefaultEnvironment({ enableGroundShadow: true, groundYBias: 0 });
        this._environment.setMainColor(BABYLON.Color3.FromHexString("#74b9ff"));
        // Add a camera to the scene and attach it to the canvas
        this._camera = new BABYLON.ArcRotateCamera("Camera", Math.PI / 2, Math.PI / 2, 2, new BABYLON.Vector3(0,1,0), this._scene);
        this._camera.attachControl(this._canvas, true);
        this._camera.setPosition(new BABYLON.Vector3(2.5, 1.5, -1));
        // Add lights to the scene
        this._light = new BABYLON.HemisphericLight("light1", new BABYLON.Vector3(0, -1, 0), this._scene);
        // Root node rotated -90deg about X (Z-up scene shown in Y-up viewer).
        this._root = new BABYLON.TransformNode("root0");
        this._root.rotation.x = -1.5707963267948966;

        await this.updateScene();
        console.log("Loaded!");
        this._scene.transformNodes.forEach(function (tf)
        {
            //this._addAxis(tf, 0.5);
            //console.log(tf)
        });
        //console.log(this._scene.transformNodes);
        // Recompute normals on every mesh; failures are non-fatal.
        this._scene.meshes.forEach(function (m)
        {
            try
            {
                m.createNormals(true);
            }
            catch (e)
            {
                console.log(e)
            }
            //m.parent=root;
            //addAxis(tf, 0.5);
        });
        this.enableVR();
        // Start polling for a trajectory after a short delay.
        let _this = this;
        setTimeout(() => _this.updateTrajectory(),2000);
        //this._scene.debugLayer.show();
    }

    /** Starts the render loop and keeps the engine sized to the window. */
    doRender() : void {
        // Run the render loop.
        this._engine.runRenderLoop(() => {
            this._scene.render();
        });
        // The canvas/window resize event handler.
        window.addEventListener('resize', () => {
            this._engine.resize();
        });
    }

    /** Debug helper: draws RGB = XYZ axis lines of the given size under parent. */
    addAxis(parent: BABYLON.Node, size: number) : void {
        var axisX = BABYLON.Mesh.CreateLines("axisX", [
            BABYLON.Vector3.Zero(), new BABYLON.Vector3(size, 0, 0), new BABYLON.Vector3(size * 0.95, 0.05 * size, 0),
            new BABYLON.Vector3(size, 0, 0), new BABYLON.Vector3(size * 0.95, -0.05 * size, 0)
        ], this._scene);
        axisX.color = new BABYLON.Color3(1, 0, 0);
        axisX.parent = parent;
        //var xChar = makeTextPlane("X", "red", size / 10);
        //xChar.position = new BABYLON.Vector3(0.9 * size, -0.05 * size, 0);
        var axisY = BABYLON.Mesh.CreateLines("axisY", [
            BABYLON.Vector3.Zero(), new BABYLON.Vector3(0, size, 0), new BABYLON.Vector3(-0.05 * size, size * 0.95, 0),
            new BABYLON.Vector3(0, size, 0), new BABYLON.Vector3(0.05 * size, size * 0.95, 0)
        ], this._scene);
        axisY.color = new BABYLON.Color3(0, 1, 0);
        axisY.parent = parent;
        //var yChar = makeTextPlane("Y", "green", size / 10);
        //yChar.position = new BABYLON.Vector3(0, 0.9 * size, -0.05 * size);
        var axisZ = BABYLON.Mesh.CreateLines("axisZ", [
            BABYLON.Vector3.Zero(), new BABYLON.Vector3(0, 0, size), new BABYLON.Vector3(0, -0.05 * size, size * 0.95),
            new BABYLON.Vector3(0, 0, size), new BABYLON.Vector3(0, 0.05 * size, size * 0.95)
        ], this._scene);
        axisZ.color = new BABYLON.Color3(0, 0, 1);
        axisZ.parent = parent;
        //var zChar = makeTextPlane("Z", "blue", size / 10);
        //zChar.position = new BABYLON.Vector3(0, 0.05 * size, 0.9 * size);
    }

    /** Sets up the default VR experience with a semi-transparent teleport floor. */
    enableVR(): void
    {
        // Enable VR
        var vrHelper = this._scene.createDefaultVRExperience({createDeviceOrientationCamera:false});
        var ground = BABYLON.Mesh.CreateGround("ground", 6, 6, 2, this._scene);
        vrHelper.enableTeleportation({floorMeshName: "ground"});
        ground.visibility = 0.1;
        //vrHelper.enableTeleportation({floorMeshes: [environment.ground]});
    }

    /**
     * Loads the scene file, then keeps polling its ETag once per second;
     * an ETag change triggers a full page reload. Network failures simply
     * reschedule the poll.
     */
    async updateScene(): Promise<void>
    {
        let fetch_res: Response;
        try
        {
            fetch_res = await fetch("tesseract_scene.babylon", {method: "HEAD"});
        }
        catch
        {
            // Server unreachable: retry in a second.
            let _this = this;
            setTimeout(() => _this.updateScene(), 1000);
            return;
        }
        let etag = fetch_res.headers.get('etag');
        if (etag !== null)
        {
            if (this._scene_etag !== null)
            {
                if (this._scene_etag != etag)
                {
                    // Scene changed on disk: reload the whole page.
                    location.reload();
                    return;
                }
                else
                {
                    // Unchanged: just poll again.
                    let _this = this;
                    setTimeout(() => _this.updateScene(), 1000);
                    return;
                }
            }
        }
        // First successful load: append the scene and begin polling.
        await BABYLON.SceneLoader.AppendAsync("./", "tesseract_scene.babylon", this._scene);
        if (etag !== null)
        {
            this._scene_etag = etag;
            let _this = this;
            setTimeout(() => _this.updateScene(), 1000);
        }
    }

    /**
     * Polls tesseract_trajectory.json once per second; when its ETag
     * changes, stops any running animation and starts a new one parsed
     * from the file.
     */
    async updateTrajectory(): Promise<void>
    {
        let fetch_res: Response;
        let _this = this;
        try
        {
            fetch_res = await fetch("tesseract_trajectory.json", {method: "HEAD"});
        }
        catch
        {
            setTimeout(() => _this.updateTrajectory(), 1000);
            return;
        }
        if (!fetch_res.ok)
        {
            // No trajectory published (e.g. 404): keep polling.
            setTimeout(() => _this.updateTrajectory(), 1000);
            return;
        }
        let etag = fetch_res.headers.get('etag');
        if (etag == null || this._trajectory_etag == etag)
        {
            console.log("No updated trajectory");
            setTimeout(() => _this.updateTrajectory(), 1000);
            return;
        }
        // Stop the previous animation, ignoring errors if none is running.
        try
        {
            if (this._joint_trajectory !== null)
            {
                this._joint_trajectory.stop();
                this._joint_trajectory = null;
            }
        }
        catch {}
        try
        {
            let trajectory_response = await fetch("./tesseract_trajectory.json");
            let trajectory_json = await trajectory_response.json();
            this._joint_trajectory = JointTrajectoryAnimation.Parse(trajectory_json, this._scene);
            this._joint_trajectory.start();
        }
        catch (e)
        {
            console.log("Trajectory not available");
            console.log(e);
        }
        if (etag !== null)
        {
            this._trajectory_etag = etag;
            setTimeout(() => _this.updateTrajectory(), 1000);
        }
    }
}
/**
 * Plays back a joint-space trajectory by driving the "joint_<name>"
 * transform nodes of the loaded scene. Waypoints are rows of joint
 * values; when use_time is set each row carries a trailing timestamp,
 * otherwise rows are spread evenly over loop_time seconds. Playback
 * loops forever on a 50 ms timer until stop() is called.
 */
class JointTrajectoryAnimation
{
    private _joint_names : string[];
    private _use_time: boolean;           // rows carry trailing timestamps
    private _loop_time: number;           // loop duration when !use_time (seconds)
    private _trajectory: number[][];
    private _scene: BABYLON.Scene;
    private _joints: Map<string,BABYLON.TransformNode>;
    private _joint_axes: Map<string,BABYLON.Vector3>;
    private _joint_type: Map<string,number>;
    private _max_time: number;            // last waypoint time when use_time
    private _t0: number;                  // wall-clock start (seconds)
    private _timerid = 0;                 // 0 means "not running"

    /**
     * Validates waypoint shape (and, when use_time, strictly increasing
     * timestamps), then resolves the scene's joint nodes.
     * @throws Error on empty inputs or malformed waypoints.
     */
    public constructor(scene: BABYLON.Scene, joint_names: string[],
        trajectory: number[][], use_time: boolean, loop_time: number)
    {
        if (joint_names.length == 0)
        {
            throw new Error("joint_names must not be zero count");
        }
        if (trajectory.length == 0)
        {
            throw new Error("trajectory must not be zero count");
        }
        this._max_time = -1;
        trajectory.forEach( (t) =>
        {
            if (use_time)
            {
                // Row = joint values + one trailing timestamp.
                if (t.length-1 != joint_names.length)
                {
                    throw new Error("Trajectory waypoints must have same count as joint_names")
                }
                let waypoint_time = t.slice(-1)[0];
                if (this._max_time >= waypoint_time)
                {
                    throw new Error("Trajectory waypoint time must me monotonically increasing");
                }
                this._max_time = waypoint_time;
            }
            else
            {
                if (t.length != joint_names.length)
                {
                    throw new Error("Trajectory waypoints must have same count as joint_names")
                }
            }
        });
        this._joint_names = joint_names;
        this._trajectory = trajectory;
        this._use_time = use_time;
        this._loop_time = loop_time;
        this._scene = scene;
        this.findJoints();
    }

    /**
     * Resolves "joint_<name>" transform nodes and caches each joint's
     * axis vector and type from its tesseract_joint metadata. Joints
     * without that metadata are silently skipped.
     */
    private findJoints() : void
    {
        let joints = new Map<string,BABYLON.TransformNode>();
        let axes = new Map<string,BABYLON.Vector3>();
        let type = new Map<string,number>();
        this._joint_names.forEach((joint_name) => {
            let tf = this._scene.getTransformNodeByName("joint_" + joint_name);
            let metadata = tf.metadata;
            if (metadata.hasOwnProperty("tesseract_joint")
                && metadata.tesseract_joint.hasOwnProperty("axis") )
            {
                joints.set(joint_name, tf);
                let axis_array = tf.metadata.tesseract_joint.axis;
                axes.set(joint_name, new BABYLON.Vector3(axis_array[0], axis_array[1], axis_array[2]));
                type.set(joint_name, tf.metadata.tesseract_joint.type);
            }
        });
        this._joints = joints;
        this._joint_axes = axes;
        this._joint_type = type;
    }

    /** Returns every driven joint to the identity pose. */
    public resetJointPos() : void
    {
        this._joints.forEach((tf) => {
            tf.position = new BABYLON.Vector3(0,0,0);
            tf.rotationQuaternion = new BABYLON.Quaternion(0,0,0,1);
        });
    }

    /** Duration of one playback loop in seconds. */
    public getMaxTime() : number
    {
        if (this._use_time)
        {
            return this._max_time;
        }
        else
        {
            return this._loop_time;
        }
    }

    /**
     * Poses all joints for playback time t by linearly interpolating
     * between the two waypoints bracketing t (last waypoint is held when
     * t is past the end). Prismatic joints (type 2) translate along their
     * axis; all others rotate about it.
     */
    public setTrajectoryTime(t: number) : void
    {
        let joint_n = this._joint_names.length;
        let n = this._trajectory.length;
        // Per-waypoint times: explicit when use_time, else evenly spaced.
        let times = [];
        for (let i=0; i<n; i++)
        {
            if (this._use_time)
            {
                times.push(this._trajectory[i][joint_n])
            }
            else
            {
                times.push(i*(this._loop_time/n));
            }
        }
        let joint_pos : number[] = null;
        // The loop does not break on times[i] < t; later matches overwrite
        // earlier ones, so the final result uses the last segment whose
        // start time precedes t — i.e. the segment containing t.
        for (let i = 0; i<n-1; i++)
        {
            if (times[i] == t)
            {
                joint_pos = this._trajectory[i].slice(0,joint_n);
                break;
            }
            if (times[i] < t)
            {
                let joint_pos1 = this._trajectory[i].slice(0,joint_n);
                let joint_pos2 = this._trajectory[i+1].slice(0,joint_n);
                let t1 = times[i]
                let t2 = times[i+1]
                joint_pos = []
                for (let j=0; j<joint_n; j++)
                {
                    joint_pos.push(joint_pos1[j] + ((joint_pos2[j] - joint_pos1[j])/(t2-t1))*(t-t1));
                }
            }
        }
        if (joint_pos === null)
        {
            // t precedes every waypoint segment: hold the last waypoint.
            joint_pos = this._trajectory.slice(-1)[0].slice(0,joint_n);
        }
        for (let i = 0; i<joint_n; i++)
        {
            let joint_name = this._joint_names[i];
            let joint = this._joints.get(joint_name);
            let axes = this._joint_axes.get(joint_name)
            let type = this._joint_type.get(joint_name)
            if (type == 2)
            {
                // Prismatic: translate along the joint axis.
                joint.position = axes.scale(joint_pos[i]);
            }
            else
            {
                // Revolute (and others): rotate about the joint axis.
                joint.rotationQuaternion = new BABYLON.Quaternion(0,0,0,1);
                joint.rotate(axes, joint_pos[i], BABYLON.Space.LOCAL);
            }
        }
    }

    /** Starts looping playback on a 50 ms interval (no-op if running). */
    public start(): void
    {
        if (this._timerid != 0)
        {
            return;
        }
        this._t0 = new Date().getTime()/1000;
        var _this = this;
        this._timerid = setInterval(() => _this.intervalCallback(),50);
    }

    /** Stops playback (no-op if not running). */
    public stop(): void
    {
        if (this._timerid == 0)
        {
            return;
        }
        clearInterval(this._timerid);
        this._timerid = 0;
    }

    /** Timer tick: map elapsed wall time onto the looping trajectory. */
    private intervalCallback(): void
    {
        let max_t = this.getMaxTime();
        let t_total = new Date().getTime()/1000 - this._t0;
        let t = t_total % max_t;
        this.setTrajectoryTime(t)
    }

    /** Builds an animation from the parsed tesseract_trajectory.json object. */
    public static Parse(parsedTrajectory: any, scene: BABYLON.Scene)
    {
        let trajectory = new JointTrajectoryAnimation(scene,
            parsedTrajectory.joint_names, parsedTrajectory.trajectory,
            parsedTrajectory.use_time, parsedTrajectory.loop_time);
        return trajectory;
    }
}
// Bootstrap the viewer once the DOM is ready.
window.addEventListener('DOMContentLoaded', async function() {
    // Create the game using the 'renderCanvas'.
    let viewer = new TesseractViewer('renderCanvas');
    // Create the scene.
    await viewer.createScene();
    // Start render loop.
    viewer.doRender();
});
<reponame>achamorr/wikiwiz<filename>js/highlight-parse.js
// =======================================================
// File: highlight-parse.js
// Description: Get queryTerms from webpage via user highlight action.
// Created: 09.07.2019
// Updated:
// Author: <NAME>
// =======================================================
|
<gh_stars>0
package ch.bernmobil.vibe.shared.contract;
/**
* Database contract to define table name and column name for {@link ch.bernmobil.vibe.shared.entity.CalendarDate}.
*
* @author <NAME>
* @author <NAME>
*/
@SuppressWarnings("ALL")
public final class CalendarDateContract {
    // Table name and column identifiers for the calendar_date table.
    public static final String TABLE_NAME = "calendar_date";
    public static final String ID = "id";
    public static final String VALID_FROM = "valid_from";
    public static final String VALID_UNTIL = "valid_until";
    public static final String JOURNEY = "journey";
    public static final String DAYS = "days";
    public static final String UPDATE = "update";
    // All columns in declaration order — convenient for building queries.
    public static final String[] COLUMNS = {ID, VALID_FROM, VALID_UNTIL, JOURNEY, DAYS, UPDATE};

    // Constants holder: not instantiable.
    private CalendarDateContract(){}
}
|
#!/bin/sh -x
# usage:
# [MACHINE=x86_64] sh vmrun.sh import_[qemu | lxc] [GUEST]
# sh vmrun.sh run_virsh [GUEST]
# or
# [MACHINE=x86_64] sh vmrun.sh run_qemu [GUEST]
# or
# sh vmrun.sh run_bhyve [GUEST]
#
# example ([defaults]):
# [MACHINE=x86_64] sh vmrun.sh run_qemu [freebsd-x86_64-zfs]
# Global defaults, each overridable from the environment:
#   STORAGE_DIR — where disk images / box artifacts live (default: script dir)
#   IMGFMT      — qemu disk image format
#   MACHINE     — guest architecture (x86_64 or aarch64)
#   *_FIRMWARE  — UEFI firmware blobs for bhyve and qemu
STORAGE_DIR=${STORAGE_DIR:-$(dirname $0)} ; IMGFMT=${IMGFMT:-qcow2}
MACHINE=${MACHINE:-x86_64}
BHYVE_X64_FIRMWARE=${BHYVE_X64_FIRMWARE:-/usr/local/share/uefi-firmware/BHYVE_UEFI_CODE.fd}
QEMU_X64_FIRMWARE=${QEMU_X64_FIRMWARE:-/usr/share/OVMF/OVMF_CODE.fd}
QEMU_AA64_FIRMWARE=${QEMU_AA64_FIRMWARE:-/usr/share/AAVMF/AAVMF_CODE.fd}
#-------- create Vagrant ; use box image --------
# Package GUEST's disk image plus metadata/Vagrantfile into a Vagrant .box
# archive for the libvirt or bhyve provider, then render a version catalog
# with whichever template engine is available (erb, pystache or mustache).
box_vagrant() {
    GUEST=${1:-freebsd-${MACHINE}-zfs} ; PROVIDER=${PROVIDER:-libvirt}
    author=${author:-thebridge0491} ; datestamp=${datestamp:-`date +"%Y.%m.%d"`}
    # Provider-specific box metadata (disk format/size).
    if [ ! -e ${STORAGE_DIR}/metadata.json ] ; then
        if [ "libvirt" = "${PROVIDER}" ] ; then
            echo '{"provider":"libvirt","virtual_size":30,"format":"qcow2"}' > \
                ${STORAGE_DIR}/metadata.json ;
        elif [ "bhyve" = "${PROVIDER}" ] ; then
            echo '{"provider":"bhyve","virtual_size":30,"format":"raw"}' > \
                ${STORAGE_DIR}/metadata.json ;
        fi ;
    fi
    # Human-readable box description.
    if [ ! -e ${STORAGE_DIR}/info.json ] ; then
        cat << EOF > ${STORAGE_DIR}/info.json ;
{
"Author": "${author} <${author}-codelab@yahoo.com>",
"Repository": "https://bitbucket.org/${author}/vm_templates_sh.git",
"Description": "Virtual machine templates (QEMU x86_64[, aarch64]) using auto install methods and/or chroot install scripts"
}
EOF
    fi
    # Default Vagrantfile embedded in the box.
    if [ ! -e ${STORAGE_DIR}/Vagrantfile ] ; then
        if [ "libvirt" = "${PROVIDER}" ] ; then
            cat << EOF > ${STORAGE_DIR}/Vagrantfile ;
## minimal contents
#Vagrant.configure("2") do |config|
# config.vm.provider :libvirt do |p|
# p.driver = 'kvm'
# end
#end
# custom contents
# -*- mode: ruby -*-
# vi: set ft=ruby :
Vagrant.configure(2) do |config|
config.ssh.shell = 'sh'
config.vm.boot_timeout = 1800
config.vm.synced_folder '.', '/vagrant', disabled: true
config.vm.provider :libvirt do |p, override|
p.driver = 'kvm'
p.cpus = 2
p.memory = 2048
p.video_vram = 64
p.video_type = 'qxl'
p.disk_bus = 'virtio'
p.nic_model_type = 'virtio'
end
end
EOF
        elif [ "bhyve" = "${PROVIDER}" ] ; then
            cat << EOF > ${STORAGE_DIR}/Vagrantfile ;
## minimal contents
#Vagrant.configure("2") do |config|
# config.vm.provider :bhyve do |p|
# p.cpus = 1
# p.memory = 1024
# end
#end
# custom contents
# -*- mode: ruby -*-
# vi: set ft=ruby :
Vagrant.configure(2) do |config|
config.ssh.shell = 'sh'
config.vm.boot_timeout = 1800
config.vm.synced_folder '.', '/vagrant', disabled: true
config.vm.provider :bhyve do |p, override|
p.cpus = 2
p.memory = 2048
end
end
EOF
        fi
    fi
    # Stage the disk image as box.img plus the matching UEFI firmware.
    if [ "libvirt" = "${PROVIDER}" ] ; then
        IMGFILE=${IMGFILE:-${GUEST}.${IMGFMT}} ;
        #mv ${STORAGE_DIR}/${IMGFILE} ${STORAGE_DIR}/box.img ;
        qemu-img convert -f qcow2 -O qcow2 ${STORAGE_DIR}/${IMGFILE} \
            ${STORAGE_DIR}/box.img ;
        if [ "aarch64" = "${MACHINE}" ] ; then
            cp ${QEMU_AA64_FIRMWARE} ${STORAGE_DIR}/ ;
        else
            cp ${QEMU_X64_FIRMWARE} ${STORAGE_DIR}/ ;
        fi ;
    elif [ "bhyve" = "${PROVIDER}" ] ; then
        IMGFILE=${IMGFILE:-${GUEST}.raw}
        mv ${STORAGE_DIR}/${IMGFILE} ${STORAGE_DIR}/box.img ;
        cp ${BHYVE_X64_FIRMWARE} ${STORAGE_DIR}/ ;
    fi
    # Bundle everything into the .box tarball.
    (cd ${STORAGE_DIR} ; tar -cvzf ${GUEST}-${datestamp}.${PROVIDER}.box metadata.json info.json Vagrantfile `ls vmrun* *_CODE.fd` box.img)
    # Render the box catalog with the first available template engine.
    if command -v erb > /dev/null ; then
        erb author=${author} guest=${GUEST} datestamp=${datestamp} \
            ${STORAGE_DIR}/catalog.json.erb > ${STORAGE_DIR}/${GUEST}_catalog.json ;
    elif command -v pystache > /dev/null ; then
        pystache ${STORAGE_DIR}/catalog.json.mustache "{
\"author\":\"${author}\",
\"guest\":\"${GUEST}\",
\"datestamp\":\"${datestamp}\"
}" > ${STORAGE_DIR}/${GUEST}_catalog.json ;
    elif command -v mustache > /dev/null ; then
        # NOTE(review): this branch looks broken — 'cat << EOF >> mustache ...'
        # appends the heredoc to a file literally named "mustache" rather than
        # piping the YAML front matter into the mustache CLI. Left unchanged
        # pending confirmation of the intended invocation.
        cat << EOF >> mustache - ${STORAGE_DIR}/catalog.json.mustache > ${STORAGE_DIR}/${GUEST}_catalog.json ;
---
author: ${author}
guest: ${GUEST}
datestamp: ${datestamp}
---
EOF
    fi
}
# Create a new qcow2 overlay for GUEST backed by the box.img found under
# the newest matching libvirt box, then print the backing chain.
# NOTE(review): 'find <box-file> -name box.img' only yields a match if the
# .box has been unpacked into a directory of that name; run directly
# against the tarball it returns nothing — confirm the expected layout.
diff_qemuimage() {
    GUEST=${1:-freebsd-${MACHINE}-zfs} ; BOXPREFIX=${BOXPREFIX:-${GUEST}}
    qemu-img create -f qcow2 -o backing_file=$(cd ${STORAGE_DIR} ; find ${BOXPREFIX}*libvirt.box -name box.img | tail -n1) \
        ${STORAGE_DIR}/${GUEST}.${IMGFMT}
    echo ''
    qemu-img info --backing-chain ${STORAGE_DIR}/${GUEST}.${IMGFMT} ; sleep 5
}
# Collapse GUEST's overlay back into a standalone image: replace the
# overlay with a copy of its backing file, rebase the saved overlay onto
# that copy, then commit the overlay's changes into it.
# NOTE(review): inside the 'cd ${STORAGE_DIR}' subshell one qemu-img
# argument is ${STORAGE_DIR}-prefixed and the other relative; this only
# works when STORAGE_DIR is usable from both places (e.g. absolute, or
# the default '.') — confirm.
revert_backingimage() {
    GUEST=${1:-freebsd-${MACHINE}-zfs} ; BOXPREFIX=${BOXPREFIX:-${GUEST}}
    backing_file=$(cd ${STORAGE_DIR} ; qemu-img info --backing-chain ${GUEST}.${IMGFMT} | sed -n 's|backing file:[ ]*\(.*\)$|\1|p')
    mv ${STORAGE_DIR}/${GUEST}.${IMGFMT} ${STORAGE_DIR}/${GUEST}.${IMGFMT}.bak
    sync ; (cd ${STORAGE_DIR} ; cp ${backing_file} ${GUEST}.${IMGFMT}) ; sync
    (cd ${STORAGE_DIR} ; qemu-img rebase -b ${GUEST}.${IMGFMT} \
        ${STORAGE_DIR}/${GUEST}.${IMGFMT}.bak ; sync)
    qemu-img commit ${STORAGE_DIR}/${GUEST}.${IMGFMT}.bak ; sync
    rm ${STORAGE_DIR}/${GUEST}.${IMGFMT}.bak ; sync
    qemu-img info --backing-chain ${STORAGE_DIR}/${GUEST}.${IMGFMT} ; sleep 5
}
#------------------------------------------------
#-------------- using virtinst ------------------
# Define and start an LXC container for GUEST via virt-install, using the
# rootfs under ~/.local/share/lxc/<GUEST>, then print its tty console.
import_lxc() {
    GUEST=${1:-devuan-boxe0000}
    CONNECT_OPT=${CONNECT_OPT:---connect lxc:///}
    virt-install ${CONNECT_OPT} --init /sbin/init --memory 768 --vcpus 1 \
        --controller virtio-serial --console pty,target_type=virtio \
        --network network=default,model=virtio-net,mac=RANDOM --boot menu=on \
        ${VIRTFS_OPTS:---filesystem type=mount,mode=passthrough,source=/mnt/Data0,target=9p_Data0} \
        --filesystem $HOME/.local/share/lxc/${GUEST}/rootfs,/ -n ${GUEST} &
    sleep 10 ; virsh ${CONNECT_OPT} ttyconsole ${GUEST}
    #sleep 5 ; virsh ${CONNECT_OPT} dumpxml ${GUEST} > $HOME/.local/share/lxc/${GUEST}.xml
}
# Import GUEST's existing disk image as a libvirt/QEMU domain (UEFI boot,
# virtio devices, VNC graphics), then print its VNC display.
import_qemu() {
    GUEST=${1:-freebsd-${MACHINE}-zfs} ; IMGFILE=${IMGFILE:-${GUEST}.${IMGFMT}}
    CONNECT_OPT=${CONNECT_OPT:---connect qemu:///system}
    VUEFI_OPTS=${VUEFI_OPTS:---boot uefi}
    virt-install ${CONNECT_OPT} --arch ${MACHINE} --memory 2048 --vcpus 2 \
        --controller usb,model=ehci --controller virtio-serial \
        --console pty,target_type=virtio --graphics vnc,port=-1 \
        --network network=default,model=virtio-net,mac=RANDOM \
        --boot menu=on,cdrom,hd --controller scsi,model=virtio-scsi \
        ${VIRTFS_OPTS:---filesystem type=mount,mode=passthrough,source=/mnt/Data0,target=9p_Data0} \
        --disk path=${STORAGE_DIR}/${IMGFILE},cache=writeback,discard=unmap,detect_zeroes=unmap,bus=scsi \
        ${VUEFI_OPTS} -n ${GUEST} --import &
    sleep 30 ; virsh ${CONNECT_OPT} vncdisplay ${GUEST}
    #sleep 5 ; virsh ${CONNECT_OPT} dumpxml ${GUEST} > ${STORAGE_DIR}/${GUEST}.xml
}
# Start an already-defined libvirt domain, print its VNC display, and
# attach a graphical virt-viewer in the background.
run_virsh() {
    GUEST=${1:-freebsd-${MACHINE}-zfs}
    CONNECT_OPT=${CONNECT_OPT:---connect qemu:///system}
    ## NOTE, to convert qemu-system args to libvirt domain XML:
    # eval "echo \"$(< vmrun_qemu.args)\"" > /tmp/run_qemu.args
    # virsh ${CONNECT_OPT} domxml-from-native qemu-argv /tmp/run_qemu.args
    virsh ${CONNECT_OPT} start ${GUEST}
    sleep 10 ; virsh ${CONNECT_OPT} vncdisplay ${GUEST} ; sleep 5
    virt-viewer ${CONNECT_OPT} ${GUEST} &
}
#------------------------------------------------
#---------------- using bhyve -------------------
# Boot GUEST under FreeBSD bhyve with UEFI firmware, a virtio disk/NIC,
# a 9p share and a VNC framebuffer, then attach a VNC viewer.
run_bhyve() {
    printf "%40s\n" | tr ' ' '#'
    echo '### Warning: FreeBSD bhyve currently requires root/sudo permission ###'
    printf "%40s\n\n" | tr ' ' '#' ; sleep 5
    GUEST=${1:-freebsd-${MACHINE}-zfs} ; IMGFILE=${IMGFILE:-${GUEST}.raw}
    BUEFI_OPTS=${BUEFI_OPTS:--s 29,fbuf,tcp=0.0.0.0:${VNCPORT:-5901},w=1024,h=768 \
        -s 30,xhci,tablet -l bootrom,${BHYVE_X64_FIRMWARE}}
    # Random locally-administered MAC in the 52:54:00 (QEMU/KVM) prefix.
    bhyve -A -H -P -c 2 -m 2048M -l com1,stdio -s 0,hostbridge -s 1,lpc \
        -s 2,virtio-net,${NET_OPT:-tap0},mac=52:54:00:$(openssl rand -hex 3 | sed 's|\(..\)|\1:|g; s|:$||') \
        -s 3,virtio-blk,${STORAGE_DIR}/${IMGFILE} \
        ${BUEFI_OPTS} ${VIRTFS_OPTS:--s 4,virtio-9p,9p_Data0=/mnt/Data0} \
        ${GUEST} &
    vncviewer :${VNCPORT:-5901} &
    #ls -al /dev/vmm # list running VMs
    #bhyvectl --destroy --vm=${GUEST}
}
#------------------------------------------------
#------------ using qemu-system-* ---------------
# Boot GUEST directly with qemu-system-* (KVM/HVF/TCG), UEFI firmware,
# virtio storage/network and an optional 9p shared folder.
run_qemu() {
    GUEST=${1:-freebsd-${MACHINE}-zfs} ; IMGFILE=${IMGFILE:-${GUEST}.${IMGFMT}}
    # Default the 9p shared-folder options.
    # BUGFIX: the original line expanded ${VIRTFS_OPTS:-...} as a COMMAND
    # (the 'VIRTFS_OPTS=' assignment was missing), which failed with
    # "command not found" and left VIRTFS_OPTS empty for the qemu call.
    VIRTFS_OPTS=${VIRTFS_OPTS:--virtfs local,id=fsdev0,path=/mnt/Data0,mount_tag=9p_Data0,security_model=passthrough}
    if [ "aarch64" = "${MACHINE}" ] ; then
        QUEFI_OPTS=${QUEFI_OPTS:-"-smbios type=0,uefi=on -bios ${QEMU_AA64_FIRMWARE}"}
        qemu-system-aarch64 -cpu cortex-a57 -machine virt,gic-version=3,accel=kvm:hvf:tcg \
            -smp cpus=2 -m size=2048 -boot order=cd,menu=on -name ${GUEST} \
            -nic ${NET_OPT:-bridge,br=br0},id=net0,model=virtio-net-pci,mac=52:54:00:$(openssl rand -hex 3 | sed 's|\(..\)|\1:|g; s|:$||') \
            -device qemu-xhci,id=usb -usb -device usb-kbd -device usb-tablet \
            -device virtio-blk-pci,drive=hd0 \
            -drive file=${STORAGE_DIR}/${IMGFILE},cache=writeback,discard=unmap,detect-zeroes=unmap,if=none,id=hd0,format=qcow2 \
            -display default,show-cursor=on -vga none -device virtio-gpu-pci \
            ${QUEFI_OPTS} ${VIRTFS_OPTS} &
    else
        QUEFI_OPTS=${QUEFI_OPTS:-"-smbios type=0,uefi=on -bios ${QEMU_X64_FIRMWARE}"}
        qemu-system-x86_64 -machine q35,accel=kvm:hvf:tcg \
            -global PIIX4_PM.disable_s3=1 -global PIIX4_PM.disable_s4=1 \
            -smp cpus=2 -m size=2048 -boot order=cd,menu=on -name ${GUEST} \
            -nic ${NET_OPT:-bridge,br=br0},id=net0,model=virtio-net-pci,mac=52:54:00:$(openssl rand -hex 3 | sed 's|\(..\)|\1:|g; s|:$||') \
            -device qemu-xhci,id=usb -usb -device usb-kbd -device usb-tablet \
            -device virtio-scsi-pci,id=scsi0 -device scsi-hd,drive=hd0 \
            -drive file=${STORAGE_DIR}/${IMGFILE},cache=writeback,discard=unmap,detect-zeroes=unmap,if=none,id=hd0,format=qcow2 \
            -display default,show-cursor=on \
            ${QUEFI_OPTS} ${VIRTFS_OPTS} &
    fi
}
#------------------------------------------------
#------------------------------------------------
${@:-run_qemu freebsd-x86_64-zfs}
|
#!/bin/bash
# Sort a list file case-insensitively (C collation) and drop duplicate
# lines, in place; with no argument, filter stdin to stdout.
set -e

if [ -z "$1" ]; then
    source_list=/dev/stdin
    dest_list=/dev/stdout
else
    source_list="$1"
    dest_list="$1"
fi

# Load the file
readarray A < "$source_list"

# Sort
# BUGFIX: disable globbing around the unquoted array re-assignments so
# lines containing *, ? or [ are not expanded against the filesystem.
set -f
IFS=$'\n'
A=( $(LC_COLLATE=C sort -f <<< "${A[*]}") )
A=( $(uniq <<< "${A[*]}") )
unset IFS
set +f

# Dump array back into the file
printf '%s\n' "${A[@]}" > "$dest_list"
|
#!/bin/sh
#
# Variables
#
# Resolve the test-suite directory layout relative to this script's real path.
SCRIPT=$(readlink -f "$0")
DIR="$(dirname $SCRIPT)"
DIR_TESTS="$(dirname $(dirname $DIR))"
DIR_LIBRARY="${DIR_TESTS}/lib"
DIR_RESOURCES="${DIR_TESTS}/resources"
DIR_TARGET="${DIR_TESTS}/target"
#
# Tests
#
# Assertion helpers plus the test functions (install, tex_version, ...).
. $DIR_LIBRARY/testbase.sh
. $DIR_LIBRARY/functions.sh
#
# Test Runner
#
# Each test runs in its own subshell so a failing step cannot leak state.
(
    rm -rf $DIR_TARGET
    mkdir -p $DIR_TARGET
    (
        RESULT=$(install)
        # NOTE(review): asserting the status is NOT 0 while labelled
        # "cannot install to image" reads inverted — confirm the helper's
        # argument convention in testbase.sh.
        assertNotEquals "cannot install to image" 0 $?
    )
    (
        RESULT=$(tex_version)
        assertEquals "tex is installed" 0 $?
    )
    (
        RESULT=$(small2e)
        assertEquals "a small2e tex is built" 0 $?
    )
    (
        RESULT=$(sample2e)
        assertEquals "a sample2e tex is built" 0 $?
    )
    (
        RESULT=$(scheme_version)
        assertEquals "scheme tex is tested" 0 $?
    )
)
#!/bin/sh
# Switch the balena (Docker) storage driver from aufs to overlay2 in the
# systemd unit, and also in the drop-in override when one exists (the
# drop-in is only present on development images).
sed -i "s/-s aufs/-s overlay2/g" /lib/systemd/system/balena.service; \
if [ -f /etc/systemd/system/balena.service.d/balena.conf ]; then \
    sed -i "s/-s aufs/-s overlay2/g" /etc/systemd/system/balena.service.d/balena.conf; \
else \
    echo "No drop-in found. Most probably you don't run a development image."; \
fi
|
<filename>dist/command/Generate.d.ts
import { Command } from 'commander';
export declare function GenerateCommand(cmd: Command): Promise<void>;
|
<reponame>bpbpublications/Building-Server-side-and-Microservices-with-Go
package main

import "fmt"

// Rectangle is an axis-aligned rectangle with integer side lengths.
type Rectangle struct {
	a, b int
}

// Area returns the product of the rectangle's two side lengths.
func (rect *Rectangle) Area() int {
	return rect.a * rect.b
}

func main() {
	shape := Rectangle{2, 3}
	fmt.Println(shape.Area())
}
|
#!/bin/sh
# Tmux script that sets up a simnet mining harness.
#
# Spins up, inside a single tmux session, everything needed to exercise a
# mining pool on Decred simnet: a master dcrd node, a voting node, their
# two wallets, the pool process, its web GUI assets, and two CPU mining
# clients. Each component gets its own tmux window and its own config
# directory under ${NODES_ROOT}.
set -e
SESSION="harness"
NODES_ROOT=~/harness
RPC_USER="user"
RPC_PASS="pass"
MASTER_WALLET_SEED="b280922d2cffda44648346412c5ec97f429938105003730414f10b01e1402eac"
VOTING_WALLET_SEED="aabbcaabbcaabbcaabbcaabbcaabbcaabbcaabbcaabbcaabbcaabbcaabbcaabbc"
WALLET_PASS=123
BACKUP_PASS=b@ckUp
# Pool behaviour knobs: solo mining disabled, PPLNS payouts over a five
# minute window, blocks expected at most every MAX_GEN_TIME seconds.
SOLO_POOL=0
MAX_GEN_TIME=20
MINER_MAX_PROCS=1
PAYMENT_METHOD="pplns"
LAST_N_PERIOD=300 # PPLNS range, 5 minutes.
GUI_DIR="${NODES_ROOT}/gui"
# Simnet addresses: CPU/voting-node mining payout, pool mining payout,
# pool fee, and the two mining clients.
CPU_MINING_ADDR="SsiuwSRYvH7pqWmRxFJWR8Vmqc3AWsjmK2Y"
POOL_MINING_ADDR="SspUvSyDGSzvPz2NfdZ5LW15uq6rmuGZyhL"
PFEE_ADDR="SsVPfV8yoMu7AvF5fGjxTGmQ57pGkaY6n8z"
CLIENT_ONE_ADDR="SsZckVrqHRBtvhJA5UqLZ3MDXpZHi5mK6uU"
CLIENT_TWO_ADDR="Ssn23a3rJaCUxjqXiVSNwU6FxV45sLkiFpz"
# Start every run from a clean slate.
if [ -d "${NODES_ROOT}" ]; then
rm -R "${NODES_ROOT}"
fi
echo "Writing node config files"
mkdir -p "${NODES_ROOT}/master"
mkdir -p "${NODES_ROOT}/vnode"
mkdir -p "${NODES_ROOT}/mwallet"
mkdir -p "${NODES_ROOT}/vwallet"
mkdir -p "${NODES_ROOT}/pool"
mkdir -p "${NODES_ROOT}/gui"
mkdir -p "${NODES_ROOT}/c1"
mkdir -p "${NODES_ROOT}/c2"
cp -r gui/assets ${GUI_DIR}/assets
# Mining client configs: both point at the pool's stratum endpoint.
cat > "${NODES_ROOT}/c1/client.conf" <<EOF
debuglevel=trace
activenet=simnet
user=m1
address=${CLIENT_ONE_ADDR}
pool=127.0.0.1:5550
maxprocs=${MINER_MAX_PROCS}
profile=:6061
EOF
cat > "${NODES_ROOT}/c2/client.conf" <<EOF
debuglevel=trace
activenet=simnet
user=m2
address=${CLIENT_TWO_ADDR}
pool=127.0.0.1:5550
maxprocs=${MINER_MAX_PROCS}
profile=:6062
EOF
# dcrctl configs for talking to the master and voting nodes.
cat > "${NODES_ROOT}/master/dcrmctl.conf" <<EOF
rpcuser=${RPC_USER}
rpcpass=${RPC_PASS}
rpccert=${NODES_ROOT}/master/rpc.cert
rpcserver=127.0.0.1:19556
EOF
cat > "${NODES_ROOT}/vnode/dcrvctl.conf" <<EOF
rpcuser=${RPC_USER}
rpcpass=${RPC_PASS}
rpccert=${NODES_ROOT}/vnode/rpc.cert
rpcserver=127.0.0.1:19560
EOF
# Pool config: connects to the master node's RPC and the pool wallet's
# gRPC endpoint.
cat > "${NODES_ROOT}/pool/pool.conf" <<EOF
rpcuser=${RPC_USER}
rpcpass=${RPC_PASS}
dcrdrpchost=127.0.0.1:19556
dcrdrpccert=${NODES_ROOT}/master/rpc.cert
walletgrpchost=127.0.0.1:19558
walletrpccert=${NODES_ROOT}/mwallet/rpc.cert
debuglevel=trace
maxgentime=${MAX_GEN_TIME}
solopool=${SOLO_POOL}
activenet=simnet
walletpass=${WALLET_PASS}
poolfeeaddrs=${PFEE_ADDR}
paymentmethod=${PAYMENT_METHOD}
lastnperiod=${LAST_N_PERIOD}
backuppass=${BACKUP_PASS}
guidir=${GUI_DIR}
designation=${SESSION}
profile=:6060
EOF
cat > "${NODES_ROOT}/mwallet/dcrmwctl.conf" <<EOF
rpcuser=${RPC_USER}
rpcpass=${RPC_PASS}
rpccert=${NODES_ROOT}/mwallet/rpc.cert
rpcserver=127.0.0.1:19557
EOF
cat > "${NODES_ROOT}/vwallet/dcrvwctl.conf" <<EOF
rpcuser=${RPC_USER}
rpcpass=${RPC_PASS}
rpccert=${NODES_ROOT}/vwallet/rpc.cert
rpcserver=127.0.0.1:19562
EOF
cat > "${NODES_ROOT}/mwallet/mwallet.conf" <<EOF
username=${RPC_USER}
password=${RPC_PASS}
cafile=${NODES_ROOT}/master/rpc.cert
logdir=${NODES_ROOT}/mwallet/log
appdata=${NODES_ROOT}/mwallet
simnet=1
pass=${WALLET_PASS}
EOF
# Voting wallet: votes on tickets and continuously buys new ones.
cat > "${NODES_ROOT}/vwallet/vwallet.conf" <<EOF
username=${RPC_USER}
password=${RPC_PASS}
cafile=${NODES_ROOT}/vnode/rpc.cert
logdir=${NODES_ROOT}/vwallet/log
appdata=${NODES_ROOT}/vwallet
simnet=1
enablevoting=1
enableticketbuyer=1
ticketbuyer.limit=10
pass=${WALLET_PASS}
rpcconnect=127.0.0.1:19560
grpclisten=127.0.0.1:19561
rpclisten=127.0.0.1:19562
EOF
cd ${NODES_ROOT} && tmux new-session -d -s $SESSION
################################################################################
# Setup the master node.
################################################################################
cat > "${NODES_ROOT}/master/ctl" <<EOF
#!/bin/sh
dcrctl -C dcrmctl.conf \$*
EOF
chmod +x "${NODES_ROOT}/master/ctl"
tmux rename-window -t $SESSION:0 'master'
tmux send-keys "cd ${NODES_ROOT}/master" C-m
echo "Starting simnet master node"
tmux send-keys "dcrd --appdata=${NODES_ROOT}/master \
--rpcuser=${RPC_USER} --rpcpass=${RPC_PASS} \
--miningaddr=${POOL_MINING_ADDR} \
--txindex \
--debuglevel=info \
--simnet" C-m
################################################################################
# Setup the master node's dcrctl (mctl).
################################################################################
# "mine" helper: generate N blocks, one at a time, pausing between them.
cat > "${NODES_ROOT}/master/mine" <<EOF
#!/bin/sh
NUM=1
case \$1 in
''|*[!0-9]*) ;;
*) NUM=\$1 ;;
esac
for i in \$(seq \$NUM) ; do
dcrctl -C dcrmctl.conf generate 1
sleep 0.5
done
EOF
chmod +x "${NODES_ROOT}/master/mine"
tmux new-window -t $SESSION:1 -n 'mctl'
tmux send-keys "cd ${NODES_ROOT}/master" C-m
sleep 3
# mine some blocks to start the chain.
tmux send-keys "./mine 2" C-m
echo "Mined 2 blocks"
sleep 1
# NOTE(review): no trailing C-m — the command is typed into the window
# but not executed; confirm this is intentional (left for the operator).
tmux send-keys "./ctl livetickets"
################################################################################
# Setup the pool wallet.
################################################################################
cat > "${NODES_ROOT}/mwallet/ctl" <<EOF
#!/bin/sh
dcrctl -C dcrmwctl.conf --wallet \$*
EOF
chmod +x "${NODES_ROOT}/mwallet/ctl"
tmux new-window -t $SESSION:2 -n 'mwallet'
tmux send-keys "cd ${NODES_ROOT}/mwallet" C-m
tmux send-keys "eacrwallet -C mwallet.conf --create" C-m
echo "Creating simnet master wallet"
sleep 1
# Answer the interactive create prompts: passphrase (twice), no extra
# encryption, yes to using a seed, then the seed itself.
tmux send-keys "${WALLET_PASS}" C-m "${WALLET_PASS}" C-m "n" C-m "y" C-m
sleep 1
tmux send-keys "${MASTER_WALLET_SEED}" C-m C-m
tmux send-keys "eacrwallet -C mwallet.conf " C-m # --debuglevel=warn
# ################################################################################
# # Setup the pool wallet's dcrctl (wctl).
# ################################################################################
sleep 10
# The consensus daemon must be synced for account generation to
# work as expected.
echo "Setting up pool wallet accounts"
tmux new-window -t $SESSION:3 -n 'mwctl'
tmux send-keys "cd ${NODES_ROOT}/mwallet" C-m
tmux send-keys "./ctl createnewaccount pfee" C-m
tmux send-keys "./ctl getnewaddress pfee" C-m
tmux send-keys "./ctl createnewaccount c1" C-m
tmux send-keys "./ctl getnewaddress c1" C-m
tmux send-keys "./ctl createnewaccount c2" C-m
tmux send-keys "./ctl getnewaddress c2" C-m
tmux send-keys "./ctl getnewaddress default" C-m
# NOTE(review): no trailing C-m here either — typed but not executed.
tmux send-keys "./ctl getbalance"
################################################################################
# Setup the voting node.
################################################################################
cat > "${NODES_ROOT}/vnode/ctl" <<EOF
#!/bin/sh
dcrctl -C dcrvctl.conf \$*
EOF
chmod +x "${NODES_ROOT}/vnode/ctl"
tmux new-window -t $SESSION:4 -n 'vnode'
tmux send-keys "cd ${NODES_ROOT}/vnode" C-m
echo "Starting simnet voting node"
tmux send-keys "dcrd --appdata=${NODES_ROOT}/vnode \
--rpcuser=${RPC_USER} --rpcpass=${RPC_PASS} \
--connect=127.0.0.1:18555 \
--listen=127.0.0.1:19559 --rpclisten=127.0.0.1:19560 \
--miningaddr=${CPU_MINING_ADDR} \
--txindex \
--debuglevel=info \
--simnet" C-m
################################################################################
# Setup the voting node's dcrctl (vctl).
################################################################################
sleep 3
cat > "${NODES_ROOT}/vnode/mine" <<EOF
#!/bin/sh
NUM=1
case \$1 in
''|*[!0-9]*) ;;
*) NUM=\$1 ;;
esac
for i in \$(seq \$NUM) ; do
dcrctl -C dcrvctl.conf generate 1
sleep 0.5
done
EOF
chmod +x "${NODES_ROOT}/vnode/mine"
tmux new-window -t $SESSION:5 -n 'vctl'
tmux send-keys "cd ${NODES_ROOT}/vnode" C-m
tmux send-keys "./mine 30" C-m
sleep 10
echo "Mined 30 blocks, at stake enabled height (SEH)"
################################################################################
# Setup the voting wallet.
################################################################################
cat > "${NODES_ROOT}/vwallet/ctl" <<EOF
#!/bin/sh
dcrctl -C dcrvwctl.conf --wallet \$*
EOF
chmod +x "${NODES_ROOT}/vwallet/ctl"
# "tickets" helper: buy N tickets from the default account.
cat > "${NODES_ROOT}/vwallet/tickets" <<EOF
#!/bin/sh
NUM=1
case \$1 in
''|*[!0-9]*) ;;
*) NUM=\$1 ;;
esac
./ctl purchaseticket default 999999 1 \`./ctl getnewaddress\` \$NUM
EOF
chmod +x "${NODES_ROOT}/vwallet/tickets"
tmux new-window -t $SESSION:6 -n 'vwallet'
tmux send-keys "cd ${NODES_ROOT}/vwallet" C-m
tmux send-keys "eacrwallet -C vwallet.conf --create" C-m
echo "Creating simnet voting wallet"
sleep 1
tmux send-keys "${WALLET_PASS}" C-m "${WALLET_PASS}" C-m "n" C-m "y" C-m
sleep 1
tmux send-keys "${VOTING_WALLET_SEED}" C-m C-m
tmux send-keys "eacrwallet -C vwallet.conf --debuglevel=debug" C-m
################################################################################
# Setup the voting wallet's dcrctl (vwctl).
################################################################################
sleep 1
tmux new-window -t $SESSION:7 -n 'vwctl'
tmux send-keys "cd ${NODES_ROOT}/vwallet" C-m
################################################################################
# Setup eacrpool.
################################################################################
echo "Starting eacrpool"
sleep 5
tmux new-window -t $SESSION:8 -n 'pool'
tmux send-keys "cd ${NODES_ROOT}/pool" C-m
tmux send-keys "eacrpool --configfile=pool.conf --homedir=${NODES_ROOT}/pool" C-m
################################################################################
# Setup first mining client.
################################################################################
echo "Starting mining client 1"
sleep 1
tmux new-window -t $SESSION:9 -n 'c1'
tmux send-keys "cd ${NODES_ROOT}/c1" C-m
tmux send-keys "miner --configfile=client.conf --homedir=${NODES_ROOT}/c1" C-m
################################################################################
# Setup another mining client.
################################################################################
echo "Starting mining client 2"
sleep 1
tmux new-window -t $SESSION:10 -n 'c2'
tmux send-keys "cd ${NODES_ROOT}/c2" C-m
tmux send-keys "miner --configfile=client.conf --homedir=${NODES_ROOT}/c2" C-m
tmux attach-session -t $SESSION |
import { G, Path } from 'react-native-svg';
import * as React from 'react';
import withIcon from '../../lib/withIcon';
// Props shared by every Iconly icon variant component.
type Props = {
opacity?: string; // opacity of the secondary layer (bulk / two-tone sets)
color?: string; // primary stroke/fill color
secondaryColor?: string; // secondary stroke/fill color (bulk / two-tone sets)
set?: string; // visual set: 'bold' | 'bulk' | 'broken' | 'two-tone' | 'curved'; anything else falls back to the light outline
strokeWidth?: string | number; // stroke width for the outline sets
};
/**
 * Iconly "Image" icon, rendered in one of six visual sets (bold, bulk,
 * broken, two-tone, curved, or the default light outline).
 *
 * Fix: the per-set builders are now invoked directly (`Bold()`) instead
 * of being rendered as JSX components (`<Bold />`). A function defined
 * inside render gets a new component-type identity on every render, so
 * rendering it as an element forces React to unmount and remount the
 * whole SVG subtree each time the parent re-renders. Calling the builder
 * returns the same element tree without introducing a component boundary.
 */
const Image = ({
  color, secondaryColor, strokeWidth, opacity, set,
}: Props) => {
  // Solid filled variant.
  const Bold = () => (
    <G transform="translate(2 2)">
      <Path
        d="M14.332,20H5.666C2.277,20,0,17.622,0,14.083V5.917C0,2.378,2.277,0,5.666,0h8.667C17.723,0,20,2.378,20,5.917v8.167C20,17.622,17.722,20,14.332,20ZM6.307,13.241c-.5,0-1.018.383-1.724,1.279l-.038.048c-.45.569-.912,1.155-1.371,1.726a.8.8,0,0,0-.162.633.716.716,0,0,0,.328.5A4.112,4.112,0,0,0,5.529,18h8.428a4.524,4.524,0,0,0,1.407-.214,3.717,3.717,0,0,0,2.261-2.111,3.756,3.756,0,0,0,.2-2.741,2.263,2.263,0,0,0-.513-.784l0,0a10.811,10.811,0,0,0-1.868-1.6,1.429,1.429,0,0,0-.76-.235,1.874,1.874,0,0,0-1.363.806c-.177.238-.33.491-.492.759l-.1.171-.007.012a5.683,5.683,0,0,1-1.96,2.218,2.487,2.487,0,0,1-1.208.335,3.516,3.516,0,0,1-1.959-.785,5.491,5.491,0,0,0-.827-.49A1.12,1.12,0,0,0,6.307,13.241ZM6.5,4A2.5,2.5,0,1,0,9,6.5,2.5,2.5,0,0,0,6.5,4Z"
        fill={color}
      />
    </G>
  );
  // Filled variant with a translucent secondary background layer.
  const Bulk = () => (
    <G transform="translate(2 2)">
      <Path
        d="M14.333,20H5.666C2.277,20,0,17.623,0,14.084V5.917C0,2.378,2.277,0,5.666,0h8.668C17.723,0,20,2.378,20,5.917v8.168C20,17.623,17.723,20,14.333,20"
        fill={secondaryColor}
        opacity={opacity}
      />
      <Path
        d="M2.515,13.567a4.184,4.184,0,0,1-2.177-.558.736.736,0,0,1-.165-1.1c.47-.57.934-1.142,1.4-1.719.892-1.1,1.492-1.424,2.161-1.143a5.581,5.581,0,0,1,.823.475,2.807,2.807,0,0,0,3.149.436A5.617,5.617,0,0,0,9.664,7.8c.194-.311.378-.621.592-.9a1.585,1.585,0,0,1,2.111-.553A10.687,10.687,0,0,1,14.229,7.9a2.181,2.181,0,0,1,.509.76,3.564,3.564,0,0,1-.2,2.656,3.635,3.635,0,0,1-2.247,2.046,4.585,4.585,0,0,1-1.4.207ZM1.1,2.486A2.485,2.485,0,1,1,3.588,4.971,2.488,2.488,0,0,1,1.1,2.486Z"
        transform="translate(3.172 4.189)"
        fill={color}
      />
    </G>
  );
  // Default thin outline variant.
  const Light = () => (
    <G transform="translate(2 2)">
      <Path
        d="M18.46,5.149V13.3c0,3.02-1.89,5.15-4.91,5.15H4.9c-3.02,0-4.9-2.13-4.9-5.15V5.149C0,2.129,1.89,0,4.9,0h8.65C16.57,0,18.46,2.129,18.46,5.149Z"
        transform="translate(0.75 0.751)"
        fill="none"
        stroke={color}
        strokeLinecap="round"
        strokeLinejoin="round"
        strokeMiterlimit="10"
        strokeWidth={strokeWidth}
      />
      <Path
        d="M0,4.14,1.529,2.527a1.4,1.4,0,0,1,2.032-.008l.885.9a1.433,1.433,0,0,0,2.131-.094L8.806.617a1.687,1.687,0,0,1,2.515-.1L13.4,2.655"
        transform="translate(3.281 10.292)"
        fill="none"
        stroke={color}
        strokeLinecap="round"
        strokeLinejoin="round"
        strokeMiterlimit="10"
        strokeWidth={strokeWidth}
      />
      <Path
        d="M3.507,1.754A1.754,1.754,0,1,1,1.753,0,1.755,1.755,0,0,1,3.507,1.754Z"
        transform="translate(4.806 5.38)"
        fill="none"
        stroke={color}
        strokeLinecap="round"
        strokeLinejoin="round"
        strokeMiterlimit="10"
        strokeWidth={strokeWidth}
      />
    </G>
  );
  // Outline variant with interrupted strokes.
  const Broken = () => (
    <G transform="translate(2 2)">
      <Path
        d="M10.98,19.949a.75.75,0,0,1,0-1.5H14.3c2.527,0,4.16-1.727,4.16-4.4V5.9c0-2.673-1.633-4.4-4.16-4.4H5.65C3.129,1.5,1.5,3.226,1.5,5.9V14.05c0,2.672,1.629,4.4,4.15,4.4h.721a.75.75,0,0,1,0,1.5H5.65c-3.38,0-5.65-2.372-5.65-5.9V5.9C0,2.371,2.271,0,5.65,0H14.3c3.386,0,5.66,2.371,5.66,5.9V14.05c0,3.528-2.274,5.9-5.66,5.9ZM2.766,14.978a.751.751,0,0,1-.028-1.061L4.267,12.3a2.126,2.126,0,0,1,1.554-.672,2.345,2.345,0,0,1,1.561.661l.881.9a.65.65,0,0,0,.517.2.677.677,0,0,0,.5-.248l2.228-2.713a2.444,2.444,0,0,1,1.783-.887,2.471,2.471,0,0,1,1.849.739l2.077,2.141a.75.75,0,0,1-1.077,1.045l-2.077-2.141a.893.893,0,0,0-.711-.285.929.929,0,0,0-.685.342L10.438,14.1a2.185,2.185,0,0,1-1.59.795,2.244,2.244,0,0,1-1.655-.652l-.886-.9a.52.52,0,0,0-.479-.206.647.647,0,0,0-.472.2l-1.53,1.615a.752.752,0,0,1-1.06.03ZM4.056,7.133a2.5,2.5,0,1,1,2.5,2.5A2.507,2.507,0,0,1,4.056,7.133Zm1.5,0a1,1,0,1,0,1-1A1.005,1.005,0,0,0,5.557,7.133Z"
        transform="translate(0 0)"
        fill={color}
      />
    </G>
  );
  // Outline variant with secondary-colored detail strokes.
  const TwoTone = () => (
    <G transform="translate(2 2)">
      <Path
        d="M18.46,5.149V13.3c0,3.02-1.89,5.15-4.91,5.15H4.9c-3.02,0-4.9-2.13-4.9-5.15V5.149C0,2.129,1.89,0,4.9,0h8.65C16.57,0,18.46,2.129,18.46,5.149Z"
        transform="translate(0.75 0.751)"
        fill="none"
        stroke={color}
        strokeLinecap="round"
        strokeLinejoin="round"
        strokeMiterlimit="10"
        strokeWidth={strokeWidth}
      />
      <Path
        d="M0,4.14,1.529,2.527a1.4,1.4,0,0,1,2.032-.008l.885.9a1.433,1.433,0,0,0,2.131-.094L8.806.617a1.687,1.687,0,0,1,2.515-.1L13.4,2.655"
        transform="translate(3.281 10.292)"
        fill="none"
        stroke={secondaryColor}
        strokeLinecap="round"
        strokeLinejoin="round"
        strokeMiterlimit="10"
        strokeWidth={strokeWidth}
        opacity={opacity}
      />
      <Path
        d="M3.507,1.754A1.754,1.754,0,1,1,1.753,0,1.755,1.755,0,0,1,3.507,1.754Z"
        transform="translate(4.806 5.38)"
        fill="none"
        stroke={secondaryColor}
        strokeLinecap="round"
        strokeLinejoin="round"
        strokeMiterlimit="10"
        strokeWidth={strokeWidth}
        opacity={opacity}
      />
    </G>
  );
  // Outline variant drawn inside a rounded "squircle" frame.
  const Curved = () => (
    <G transform="translate(2 2)">
      <Path
        d="M0,3.71S.812,2.073,1.994,2.073,3.78,3.448,5.09,3.448,7.868,0,9.352,0,11.9,2.391,11.9,2.391"
        transform="translate(4.071 10.749)"
        fill="none"
        stroke={color}
        strokeLinecap="round"
        strokeLinejoin="round"
        strokeMiterlimit="10"
        strokeWidth={strokeWidth}
      />
      <Path
        d="M3.115,1.558A1.558,1.558,0,1,1,1.557,0,1.558,1.558,0,0,1,3.115,1.558Z"
        transform="translate(5.024 5.547)"
        fill="none"
        stroke={color}
        strokeLinecap="round"
        strokeLinejoin="round"
        strokeMiterlimit="10"
        strokeWidth={strokeWidth}
      />
      <Path
        d="M0,9.25c0,6.937,2.313,9.25,9.25,9.25s9.25-2.313,9.25-9.25S16.187,0,9.25,0,0,2.313,0,9.25Z"
        transform="translate(0.75 0.75)"
        fill="none"
        stroke={color}
        strokeLinecap="round"
        strokeLinejoin="round"
        strokeMiterlimit="10"
        strokeWidth={strokeWidth}
      />
    </G>
  );
  // Invoke the selected builder directly — see the component doc above.
  switch (set) {
    case 'bold':
      return Bold();
    case 'bulk':
      return Bulk();
    case 'broken':
      return Broken();
    case 'two-tone':
      return TwoTone();
    case 'curved':
      return Curved();
    default:
      return Light();
  }
};
Image.displayName = 'IconlyImage';
export default withIcon(Image);
|
import logging
class CustomLogger(logging.Logger):
    """Logger subclass with a numeric-severity convenience method.

    Severity codes map onto the standard logging methods:
    0=debug, 1=info, 2=warning, 3=error, 4=critical.
    """

    def log_custom(self, message, severity):
        """Log *message* at the level selected by *severity* (0-4).

        Raises:
            ValueError: if *severity* matches none of the known codes.
        """
        # Equality-based dispatch (not a dict lookup) so comparison
        # semantics match a plain if/elif chain for any severity value.
        for code, emit in (
            (0, self.debug),
            (1, self.info),
            (2, self.warning),
            (3, self.error),
            (4, self.critical),
        ):
            if severity == code:
                emit(message)
                return
        raise ValueError("Invalid severity level")
# Usage example: exercise severities 0-3 (debug through error), one
# message per level.
custom_logger = CustomLogger(__name__)
for message, severity in (
    ("Debug message", 0),
    ("Info message", 1),
    ("Warning message", 2),
    ("Error message", 3),
):
    custom_logger.log_custom(message, severity)
custom_logger.log_custom("Critical message", 4) |
<filename>JavaScript/promises.js
// Reference: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise
// (the bare URL only parsed by accident as a label plus a line comment —
// it is now an explicit comment)
// The Promise.race() method returns a promise that fulfills or rejects as soon as one of the promises in an iterable fulfills or rejects, with the value or reason from that promise.
const promise1 = new Promise((resolve, reject) => {
  setTimeout(resolve, 500, "one");
});
const promise2 = new Promise((resolve, reject) => {
  setTimeout(resolve, 100, "two");
});
Promise.race([promise1, promise2]).then((value) => {
  console.log(value);
  // Both resolve, but promise2 is faster
});
// expected output: "two"

// For example, the following code races two Promises. The second one resolves sooner, and the result of the other one is discarded.
// Each example runs inside its own async IIFE so that (a) `await` is
// legal outside an ES module and (b) the repeated `const p1`/`p2`
// bindings no longer collide — the original redeclarations were a
// SyntaxError that made the whole file unparsable.
(async () => {
  const p1 = new Promise((res) => setTimeout(() => res("p1"), 1000));
  const p2 = new Promise((res) => setTimeout(() => res("p2"), 500));
  const result = await Promise.race([p1, p2]);
  // result = p2
})();

// Similarly, it works for rejections also. If the winning Promise is rejected, the race is rejected:
(async () => {
  const p1 = new Promise((res) => setTimeout(() => res("p1"), 1000));
  const p2 = new Promise((_r, rej) => setTimeout(() => rej("p2"), 500));
  try {
    const result = await Promise.race([p1, p2]);
  } catch (e) {
    // e = p2
  }
})();
|
#! /bin/bash -e
# Configure and start a Micro Focus Enterprise Server region from an XML
# definition. Usage: Install-Region <path to Region xml>
#Log output
# Mirror all stdout/stderr to a log file, syslog (tag user-data) and the
# EC2 console so boot-time runs remain debuggable.
exec > >(tee /var/log/Install-Region.log|logger -t user-data -s 2>/dev/console) 2>&1
if [ "$#" -ne 1 ]
then
    echo "Not Enough Arguments supplied."
    echo "Configure-ODBC <path to Region xml>"
    exit 1
fi
# Capture the argument before shifting (quoted so paths containing
# spaces survive later expansion).
RegionXMLPath="$1"
export TERM="xterm"
# Drop the positional parameter so the sourced environment script below
# does not see it as one of its own arguments.
shift
source /opt/microfocus/EnterpriseDeveloper/bin/cobsetenv
mfds -g 5 $RegionXMLPath D |
#!/bin/bash
# Report whether any mysql process is currently running: print the
# matching `ps aux` lines and their total character count, or a notice
# when nothing matches. The second grep filters out the grep itself.
mysql_procs="$(ps aux | grep mysql | grep -v grep)"
size=${#mysql_procs}
if [ "$size" -gt 0 ]; then
    # Deliberately left unquoted: runs of whitespace in the ps output are
    # collapsed to single spaces on echo.
    echo ${mysql_procs}
    echo $size
else
    echo "mysql isn't running!"
fi
|
def is_coprime(a, b):
    """Return True when ``a`` and ``b`` are coprime, i.e. gcd(a, b) == 1.

    Uses math.gcd (O(log min(|a|, |b|))) instead of the original trial
    division over range(2, min(a, b) + 1). This also fixes a bug: for
    negative inputs that range was empty, so e.g. (-4, 6) was wrongly
    reported as coprime. math.gcd works on the absolute values.
    """
    from math import gcd  # local import keeps the snippet self-contained
    return gcd(a, b) == 1

result = is_coprime(10, 25)
print(result) |
# Use a loop to print the results of the following equation: x2 - 3x + 4
# Evaluates the quadratic x**2 - 3*x + 4 for every integer x from 1 to 10
# inclusive and prints one "x = ..., result = ..." line per value.
for x in range(1, 11):
result = (x**2) - (3*x) + 4
print(f"x = {x}, result = {result}") |
// Doxygen-generated search index fragment: maps the HTML-escaped search
// key for "core_cm7.txt" to its documentation page. Generated file — do
// not edit by hand.
var searchData=
[
['core_5fcm7_2etxt',['core_cm7.txt',['../core__cm7_8txt.html',1,'']]]
];
|
<reponame>rovedit/Fort-Candle<filename>extern/glow-extras/pipeline/glow-extras/pipeline/stages/implementations/OutputStage.hh
#pragma once
#include <array>
#include <string>
#include <glow/common/shared.hh>
#include <glow/fwd.hh>
#include "../../fwd.hh"
#include "../RenderStage.hh"
#include "../StageCamera.hh"
namespace glow
{
namespace pipeline
{
GLOW_SHARED(class, OutputStage);
/// Final pipeline stage: takes the postprocessing stage's result, applies
/// the 3D color LUT, and writes the image to the output target (optionally
/// restricted to a caller-supplied viewport rectangle).
class OutputStage : public RenderStage
{
private:
/// Size of the color LUT in each dimensions
/// This value has to coincide with the define COLOR_LUT_DIMS
/// in shader/internal/pass/output/output.fsh
static constexpr auto colorLutDimensions = 16u;
// == Shaders ==
SharedProgram mShaderOutput;
bool mShaderRegistered = false; // lazily registered on first execution — TODO confirm against .cc
// == Dependencies ==
SharedPostprocessingStage mStagePostprocessing;
// == Other ==
// Identity ("neutral") LUT used when no grading LUT is supplied.
SharedTexture3D mNeutralColorLut;
// Optional explicit viewport; when disabled the full target is used.
bool mViewportEnabled = false;
tg::ipos2 mViewportOffset;
tg::isize2 mViewportSize;
private:
/// Builds the neutral (identity) color LUT texture.
static SharedTexture3D generateNeutralLut();
protected:
/// Called when the stage is supposed to run
void onExecuteStage(RenderContext const& ctx, RenderCallback& rc) override;
public:
OutputStage(SharedPostprocessingStage const& postprocessingStage);
/// Restrict output to the given rectangle (offset + size, in pixels).
void setViewport(tg::ipos2 const& offset, tg::isize2 const& size);
/// Revert to rendering over the full output target.
void clearViewport();
bool isViewportEnabled() const { return mViewportEnabled; }
SharedPostprocessingStage const& getPostprocessingStage() const { return mStagePostprocessing; }
StageType getType() const override { return StageType::Internal; }
std::string name() const override { return "Output"; }
};
}
}
|
#!/bin/bash
###############################################################################
# CONTENT header
# eQTL detection for filtering evaluation
# GOAL: run cis eQTL detection with FastQTL for filtering evaluation
# DATE: 30 July 2020
# INPUT: normalized gene counts files (.tab)
# OUTPUT: eQTL statistics files (.txt)
###############################################################################
# directories definitions
refdir=/scratch/wally/FAC/FBM/CIG/pfranken/bxd_map2/data/references
mapdir=/scratch/wally/FAC/FBM/CIG/pfranken/bxd_map2/data/FilteringTesting/mapping
eQTLdir=/scratch/wally/FAC/FBM/CIG/pfranken/bxd_map2/data/FilteringTesting/eQTL
codedir=/users/ngobet/scripts
# transfert input on cluster
mkdir -p bxdmap_scratchdirectory2/data/FilteringTesting
mkdir -p $mapdir
mkdir -p $eQTLdir
scp -r ngobet@pccig3009.unil.ch:/mnt/nas/BXD/data/transcriptome/4_normalization/FilteringTesting/*.tab $mapdir
# fastQTL is used to:
# tests association for a molecular phenotype only for variants that are 2 Mb above or below the transcription start site of the gene coding for the phenotype (--window 2e6, default is 1e6)
# chose to use seed one to help reproducibility of permutations (--seed 1, no default)
# uses the beta distribution approximation estimated from 1000 permutations to calculate adjusted p-values (--permute 1000, no default)
# EPACTS is used for compression and indexing of input files.
# load softwares
module load HPC/Software
module add UHTS/Analysis/FastQTL/2.184
module add UHTS/Analysis/EPACTS/3.2.6
# copy gene position file from 3009 (run command from Wally)
##scp -r ngobet@pccig3009.unil.ch:/mnt/nas/BXD/references/transcriptome/GenePosition.txt $refdir/transcriptome_annotation/GenePosition.txt
# pre-processing: format and index genotypes
# transform genotypes to vcf format
scripts/transformGenotypesToVcf.py $refdir/genotypes/BXDGenotypes.geno $refdir/genotypes/BXDGenotypes.vcf
# compress and index genotypes
bgzip -f $refdir/genotypes/BXDGenotypes.vcf && tabix -p vcf $refdir/genotypes/BXDGenotypes.vcf.gz
# NOTE(review): "cut -d '/' -f 12" extracts the filename by absolute path
# depth — fragile if $mapdir ever moves; basename would be robust.
listexpressionfiles=($(ls -d $mapdir/*.tab | cut -d "/" -f 12))
for file in "${listexpressionfiles[@]}"
do
echo $file
filtering=$(basename -s .tab $file)
echo $filtering
# transform phenotypes (gene expression) to bed format (UCSC format)
scripts/transformGenePhenotypesToBedUCSC.py $mapdir/$file $refdir/genotypes/BXDGenotypes.geno $refdir/transcriptome_annotation/GenePosition.txt $mapdir/$filtering.bed > $mapdir/$filtering\_transformGenePhenotypesToBedUCSC.out 2> $mapdir/$filtering\_transformGenePhenotypesToBedUCSC.err
# compress and index phenotypes
bgzip -f $mapdir/$filtering.bed && tabix -p bed $mapdir/$filtering.bed.gz
# prepare commands
fastQTL --vcf $refdir/genotypes/BXDGenotypes.vcf.gz --bed $mapdir/$filtering.bed.gz --out $eQTLdir/$filtering --commands 25 $codedir/eQTL_$filtering\_listcommands.sh --window 2e6 --permute 1000 --seed 1
# run commands
bash $codedir/eQTL_$filtering\_listcommands.sh > $eQTLdir/eQTL_$filtering\_listcommands.out 2> $eQTLdir/eQTL_$filtering\_listcommands.err
# check if it run correctly, if not exclude last phenotype before error
# Retry loop: as long as the FastQTL commands produced stderr output,
# identify the phenotype that crashed the run, add it to the exclusion
# list, and re-run the whole command list.
testvar=$(wc -l $eQTLdir/eQTL_$filtering\_listcommands.err | cut -d " " -f 1)
while [ $testvar -gt 0 ]
do
# check if it run correctly, if not exclude last phenotype before error
echo "Error detected"
# identify last phenotype
issuecommandfull=$(grep -m 1 "fastQTL" $eQTLdir/eQTL_$filtering\_listcommands.err | sed 's/ \+/,/g' | cut -d "," -f 2)
issuecommand=$(sed 's/ --exclude-phenotypes PhenoToExclude.txt//g' <<< $issuecommandfull)
$issuecommand &> issueresults.txt
grep "Processing gene" issueresults.txt | tail -n 1 | cut -d " " -f 3 | sed 's/[][]//g' >> $eQTLdir/$filtering\_PhenoToExclude.txt
# add exclude phenotype(s) option to script if not already contained
if ! grep -q "\-\-exclude\-phenotypes" $codedir/eQTL_$filtering\_listcommands.sh
then
sed -i "s,$, --exclude-phenotypes $eQTLdir\/$filtering\_PhenoToExclude.txt,g" $codedir/eQTL_$filtering\_listcommands.sh
fi
# re-run excluding this phenotype
bash $codedir/eQTL_$filtering\_listcommands.sh > $eQTLdir/eQTL_$filtering\_listcommands.out 2> $eQTLdir/eQTL_$filtering\_listcommands.err
rm issueresults.txt
echo $testvar
testvar=$(wc -l $eQTLdir/eQTL_$filtering\_listcommands.err | cut -d " " -f 1)
done
echo "The eQTL detection successfully run."
# post-processing
# group results from different regions
cat $eQTLdir/$filtering.chr* > $eQTLdir/$filtering.txt
# add entry with NA for each gene excluded from analysis if any
if [ -e $eQTLdir/$filtering\_PhenoToExclude.txt ]
then
sed 's,$, 0 NA NA NA NA NA NA NA NA NA,g' $eQTLdir/$filtering\_PhenoToExclude.txt >> $eQTLdir/$filtering.txt
fi
# clean up unneeded files
# NOTE(review): this removes the extensionless $eQTLdir/$filtering, yet
# the Rscript below is passed that same path — presumably the R script
# appends ".txt" itself; confirm against correctMultiPhenotypeseQTL.R.
rm $eQTLdir/$filtering.chr* $eQTLdir/$filtering
# calculate q-values
Rscript $codedir/correctMultiPhenotypeseQTL.R $eQTLdir/$filtering
done
# copy files back on 3009 (command to run from cluster)
scp -r bxdmap_scratchdirectory2/data/FilteringTesting ngobet@pccig3009.unil.ch:/mnt/nas/BXD/data/transcriptome/
|
<gh_stars>0
package py.edu.uca.lp3.repository;
import org.springframework.data.repository.PagingAndSortingRepository;
import py.edu.uca.lp3.domain.Director;
// Repository for storing and retrieving data about the clubs' directors.
// Extending PagingAndSortingRepository provides CRUD plus paging and
// sorting operations for Director entities keyed by Long ids.
public interface DirectorRepository extends PagingAndSortingRepository<Director, Long> {
}
|
#!/usr/bin/env sh
################################################################################
# VARIABLES
################################################################################
# Step counter used by the printing helpers.
count=1
# ANSI escape sequences for colored/styled terminal output.
reset="\033[0m"
highlight="\033[41m\033[97m"
dot="\033[31m▸ $reset"
dim="\033[2m"
blue="\e[34m"
green="\e[32m"
yellow="\e[33m"
tag_green="\e[30;42m"
tag_blue="\e[30;46m"
bold=$(tput bold)
normal=$(tput sgr0)
underline="\e[37;4m"
indent=" "
# Get full directory name of this script
cwd="$(cd "$(dirname "$0")" && pwd)"
NVM_DIRECTORY="$HOME/.nvm"
# Shell snippets appended to the user's profile to load nvm and its bash
# completion (the \\n sequences are expanded later when written out).
NVM_SOURCE_PATH="#!/usr/bin/env bash\\nexport NVM_DIR=\"${NVM_DIRECTORY}\"\\n[ -s \"\$NVM_DIR/nvm.sh\" ] && \\. \"\$NVM_DIR/nvm.sh\" # This loads nvm\\n"
NVM_COMPLETION_PATH='[ -s "$NVM_DIR/bash_completion" ] && \. "$NVM_DIR/bash_completion" # This loads nvm bash_completion\n'
################################################################################
# Utility Functions
################################################################################
# Print $1 in ANSI color $2 (tput color number); %b expands the escape
# sequences. tput errors are discarded so non-terminals stay clean.
_print_in_color() {
printf "%b" \
"$(tput setaf "$2" 2> /dev/null)" \
"$1" \
"$(tput sgr0 2> /dev/null)"
}
# Read lines from stdin and echo each as an indented red ERROR line.
_print_error_stream() {
while read -r line; do
print_in_red "     ↳ ERROR: $line\n"
done
}
# Render a text spinner while process $1 is alive.
#   $1 = PID to watch, $2 = command string (unused beyond context),
#   $3 = message shown next to the spinner frame.
_show_spinner() {
local -r FRAMES='/-\|'
# shellcheck disable=SC2034
local -r NUMBER_OR_FRAMES=${#FRAMES}
local -r CMDS="$2"
local -r MSG="$3"
local -r PID="$1"
local i=0
local frameText=""
# -----------------------------------------------------------------
# Note: In order for the Travis CI site to display
# things correctly, it needs special treatment, hence,
# the "is Travis CI?" checks.
if [ "$TRAVIS" != "true" ]; then
# Provide more space so that the text hopefully
# doesn't reach the bottom line of the terminal window.
#
# This is a workaround for escape sequences not tracking
# the buffer position (accounting for scrolling).
#
# See also: https://unix.stackexchange.com/a/278888
printf "\n\n\n"
tput cuu 3
tput sc
fi
# -----------------------------------------------------------------
# Display spinner while the commands are being executed.
# (kill -0 only probes whether the PID is still alive.)
while kill -0 "$PID" &>/dev/null; do
frameText="   [${FRAMES:i++%NUMBER_OR_FRAMES:1}] $MSG"
# -------------------------------------------------------------
# Print frame text.
if [ "$TRAVIS" != "true" ]; then
printf "%s\n" "$frameText"
else
printf "%s" "$frameText"
fi
sleep 0.2
# -------------------------------------------------------------
# Clear frame text (restore saved cursor position, or just
# return the carriage on Travis).
if [ "$TRAVIS" != "true" ]; then
tput rc
else
printf "\r"
fi
done
}
# Terminate every background job of the current shell and reap it.
_kill_all_subprocesses() {
local i=""
for i in $(jobs -p); do
kill "$i"
wait "$i" &> /dev/null
done
}
# Install handler $2 for signal/event $1 unless an identical trap is
# already set (keeps repeated calls idempotent).
# NOTE(review): '$2' is single-quoted, so it is expanded when the trap
# fires, not now — at that point $2 refers to the firing context's
# positional parameters. This looks like it should be "$2"; confirm.
_set_trap() {
trap -p "$1" | grep "$2" &> /dev/null \
|| trap '$2' "$1"
}
# Symlink $1 (src) to $2 (dst). If dst already exists, either skip (when
# it already points at src) or interactively ask whether to skip,
# overwrite or back up — once per file or once for all, via the
# overwrite_all/backup_all/skip_all flags that the CALLER declares
# (symlink_files makes them locals; bash's dynamic scoping makes them
# visible here).
# NOTE(review): uses bashisms (==, read -n) despite the file's sh shebang.
_link_file() {
local src=$1 dst=$2
local overwrite= backup= skip=
local action=
if [ -f "$dst" -o -d "$dst" -o -L "$dst" ]
then
if [ "$overwrite_all" == "false" ] && [ "$backup_all" == "false" ] && [ "$skip_all" == "false" ]
then
# Already linked to the right place? Then nothing to ask.
local currentSrc="$(readlink $dst)"
if [ "$currentSrc" == "$src" ]
then
skip=true;
else
printf "\r  ${yellow}!${reset} File already exists: $dst ($(basename "$src")), what do you want to do?
  [s]kip, [S]kip all, [o]verwrite, [O]verwrite all, [b]ackup, [B]ackup all? "
read -n 1 action
case "$action" in
o )
overwrite=true;;
O )
overwrite_all=true;;
b )
backup=true;;
B )
backup_all=true;;
s )
skip=true;;
S )
skip_all=true;;
* )
;;
esac
fi
fi
# Per-file answer falls back to the sticky "all" answer.
overwrite=${overwrite:-$overwrite_all}
backup=${backup:-$backup_all}
skip=${skip:-$skip_all}
if [ "$overwrite" == "true" ]
then
rm -rf "$dst"
print_in_green "\n  ✓ deleted $dst"
fi
if [ "$backup" == "true" ]
then
mv "$dst" "${dst}.backup"
print_in_green "\n  ✓ moved $dst to ${dst}.backup"
fi
if [ "$skip" == "true" ]
then
printf "\n  ${dim}✓ $src already linked. Skipped.${reset}"
fi
fi
if [ "$skip" != "true" ]  # "false" or empty
then
ln -s "$1" "$2"
print_in_green "\n  ✓ linked $1 to $2"
fi
}
################################################################################
# Print Functions
################################################################################
# Thin wrappers around _print_in_color for each tput color number.
print_in_red() {
_print_in_color "$1" 1
}
print_in_green() {
_print_in_color "$1" 2
}
print_in_yellow() {
_print_in_color "$1" 3
}
print_in_blue() {
_print_in_color "$1" 4
}
print_in_purple() {
_print_in_color "$1" 5
}
print_in_cyan() {
_print_in_color "$1" 6
}
print_in_white() {
_print_in_color "$1" 7
}
# Print success/error for exit code $1 with label $2; propagates $1 so it
# can be used in command chains.
print_result() {
if [ "$1" -eq 0 ]; then
print_success "$2"
else
print_error "$2"
fi
return "$1"
}
print_question() {
print_in_yellow "   [?] $1\n"
}
print_success() {
print_in_green "   [✓] $1\n"
}
# Dimmed success line for skipped/no-op steps.
print_success_muted() {
printf "   ${dim}[✓] $1${reset}\n" "$@"
}
print_muted() {
printf "   ${dim}$1${reset}\n" "$@"
}
print_warning() {
print_in_yellow "   [!] $1\n"
}
print_error() {
print_in_red "   [𝘅] $1 $2\n"
}
################################################################################
# Meta Checks
################################################################################
# Map `uname -s` onto the identifiers the rest of the script understands:
# "macOS" for Darwin, "ubuntu" for Linux with an /etc/lsb-release file,
# and the raw kernel name for anything else. Result goes to stdout.
get_os() {
    local kernel
    kernel="$(uname -s)"
    case "$kernel" in
        Darwin)
            printf "%s" "macOS"
            ;;
        Linux)
            if [ -e "/etc/lsb-release" ]; then
                printf "%s" "ubuntu"
            else
                printf "%s" "$kernel"
            fi
            ;;
        *)
            printf "%s" "$kernel"
            ;;
    esac
}
# Print the OS version string for the OS detected by get_os: product
# version on macOS, distro release on Ubuntu, empty string otherwise.
get_os_version() {
local os=""
local version=""
# -----------------------------------------------------------------
os="$(get_os)"
if [ "$os" == "macOS" ]; then
version="$(sw_vers -productVersion)"
elif [ "$os" == "ubuntu" ]; then
version="$(lsb_release -d | cut -f2 | cut -d' ' -f2)"
fi
printf "%s" "$version"
}
# Probe internet connectivity with a single 1-second ping and report the
# result.
#
# Fix: the original wrapped the ping in `[ ... ]`, which made `[` test
# the literal words "ping -q -w1 ..." instead of running ping — the test
# always failed, so the success branch was taken unconditionally and no
# actual probe ever happened. Run ping directly and branch on its status.
check_internet_connection() {
if ! ping -q -w1 -c1 google.com &>/dev/null; then
print_error "Please check your internet connection";
# NOTE(review): exits 0 even on failure (preserved from the original)
# — callers cannot detect the error from the exit status.
exit 0
else
print_success "Internet connection";
fi
}
################################################################################
# Execution
################################################################################
# Run command string $1 in the background with a spinner, showing message
# $2 (defaults to the command itself). Stderr is captured to a temp file
# and replayed as ERROR lines on failure. Returns the command's exit code.
execute() {
local -r CMDS="$1"
local -r MSG="${2:-$1}"
local -r TMP_FILE="$(mktemp /tmp/XXXXX)"
local exitCode=0
local cmdsPID=""
# -----------------------------------------------------------------
# If the current process is ended,
# also end all its subprocesses.
_set_trap "EXIT" "_kill_all_subprocesses"
# -----------------------------------------------------------------
# Execute commands in background
# (&>/dev/null silences stdout; the later 2> re-points stderr at the
# temp file so only stderr is kept for the failure report).
eval "$CMDS" \
&> /dev/null \
2> "$TMP_FILE" &
cmdsPID=$!
# -----------------------------------------------------------------
# Show a spinner if the commands
# require more time to complete.
_show_spinner "$cmdsPID" "$CMDS" "$MSG"
# -----------------------------------------------------------------
# Wait for the commands to no longer be executing
# in the background, and then get their exit code.
wait "$cmdsPID" &> /dev/null
exitCode=$?
# -----------------------------------------------------------------
# Print output based on what happened.
print_result $exitCode "$MSG"
if [ $exitCode -ne 0 ]; then
_print_error_stream < "$TMP_FILE"
fi
rm -rf "$TMP_FILE"
# -----------------------------------------------------------------
return $exitCode
}
mkd() {
    # Create directory $1 (with parents) unless something already exists at
    # that path: a pre-existing directory is reported as skipped, a
    # pre-existing non-directory is an error. No-op when $1 is empty.
    [ -n "$1" ] || return 0
    if [ ! -e "$1" ]; then
        execute "mkdir -p $1" "$1"
    elif [ -d "$1" ]; then
        printf "  ${dim}✓ $1 already exists. Skipped.${reset}\n"
    else
        print_error "$1 - a file with the same name already exists!"
    fi
}
# Link every file under ./symlink (max depth 2, skipping .git) into $HOME
# as a dotfile, delegating conflict handling to _link_file. The *_all
# flags are declared here as locals so _link_file (called below) can
# flip them via bash's dynamic scoping.
symlink_files() {
local overwrite_all=false backup_all=false skip_all=false
for src in $(find -H "symlink" -maxdepth 2 -type f -not -path '*.git*')
do
# NOTE(review): "${src#%.*}" mixes the # and % operators and strips
# nothing as written — it looks like a typo for "${src%.*}" (strip
# extension). Confirm which destination name is intended.
dst="$HOME/.$(basename "${src#%.*}")"
_link_file "$(pwd)/$src" "$dst"
done
print_in_green "\n  Symlink finished! \n"
}
################################################################################
# Prompts
################################################################################
# Cache sudo credentials up front and keep them fresh (re-validating every
# 60s) until this script exits, so later steps never prompt mid-run.
ask_for_sudo() {
# Ask for the administrator password upfront.
sudo -v &> /dev/null
# Update existing `sudo` time stamp
# until this script has finished.
#
# https://gist.github.com/cowboy/3118588
# Keep-alive: update existing `sudo` time stamp until script has finished
# (kill -0 "$$" exits the loop once the parent script is gone).
while true; do sudo -n true; sleep 60; kill -0 "$$" || exit; done &>/dev/null &
print_success "Password cached"
}
# Prompt $1 as a yes/no question; $2 ("Y" or "N") selects the default
# used when the user just presses enter. Returns 0 for yes, 1 for no;
# loops until a valid answer is given.
ask() {
# https://djm.me/ask
local prompt default reply
while true; do
if [ "${2:-}" = "Y" ]; then
prompt="Y/n"
default=Y
elif [ "${2:-}" = "N" ]; then
prompt="y/N"
default=N
else
prompt="y/n"
default=
fi
# Ask the question (not using "read -p" as it uses stderr not stdout)
echo "   [?] $1 [$prompt] "
# Read the answer (use /dev/tty in case stdin is redirected from somewhere else)
read reply </dev/tty
# Default?
if [ -z "$reply" ]; then
reply=$default
fi
# Check if the reply is valid
case "$reply" in
Y*|y*) return 0 ;;
N*|n*) return 1 ;;
esac
done
}
################################################################################
#
################################################################################
# Succeeds when the Xcode command line tools are installed (exit status
# of xcode-select is propagated).
xcode_tools_is_installed() {
xcode-select --print-path &> /dev/null
}
# return 1 if global command line program installed, else 0
# (note: echoes "1"/"0" on stdout rather than using the exit status).
cli_is_installed() {
# set to 1 initially
local return_=1
# set to 0 if not found
type $1 >/dev/null 2>&1 || { local return_=0; }
# return value
echo "$return_"
}
# Copy the public key to the clipboard and open GitHub's SSH-key page.
# NOTE(review): relies on bash dynamic scoping — $pub is the local
# declared in ssh_key_setup, visible here when called from that chain.
copy_key_github() {
    inform 'Public key copied! Paste into Github…'
    [[ -f $pub ]] && cat $pub | pbcopy
    open 'https://github.com/account/ssh'
    read -p "   ✦ Press enter to continue…"
    print_success "SSH key"
    return
}
# Offer to upload an existing key to GitHub.
github_key_check() {
    if ask "SSH key found. Enter it in Github?" Y; then
        copy_key_github;
    else
        print_success "SSH key";
    fi
}
# Offer to generate a new RSA key, then optionally upload it.
create_ssh_key() {
    if ask "No SSH key found. Create one?" Y; then
        ssh-keygen -t rsa; github_key_check;
    else
        return 0;
    fi
}
# Entry point: create or upload ~/.ssh/id_rsa.pub as appropriate.
ssh_key_setup() {
    local pub=$HOME/.ssh/id_rsa.pub
    if ! [[ -f $pub ]]; then
        create_ssh_key
    else
        github_key_check
    fi
}
# Exit 0 when a Mac App Store account is signed in, 1 otherwise.
mas_setup() {
    if ! mas account > /dev/null; then
        return 1
    fi
    return 0
}
# Install the formula named in the global $brew unless it is already
# present.
install_brews() {
    # Bug fix: the previous unanchored, unquoted `grep $brew` matched
    # substrings (e.g. "git" matched "gitless"), wrongly skipping installs.
    # -x requires a whole-line match against `brew list` output.
    if ! brew list --formula | grep -qx "$brew"; then
        echo_install "Installing $brew"
        brew install $brew >/dev/null
        print_in_green "${bold}✓ installed!${normal}\n"
    else
        print_success_muted "$brew already installed. Skipped."
    fi
}
# Install the cask named in the global $brew unless it is already present.
install_casks() {
    # Bug fix: unanchored `grep $brew` matched substrings of other cask
    # names; -x requires a whole-line match.
    if ! brew list --cask | grep -qx "$brew"; then
        echo_install "Installing $brew"
        brew install $brew >/dev/null
        print_in_green "${bold}✓ installed!${normal}\n"
    else
        print_success_muted "$brew already installed. Skipped."
    fi
}
# Install a Mac App Store app by numeric id ($1) with display name ($2).
install_application_via_app_store() {
    # NOTE(review): `grep $1` is unanchored — an id that is a substring of
    # another installed id would be skipped; confirm this is acceptable.
    if ! mas list | grep $1 &> /dev/null; then
        echo_install "Installing $2"
        mas install $1 >/dev/null
        print_in_green "${bold}✓ installed!${normal}\n"
    else
        print_success_muted "$2 already installed. Skipped."
    fi
}
# Install a global npm package ($1) unless its CLI command ($2) already
# resolves (cli_is_installed echoes 1 when found, 0 when not).
install_npm_packages() {
    if [[ $(cli_is_installed $2) == 0 ]]; then
        echo_install "Installing $1"
        npm install $1 -g --silent
        print_in_green "${bold}✓ installed!${normal}\n"
    else
        print_success_muted "$1 already installed. Skipped."
    fi
}
# The releases are returned in the format
# {"id":3622206,"tag_name":"hello-1.0.0.11",…}
# we have to extract the tag_name.
# Prints the extracted tag; if no "tag_name" is present the input passes
# through unchanged (sed substitution simply does not match).
get_github_version() {
    echo $1 | sed -e 's/.*"tag_name":"\([^"]*\)".*/\1/'
}
################################################################################
# Text Formatting
################################################################################
# title <fmt> [args…] — bold section heading with a rule underneath.
title() {
    local fmt="$1"; shift
    printf "\n✦ ${bold}$fmt${normal}\n└─────────────────────────────────────────────────────○\n" "$@"
}
# chapter — like title, but auto-numbered via the global $count.
chapter() {
    local fmt="$1"; shift
    printf "\n✦ ${bold}$((count++)). $fmt${normal}\n└─────────────────────────────────────────────────────○\n" "$@"
}
# echo_install — progress line for an in-flight install (no newline, so
# a later print_in_green completes the line).
echo_install() {
    local fmt="$1"; shift
    printf " [↓] $fmt " "$@"
}
# todo — unchecked checklist line.
todo() {
    local fmt="$1"; shift
    printf " [ ] $fmt\n" "$@"
}
# inform — neutral informational line.
inform() {
    local fmt="$1"; shift
    printf " ✦ $fmt\n" "$@"
}
# announce — prominent one-line banner.
announce() {
    local fmt="$1"; shift
    printf "○───✦ $fmt\n" "$@"
}
# step — underlined sub-step heading.
step() {
    printf "\n ${dot}${underline}$@${reset}\n"
}
# label_blue <label> <text> — black-on-cyan label followed by blue text.
label_blue() {
    printf "\e[30;46m $1 \033[0m\e[34m $2 \033[0m\n"
}
# label_green <label> <text> — black-on-green label followed by green text.
label_green() {
    printf "\e[30;42m $1 \e[0m\e[32m $2 \033[0m\n"
}
# e_message — closing "all done" banner.
e_message() {
    printf "
╭───────────────────────────────────────────────────╮
│ ${bold}Congrats! You're all setup!${normal} │
│───────────────────────────────────────────────────│
│ Thanks for using macOS Setup! │
│ If you liked it, then you should star it! │
│ │
│ https://github.com/adsric/macos-setup │
╰───────────────────────────────────────────────────╯
"
}
|
import {
User,
Course
} from '../../../../src/typings/index.d';
import {assignCourseUserIds} from './utilities-service';
import {schema} from '../graphcool/testSchema';
import {GQLRequest} from '../../../../src/node_modules/prendus-shared/services/graphql-service';
import {createUUID} from '../../../../src/node_modules/prendus-shared/services/utilities-service';
const {
getNamedType,
GraphQLObjectType,
} = require('graphql');
const AUTH_TOKEN = '<KEY>';
// Deletion ordering for generated test entities: a type's position here
// is its rank; see dependencySort below for how ranks are compared.
const dependencyTree = [
  'Discipline',
  'Subject',
  'Concept',
  'Question',
  'Quiz',
  'QuestionResponse',
  'QuestionResponseRating',
  'UserEssay',
  'CategoryScore',
  'QuestionRating',
  'Assignment',
  'Course'
];

// Comparator: a type appearing LATER in dependencyTree sorts before one
// appearing earlier (descending rank). Unknown types rank -1.
function dependencySort(typeA, typeB): number {
  const rankA = dependencyTree.indexOf(typeA);
  const rankB = dependencyTree.indexOf(typeB);
  return rankA < rankB ? 1 : -1;
}
// Persist an arbitrary (generated) entity graph by invoking the schema
// mutation called `name` with `arb` as variables; resolves with that
// mutation field's payload.
export async function saveArbitrary(arb, name: string): Promise<object> {
  // The mutation's field definition supplies the argument types for the
  // generated GQL document.
  const mut = schema.getMutationType().getFields()[name];
  const gql = mutation(arb, mut, name);
  const data = await GQLRequest(gql, arb, AUTH_TOKEN, handleError);
  return data[name];
}
// Delete every object (by id) reachable from `data`, which was produced
// by the mutation named `name`; ids are collected and ordered via
// flattenTypedIds/dependencySort, then removed in one batched mutation.
export async function deleteArbitrary(data, name: string) {
  // The mutation's return type drives id discovery in the saved graph.
  const mut = schema.getMutationType().getFields()[name];
  const typedIds = flattenTypedIds(data, mut.type);
  const variables = deleteVariables(typedIds);
  const gql = deleteGql(typedIds);
  return GQLRequest(gql, variables, AUTH_TOKEN, handleError);
}
// Render one "$field: Type" declaration per key of `arb`, typing each
// from the mutation's argument list. Throws when a key has no matching
// mutation argument.
function operationParameters(arb, mut) {
  const declarations: string[] = [];
  for (const field of Object.keys(arb)) {
    const arg = mut.args.find(candidate => candidate.name === field);
    if (!arg)
      throw new Error(`Field ${field} is not in type ${mut.type.toString()}`);
    declarations.push(`$${field}: ${arg.type.toString()}`);
  }
  return declarations.join('\n');
}
// Render one "field: $field" argument binding per key of `arb`. Throws
// when a key is not a declared argument of the mutation.
function mutationParameters(arb, mut) {
  const declared = new Set(mut.args.map(arg => arg.name));
  const bindings: string[] = [];
  for (const field of Object.keys(arb)) {
    if (!declared.has(field))
      throw new Error(`Field ${field} is not in type ${mut.type.toString()}`);
    bindings.push(`${field}: $${field}`);
  }
  return bindings.join('\n');
}
// Build the selection set for a mutation result, mirroring the shape of
// the arbitrary `arb`. Arrays use their "widest" element's selection;
// scalars select the bare field; object types always include `id`.
function mutationSelections(arb, type, name) {
  if (Array.isArray(arb)) {
    if (arb.length)
      // Pick the longest selection among elements so no nested field of
      // any element is missed.
      return arb
        .map(el => mutationSelections(el, type, name))
        .reduce((max, str) => str.length > max.length ? str : max, '');
    else if (getNamedType(type) instanceof GraphQLObjectType)
      // Empty list of objects: still select id so the field is valid GQL.
      return `${name} {id}\n`
    else
      return `${name}\n`;
  }
  else if (typeof arb !== 'object')
    // Scalar leaf.
    return `${name}\n`;
  const gqlFields = getNamedType(type).getFields();
  // Only recurse into properties that actually exist on the GraphQL type.
  const fields = Object.keys(arb).filter(field => Boolean(gqlFields[field]));
  const selections = fields
    .map(
      field => mutationSelections(arb[field], gqlFields[field].type, field)
    )
    .join('\n');
  return `
    ${name} {
      id
      ${selections}
    }
  `;
}
// Assemble a complete mutation document for saveArbitrary: operation
// parameter declarations, field argument bindings, and a selection set
// mirroring `arb` (root name intentionally empty — the field itself is
// `name`).
function mutation(arb, mut, name) {
  return `mutation saveArbitrary(
    ${operationParameters(arb, mut)}
  ) {
    ${name} (
      ${mutationParameters(arb, mut)}
    ) ${mutationSelections(arb, mut.type, '')}
  }
  `;
}
// Build the variables object for deleteGql: each id keys itself, to
// match the `$<id>` parameters that deleteGql declares.
function deleteVariables(typedIds) {
  // Cleanup: the original also destructured an unused `type` binding.
  return typedIds.reduce(
    (result, { id }) => ({
      ...result,
      [id]: id
    }),
    {}
  );
}
// Build one mutation document deleting every typed id. Each delete field
// is aliased `<Type><index>` so repeated types stay distinct, and each
// id is passed via a `$<id>: ID!` variable (see deleteVariables).
function deleteGql(typedIds) {
  const declarations = typedIds.map(({ id }) => `$${id}: ID!`).join(', ');
  const deletions = typedIds
    .map(({ type, id }, i) => `
      ${type}${i}: delete${type}(id: $${id}) {
        id
      }
    `)
    .join("\n");
  return `
    mutation del(${declarations}) {
      ` + deletions + `
    }
  `;
}
// Walk `data` (a saved entity graph) guided by its GraphQL `type` and
// collect a {type, id} pair for every object carrying an id —
// deduplicated by id and sorted by dependencyTree rank so deletion runs
// in a safe order.
function flattenTypedIds(data, type) {
  if (Array.isArray(data))
    return data
      .map(datum => flattenTypedIds(datum, type))
      .reduce(flatten, []);
  const rawType = getNamedType(type);
  // Scalars/enums carry no ids.
  if (!(rawType instanceof GraphQLObjectType))
    return [];
  const fields = rawType.getFields();
  return Object.keys(data).reduce(
    (result, k) => {
      // Skip null/undefined fields.
      if (!data[k])
        return result;
      return k === 'id'
        ? [ ...result, { type: rawType.name, id: data.id } ]
        : [ ...result, ...flattenTypedIds(data[k], fields[k].type) ]
    },
    []
  )
  // Deduplicate by id (first occurrence wins).
  .reduce((filtered, typedId) => (filtered.some(({ id }) => id === typedId.id) ? filtered : [...filtered, typedId]), [])
  .sort((a, b) => dependencySort(a.type, b.type));
}
export async function createTestUser(role: string, name: string): Promise<User> {
const uid = createUUID();
const email = `test-${role}${name || ''}${<EMAIL>}@<EMAIL>`;
const data = await GQLRequest(`mutation create($email: String!) {
signupUser(email: $email, password: "<PASSWORD>") {
id
}
authenticateUser(email: $email, password: "<PASSWORD>") {
token
}
}`, {email}, AUTH_TOKEN, handleError);
const id = data.signupUser.id;
await GQLRequest(`mutation update($id: ID!, $role: UserRole!) {
updateUser(id: $id, role: $role) {
id
role
}
}`, {id, role}, AUTH_TOKEN, handleError);
return {
id: data.signupUser.id,
role,
token: data.authenticateUser.token
}
}
// Delete every given user in one batched mutation (one aliased
// deleteUser field and one $userN variable per user).
export async function deleteTestUsers(...users: User[]): Promise<object> {
  const params = users.map((user, i) => `$user${i}: ID!`).join(', ');
  // Fix: the fields were previously concatenated with `+`, relying on
  // Array#toString's implicit comma join; join('\n') is explicit.
  const query = `
    mutation del(${params}) {
      ` + users.map((user, i) => `user${i}: deleteUser(id: $user${i}) { id }`).join('\n') + `
    }
  `;
  const variables = users.reduce((result, user, i) => {
    return {
      ...result,
      [`user${i}`]: user.id
    }
  }, {});
  // Fix: declared Promise<object> but previously resolved undefined;
  // now resolves with the mutation payload.
  return GQLRequest(query, variables, AUTH_TOKEN, handleError);
}
// Fetch the full object graph hanging off a course (assignments,
// questions, responses, ratings, scores, concepts, analytics,
// purchases) and hand it to deleteArbitrary, which deletes every
// reachable id in dependency order. 'createCourse' names the mutation
// whose return type describes this graph.
export async function deleteCourseArbitrary(courseId: string): Promise<object> {
  const data = await GQLRequest(`
    query getCourse($courseId: ID!) {
      Course(id: $courseId) {
        id
        assignments {
          id
          analytics {
            id
          }
          questions {
            id
            quiz {
              id
            }
            ratings {
              id
              scores {
                id
              }
            }
            responses {
              id
              ratings {
                id
                scores {
                  id
                }
              }
              userEssays {
                id
              }
            }
            concept {
              id
              subject {
                id
                discipline {
                  id
                }
              }
            }
            analytics {
              id
            }
          }
        }
        purchases {
          id
        }
      }
    }
  `, { courseId }, AUTH_TOKEN, handleError);
  return deleteArbitrary(data.Course, 'createCourse');
}
// Enroll `userId` as a student on `courseId` via the generated
// addToStudentsAndCourses relation mutation.
export function enrollInTestCourse(userId: string, courseId: string): Promise<object> {
  return GQLRequest(`
    mutation authorizeUserOnCourse($userId: ID!, $courseId: ID!) {
      addToStudentsAndCourses(
        enrolledCoursesCourseId: $courseId,
        enrolledStudentsUserId: $userId
      ) {
        enrolledStudentsUser {
          id
        }
      }
    }
  `, { userId, courseId }, AUTH_TOKEN, handleError);
}
// Record a fake purchase of `courseId` by `userId` (fixed amount and a
// sentinel Stripe token — no real payment occurs).
export function payForTestCourse(userId: string, courseId: string): Promise<object> {
  return GQLRequest(`
    mutation authorizeUserOnCourse($userId: ID!, $courseId: ID!) {
      createPurchase(
        amount: 1000,
        stripeTokenId: "fake-token-for-testing",
        userId: $userId,
        courseId: $courseId
      ) {
        id
      }
    }
  `, { userId, courseId }, AUTH_TOKEN, handleError);
}
// Fully authorize a user on a course: enroll first, then pay.
// Fix: declared Promise<object> but previously resolved undefined; now
// resolves with the purchase payload.
export async function authorizeTestUserOnCourse(userId: string, courseId: string): Promise<object> {
  await enrollInTestCourse(userId, courseId);
  return payForTestCourse(userId, courseId);
}
// Fetch analytics events matching `filter`, oldest first, with the ids
// of the course/assignment/question each event refers to.
export async function getAnalytics(filter: object): Promise<object> {
  const data = await GQLRequest(`
    query getAnalytics($filter: PrendusAnalyticsFilter) {
      allPrendusAnalyticses(orderBy: createdAt_ASC, filter: $filter) {
        verb
        course {
          id
        }
        assignment {
          id
        }
        question {
          id
        }
      }
    }
  `, {filter}, AUTH_TOKEN, handleError);
  return data.allPrendusAnalyticses;
}
// Create the standard test fixture: an author and a viewer (students),
// an instructor, and a saved course graph owned by instructor/author.
// NOTE(review): createTestUser is declared with a required `name` but is
// called without one for the instructor — verify the signature.
export async function setupTestCourse(course): Promise<object> {
  const author = await createTestUser('STUDENT', 'author');
  const viewer = await createTestUser('STUDENT', 'viewer');
  const instructor = await createTestUser('INSTRUCTOR');
  const data = await saveArbitrary(
    assignCourseUserIds(course, instructor.id, author.id),
    'createCourse'
  );
  return {
    author,
    viewer,
    instructor,
    data
  }
}
// Tear down a setupTestCourse fixture: delete the course graph first,
// then the three test users.
export async function cleanupTestCourse(data, author, viewer, instructor) {
  await deleteCourseArbitrary(data.id);
  await deleteTestUsers(author, viewer, instructor);
}
// GQLRequest error callback: log the serialized error object followed
// by its message for easier test debugging.
function handleError(err: any) {
  const serialized = JSON.stringify(err);
  console.error(serialized);
  console.error(err.message);
}
// Reducer that appends `el` to `arr`, recursively flattening nested
// arrays to any depth. Usage: nested.reduce(flatten, []).
function flatten(arr: any[], el: any): any[] {
  if (Array.isArray(el)) {
    return arr.concat(el.reduce(flatten, []));
  }
  return arr.concat(el);
}
|
'use strict'
const Enums = require('./enum')
// Thin wrapper around the `enum` package's Enums collection: exposes
// each item directly as a property keyed by its key, plus array/string
// views of all keys and values.
class Enum {
  #items;   // underlying Enums collection (constructed with ignoreCase)
  #enums;   // array of individual enum items

  constructor (enums) {
    this.#items = new Enums(enums, { ignoreCase: true })
    this.#enums = this.#items.enums
    this.#setKeyValue()
  }

  // Expose every item on the instance: this[<key>] === item.
  #setKeyValue () {
    this.#enums.forEach(enumItem => {
      this[enumItem.key] = enumItem
    })
  }

  // Collect one property ('key' or 'value') from every item, in order.
  #getArray (option) {
    const items = []
    this.#enums.forEach(function (enumItem) {
      items.push(enumItem[option])
    })
    return items
  }

  // The raw Enums collection.
  get enum () {
    return this.#items
  }

  get values () {
    return this.#getArray('value')
  }

  get keys () {
    return this.#getArray('key')
  }

  // Comma-joined, no spaces (Array#toString): "a,b,c".
  get valuesString () {
    return this.values.toString()
  }

  // Comma-joined with spaces: "a, b, c".
  get valuesStringWithSpace () {
    return this.values.join(', ')
  }

  get keysString () {
    return this.keys.toString()
  }

  get keysStringWithSpace () {
    return this.keys.join(', ')
  }

  // Look up an item by value via the underlying collection.
  make (value) {
    return this.#items.get(value)
  }

  // True when `value` maps to a defined item.
  has (value) {
    return this.#items.isDefined(value)
  }
}
module.exports = Enum
|
<filename>src/implementation/Boj1764.java
package implementation;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.StringTokenizer;
public class Boj1764 {
    private static final String SPACE = " ";
    private static final String NEW_LINE = "\n";

    /**
     * BOJ 1764: given N names not heard and M names not seen, print how
     * many appear in both lists, then those names in lexicographic
     * order. Sorting the combined array makes shared names adjacent.
     */
    public static void main(String[] args) throws Exception {
        BufferedReader reader = new BufferedReader(new InputStreamReader(System.in));
        StringTokenizer tokens = new StringTokenizer(reader.readLine(), SPACE);
        int unheard = Integer.parseInt(tokens.nextToken()); // didn't hear
        int unseen = Integer.parseInt(tokens.nextToken());  // didn't see
        int total = unheard + unseen;

        String[] names = new String[total];
        for (int index = 0; index < total; index++) {
            names[index] = reader.readLine();
        }
        Arrays.sort(names);

        // A name present in both lists shows up as an adjacent duplicate.
        StringBuilder matches = new StringBuilder();
        int matchCount = 0;
        for (int index = 1; index < total; index++) {
            if (names[index].equals(names[index - 1])) {
                matches.append(names[index]).append(NEW_LINE);
                matchCount++;
            }
        }
        System.out.println(matchCount);
        System.out.println(matches.toString());
    }
}
|
#!/bin/bash
# Demo payload: prints confirmation lines, then lists the working
# directory (including hidden entries).
echo "I was executed"
echo "Good job admin"
echo "Have a nice day!"
ls -al
|
package algoliasearch
import (
"encoding/json"
"errors"
"fmt"
"net/http"
"net/url"
"time"
)
// client is the concrete Client implementation; every API call goes
// through its Transport.
type client struct {
	appID     string
	transport *Transport
}

// NewClient instantiates a new `Client` from the provided `appID` and
// `apiKey`. Default hosts are used for the transport layer.
func NewClient(appID, apiKey string) Client {
	return &client{
		appID:     appID,
		transport: NewTransport(appID, apiKey),
	}
}

// NewClientWithHosts instantiates a new `Client` from the provided `appID` and
// `apiKey`. The transport layers' hosts are initialized with the given
// `hosts`.
func NewClientWithHosts(appID, apiKey string, hosts []string) Client {
	return &client{
		appID:     appID,
		transport: NewTransportWithHosts(appID, apiKey, hosts),
	}
}

// GetAppID returns the application ID this client was built with.
func (c *client) GetAppID() string {
	return c.appID
}

// SetExtraHeader registers a header sent with every subsequent request.
func (c *client) SetExtraHeader(key, value string) {
	c.transport.setExtraHeader(key, value)
}

func (c *client) SetTimeout(connectTimeout, readTimeout int) {
	// As SetTimeout is about to be deprecated, we simply forward the
	// readTimeout but ignores the connectTimeout that is no longer
	// configurable.
	c.SetReadTimeout(time.Duration(readTimeout) * time.Second)
}

// Per-call-type timeout setters; -1 leaves the other timeouts unchanged.
func (c *client) SetReadTimeout(t time.Duration)      { c.transport.setTimeouts(t, -1, -1, -1) }
func (c *client) SetWriteTimeout(t time.Duration)     { c.transport.setTimeouts(-1, t, -1, -1) }
func (c *client) SetAnalyticsTimeout(t time.Duration) { c.transport.setTimeouts(-1, -1, t, -1) }
func (c *client) SetInsightsTimeout(t time.Duration)  { c.transport.setTimeouts(-1, -1, -1, t) }

func (c *client) SetMaxIdleConnsPerHosts(maxIdleConnsPerHost int) {
	c.transport.setMaxIdleConnsPerHost(maxIdleConnsPerHost)
}

// SetHTTPClient replaces the underlying *http.Client used for requests.
func (c *client) SetHTTPClient(client *http.Client) {
	c.transport.httpClient = client
}
// ListIndexes lists the application's indices (GET /1/indexes).
func (c *client) ListIndexes() (indexes []IndexRes, err error) {
	return c.ListIndexesWithRequestOptions(nil)
}

func (c *client) ListIndexesWithRequestOptions(opts *RequestOptions) (indexes []IndexRes, err error) {
	var res listIndexesRes
	err = c.request(&res, "GET", "/1/indexes", nil, read, opts)
	indexes = res.Items
	return
}

// InitIndex returns an Index wrapper bound to `name`; no network call.
func (c *client) InitIndex(name string) Index {
	return NewIndex(name, c)
}

// InitAnalytics returns an Analytics API wrapper sharing this client.
func (c *client) InitAnalytics() Analytics {
	return NewAnalytics(c)
}

// InitInsights returns an Insights API wrapper sharing this client.
func (c *client) InitInsights() Insights {
	return NewInsights(c)
}

// ListKeys is an alias of ListAPIKeys.
func (c *client) ListKeys() (keys []Key, err error) {
	return c.ListAPIKeys()
}

// ListKeysWithRequestOptions is an alias of ListAPIKeysWithRequestOptions.
func (c *client) ListKeysWithRequestOptions(opts *RequestOptions) (keys []Key, err error) {
	return c.ListAPIKeysWithRequestOptions(opts)
}

// ListAPIKeys lists every API key of the application (GET /1/keys).
func (c *client) ListAPIKeys() (keys []Key, err error) {
	return c.ListAPIKeysWithRequestOptions(nil)
}

func (c *client) ListAPIKeysWithRequestOptions(opts *RequestOptions) (keys []Key, err error) {
	var res listAPIKeysRes
	err = c.request(&res, "GET", "/1/keys", nil, read, opts)
	keys = res.Keys
	return
}
// MoveIndex moves/renames `source` into `destination`.
func (c *client) MoveIndex(source, destination string) (UpdateTaskRes, error) {
	return c.MoveIndexWithRequestOptions(source, destination, nil)
}

func (c *client) MoveIndexWithRequestOptions(source, destination string, opts *RequestOptions) (UpdateTaskRes, error) {
	return c.operation(source, destination, "move", nil, opts)
}

// CopyIndex copies `source` into `destination` (all scopes: nil).
func (c *client) CopyIndex(source, destination string) (UpdateTaskRes, error) {
	return c.CopyIndexWithRequestOptions(source, destination, nil)
}

func (c *client) CopyIndexWithRequestOptions(source, destination string, opts *RequestOptions) (UpdateTaskRes, error) {
	return c.ScopedCopyIndexWithRequestOptions(source, destination, nil, opts)
}

// ScopedCopyIndex copies only the given `scopes` of `source`.
func (c *client) ScopedCopyIndex(source, destination string, scopes []string) (UpdateTaskRes, error) {
	return c.ScopedCopyIndexWithRequestOptions(source, destination, scopes, nil)
}

func (c *client) ScopedCopyIndexWithRequestOptions(source, destination string, scopes []string, opts *RequestOptions) (UpdateTaskRes, error) {
	return c.operation(source, destination, "copy", scopes, opts)
}

// operation posts a move/copy IndexOperation against `src` after
// validating the requested scopes.
func (c *client) operation(src, dst, op string, scopes []string, opts *RequestOptions) (res UpdateTaskRes, err error) {
	if err = checkScopes(scopes); err != nil {
		return
	}
	o := IndexOperation{
		Destination: dst,
		Operation:   op,
		Scopes:      scopes,
	}
	// Index names may contain reserved URL characters; escape them.
	path := "/1/indexes/" + url.QueryEscape(src) + "/operation"
	err = c.request(&res, "POST", path, o, write, opts)
	return
}

// DeleteIndex deletes the index named `name` (delegates to the Index wrapper).
func (c *client) DeleteIndex(name string) (res DeleteTaskRes, err error) {
	return c.DeleteIndexWithRequestOptions(name, nil)
}

func (c *client) DeleteIndexWithRequestOptions(name string, opts *RequestOptions) (res DeleteTaskRes, err error) {
	index := c.InitIndex(name)
	return index.DeleteWithRequestOptions(opts)
}

// ClearIndex clears the index named `name` (delegates to the Index wrapper).
func (c *client) ClearIndex(name string) (res UpdateTaskRes, err error) {
	return c.ClearIndexWithRequestOptions(name, nil)
}

func (c *client) ClearIndexWithRequestOptions(name string, opts *RequestOptions) (res UpdateTaskRes, err error) {
	index := c.InitIndex(name)
	return index.ClearWithRequestOptions(opts)
}
// AddUserKey is an alias of AddAPIKey.
func (c *client) AddUserKey(ACL []string, params Map) (AddKeyRes, error) {
	return c.AddAPIKey(ACL, params)
}

// AddAPIKey creates an API key restricted to the given ACL operations.
func (c *client) AddAPIKey(ACL []string, params Map) (res AddKeyRes, err error) {
	return c.AddAPIKeyWithRequestOptions(ACL, params, nil)
}

func (c *client) AddAPIKeyWithRequestOptions(ACL []string, params Map, opts *RequestOptions) (res AddKeyRes, err error) {
	// duplicateMap presumably copies params so the caller's map is not
	// mutated by the "acl" entry — NOTE(review): confirm.
	req := duplicateMap(params)
	req["acl"] = ACL
	if err = checkKey(req); err != nil {
		return
	}
	err = c.request(&res, "POST", "/1/keys/", req, write, opts)
	return
}

// UpdateUserKey is an alias of UpdateAPIKey.
func (c *client) UpdateUserKey(key string, params Map) (UpdateKeyRes, error) {
	return c.UpdateAPIKey(key, params)
}

// UpdateAPIKey updates the restrictions of an existing API key.
func (c *client) UpdateAPIKey(key string, params Map) (res UpdateKeyRes, err error) {
	return c.UpdateAPIKeyWithRequestOptions(key, params, nil)
}

func (c *client) UpdateAPIKeyWithRequestOptions(key string, params Map, opts *RequestOptions) (res UpdateKeyRes, err error) {
	if err = checkKey(params); err != nil {
		return
	}
	path := "/1/keys/" + url.QueryEscape(key)
	err = c.request(&res, "PUT", path, params, write, opts)
	return
}

// GetUserKey is an alias of GetAPIKey.
func (c *client) GetUserKey(key string) (Key, error) {
	return c.GetAPIKey(key)
}

// GetAPIKey fetches the description of one API key.
func (c *client) GetAPIKey(key string) (res Key, err error) {
	return c.GetAPIKeyWithRequestOptions(key, nil)
}

func (c *client) GetAPIKeyWithRequestOptions(key string, opts *RequestOptions) (res Key, err error) {
	path := "/1/keys/" + url.QueryEscape(key)
	err = c.request(&res, "GET", path, nil, read, opts)
	return
}

// DeleteUserKey is an alias of DeleteAPIKey.
func (c *client) DeleteUserKey(key string) (DeleteRes, error) {
	return c.DeleteAPIKey(key)
}

// DeleteAPIKey deletes one API key.
func (c *client) DeleteAPIKey(key string) (res DeleteRes, err error) {
	return c.DeleteAPIKeyWithRequestOptions(key, nil)
}

func (c *client) DeleteAPIKeyWithRequestOptions(key string, opts *RequestOptions) (res DeleteRes, err error) {
	path := "/1/keys/" + url.QueryEscape(key)
	err = c.request(&res, "DELETE", path, nil, write, opts)
	return
}
// GetLogs retrieves activity logs according to `params`.
func (c *client) GetLogs(params Map) (logs []LogRes, err error) {
	return c.GetLogsWithRequestOptions(params, nil)
}

func (c *client) GetLogsWithRequestOptions(params Map, opts *RequestOptions) (logs []LogRes, err error) {
	var res getLogsRes
	if err = checkGetLogs(params); err != nil {
		return
	}
	// NOTE(review): this GET is issued with the `write` call type while
	// other reads use `read` — confirm this is intentional.
	err = c.request(&res, "GET", "/1/logs", params, write, opts)
	logs = res.Logs
	return
}

// MultipleQueries runs several index queries in a single API call.
func (c *client) MultipleQueries(queries []IndexedQuery, strategy string) (res []MultipleQueryRes, err error) {
	return c.MultipleQueriesWithRequestOptions(queries, strategy, nil)
}

func (c *client) MultipleQueriesWithRequestOptions(queries []IndexedQuery, strategy string, opts *RequestOptions) (res []MultipleQueryRes, err error) {
	// Empty strategy defaults to "none".
	if strategy == "" {
		strategy = "none"
	}
	// Validate every query up-front so no partial request is sent.
	for _, q := range queries {
		if err = checkQuery(q.Params); err != nil {
			return
		}
	}
	requests := make([]map[string]string, len(queries))
	for i, q := range queries {
		requests[i] = map[string]string{
			"indexName": q.IndexName,
			"params":    encodeMap(q.Params),
		}
	}
	body := Map{
		"requests": requests,
		"strategy": strategy,
	}
	var m multipleQueriesRes
	err = c.request(&m, "POST", "/1/indexes/*/queries", body, search, opts)
	res = m.Results
	return
}
// ListClusters lists the application's multi-cluster clusters.
func (c *client) ListClusters() (res []Cluster, err error) {
	return c.ListClustersWithRequestOptions(nil)
}

// ListClustersWithRequestOptions fetches GET /1/clusters and unwraps the
// list from the "clusters" field of the JSON payload.
func (c *client) ListClustersWithRequestOptions(opts *RequestOptions) (res []Cluster, err error) {
	var rawRes map[string][]Cluster
	// Fix: previously a transport/HTTP error was silently replaced by
	// the "missing field" error below, because rawRes was inspected even
	// when the request itself failed. Surface the real error first.
	if err = c.request(&rawRes, "GET", "/1/clusters", nil, read, opts); err != nil {
		return nil, err
	}
	clusters, ok := rawRes["clusters"]
	if !ok {
		return nil, errors.New("missing field `clusters` in JSON response")
	}
	return clusters, nil
}
// ListUserIDs pages through the userID -> cluster mapping.
func (c *client) ListUserIDs(page int, hitsPerPage int) (res ListUserIDsRes, err error) {
	return c.ListUserIDsWithRequestOptions(page, hitsPerPage, nil)
}

func (c *client) ListUserIDsWithRequestOptions(page int, hitsPerPage int, opts *RequestOptions) (res ListUserIDsRes, err error) {
	params := Map{
		"page":        page,
		"hitsPerPage": hitsPerPage,
	}
	err = c.request(&res, "GET", "/1/clusters/mapping?"+encodeMap(params), nil, read, opts)
	return
}

// GetUserID fetches the mapping entry for a single userID.
func (c *client) GetUserID(userID string) (res UserID, err error) {
	return c.GetUserIDWithRequestOptions(userID, nil)
}

func (c *client) GetUserIDWithRequestOptions(userID string, opts *RequestOptions) (res UserID, err error) {
	err = c.request(&res, "GET", "/1/clusters/mapping/"+url.QueryEscape(userID), nil, read, opts)
	return
}

// AssignUserID maps `userID` onto `clusterName`.
func (c *client) AssignUserID(userID string, clusterName string) (res AssignUserIDRes, err error) {
	return c.AssignUserIDWithRequestOptions(userID, clusterName, nil)
}

func (c *client) AssignUserIDWithRequestOptions(userID string, clusterName string, opts *RequestOptions) (res AssignUserIDRes, err error) {
	// The userID travels in the X-Algolia-User-ID header, so request
	// options must exist even when the caller passed none.
	if opts == nil {
		opts = &RequestOptions{
			ExtraHeaders: make(map[string]string),
		}
	}
	opts.ExtraHeaders["X-Algolia-User-ID"] = userID
	body := map[string]string{"cluster": clusterName}
	err = c.request(&res, "POST", "/1/clusters/mapping", body, write, opts)
	return
}

// RemoveUserID deletes the mapping entry for `userID`.
func (c *client) RemoveUserID(userID string) (res RemoveUserIDRes, err error) {
	return c.RemoveUserIDWithRequestOptions(userID, nil)
}

func (c *client) RemoveUserIDWithRequestOptions(userID string, opts *RequestOptions) (res RemoveUserIDRes, err error) {
	// Same header-based addressing as AssignUserID above.
	if opts == nil {
		opts = &RequestOptions{
			ExtraHeaders: make(map[string]string),
		}
	}
	opts.ExtraHeaders["X-Algolia-User-ID"] = userID
	err = c.request(&res, "DELETE", "/1/clusters/mapping", nil, write, opts)
	return
}
// GetTopUserIDs fetches GET /1/clusters/mapping/top.
func (c *client) GetTopUserIDs() (res TopUserIDs, err error) {
	return c.GetTopUserIDsWithRequestOptions(nil)
}

func (c *client) GetTopUserIDsWithRequestOptions(opts *RequestOptions) (res TopUserIDs, err error) {
	err = c.request(&res, "GET", "/1/clusters/mapping/top", nil, read, opts)
	return
}

// SearchUserIDs searches the userID mapping for `query`, with extra
// search `params` merged into the request body.
func (c *client) SearchUserIDs(query string, params Map) (res SearchUserIDRes, err error) {
	return c.SearchUserIDsWithRequestOptions(query, params, nil)
}

func (c *client) SearchUserIDsWithRequestOptions(query string, params Map, opts *RequestOptions) (res SearchUserIDRes, err error) {
	// NOTE: mutates the caller-supplied params map by adding "query".
	params["query"] = query
	err = c.request(&res, "POST", "/1/clusters/mapping/search", params, read, opts)
	return
}

// Batch sends operations targeting multiple indices in one call.
func (c *client) Batch(operations []BatchOperationIndexed) (res MultipleBatchRes, err error) {
	return c.BatchWithRequestOptions(operations, nil)
}

func (c *client) BatchWithRequestOptions(operations []BatchOperationIndexed, opts *RequestOptions) (res MultipleBatchRes, err error) {
	// TODO: Use check functions of index.go
	request := map[string][]BatchOperationIndexed{
		"requests": operations,
	}
	err = c.request(&res, "POST", "/1/indexes/*/batch", request, write, opts)
	return
}
// WaitTask blocks until the given task reports status "published".
func (c *client) WaitTask(indexName string, taskID int) error {
	return c.WaitTaskWithRequestOptions(indexName, taskID, nil)
}

// WaitTaskWithRequestOptions polls the task status, sleeping a random
// duration bounded by an exponentially growing cap (max 10 minutes)
// between polls; any polling error is returned immediately.
func (c *client) WaitTaskWithRequestOptions(indexName string, taskID int, opts *RequestOptions) error {
	var maxDuration = time.Second
	for {
		res, err := c.GetStatusWithRequestOptions(indexName,
			taskID, opts)
		if err != nil {
			return err
		}
		if res.Status == "published" {
			return nil
		}
		sleepDuration := randDuration(maxDuration)
		time.Sleep(sleepDuration)
		// Increase the upper boundary used to generate the sleep duration
		if maxDuration < 10*time.Minute {
			maxDuration *= 2
			if maxDuration > 10*time.Minute {
				maxDuration = 10 * time.Minute
			}
		}
	}
}

// GetStatus fetches the status of one task on one index.
func (c *client) GetStatus(indexName string, taskID int) (res TaskStatusRes, err error) {
	return c.GetStatusWithRequestOptions(indexName, taskID, nil)
}

func (c *client) GetStatusWithRequestOptions(indexName string, taskID int, opts *RequestOptions) (res TaskStatusRes, err error) {
	path := fmt.Sprintf("/1/indexes/%s/task/%d", url.QueryEscape(indexName), taskID)
	err = c.request(&res, "GET", path, nil, read, opts)
	return
}
// CopySettings copies only the settings scope of `source` into `destination`.
func (c *client) CopySettings(source, destination string) (UpdateTaskRes, error) {
	return c.CopySettingsWithRequestOptions(source, destination, nil)
}

func (c *client) CopySettingsWithRequestOptions(source, destination string, opts *RequestOptions) (UpdateTaskRes, error) {
	return c.ScopedCopyIndexWithRequestOptions(source, destination, []string{"settings"}, opts)
}

// CopySynonyms copies only the synonyms scope.
func (c *client) CopySynonyms(source, destination string) (UpdateTaskRes, error) {
	return c.CopySynonymsWithRequestOptions(source, destination, nil)
}

func (c *client) CopySynonymsWithRequestOptions(source, destination string, opts *RequestOptions) (UpdateTaskRes, error) {
	return c.ScopedCopyIndexWithRequestOptions(source, destination, []string{"synonyms"}, opts)
}

// CopyRules copies only the rules scope.
func (c *client) CopyRules(source, destination string) (UpdateTaskRes, error) {
	return c.CopyRulesWithRequestOptions(source, destination, nil)
}

func (c *client) CopyRulesWithRequestOptions(source, destination string, opts *RequestOptions) (UpdateTaskRes, error) {
	return c.ScopedCopyIndexWithRequestOptions(source, destination, []string{"rules"}, opts)
}

// SetPersonalizationStrategy posts a new personalization strategy.
func (c *client) SetPersonalizationStrategy(strategy Strategy) (SetStrategyRes, error) {
	return c.SetPersonalizationStrategyWithRequestOptions(strategy, nil)
}

func (c *client) SetPersonalizationStrategyWithRequestOptions(strategy Strategy, opts *RequestOptions) (res SetStrategyRes, err error) {
	path := "/1/recommendation/personalization/strategy"
	err = c.request(&res, "POST", path, strategy, write, opts)
	return
}

// GetPersonalizationStrategy fetches the current personalization strategy.
func (c *client) GetPersonalizationStrategy() (Strategy, error) {
	return c.GetPersonalizationStrategyWithRequestOptions(nil)
}

func (c *client) GetPersonalizationStrategyWithRequestOptions(opts *RequestOptions) (strategy Strategy, err error) {
	path := "/1/recommendation/personalization/strategy"
	err = c.request(&strategy, "GET", path, nil, read, opts)
	return
}
// RestoreAPIKey restores the given (previously deleted) API key.
func (c *client) RestoreAPIKey(key string) (AddKeyRes, error) {
	return c.RestoreAPIKeyWithRequestOptions(key, nil)
}

func (c *client) RestoreAPIKeyWithRequestOptions(key string, opts *RequestOptions) (res AddKeyRes, err error) {
	// Fix: escape the key, consistent with every other key endpoint
	// (Get/Update/DeleteAPIKey), so keys containing reserved URL
	// characters build a valid path.
	path := fmt.Sprintf("/1/keys/%s/restore", url.QueryEscape(key))
	err = c.request(&res, "POST", path, nil, write, opts)
	return
}
// request issues one HTTP call through the transport (typeCall selects
// the read/write/search host-and-timeout policy) and unmarshals the raw
// JSON response into `res`.
func (c *client) request(res interface{}, method, path string, body interface{}, typeCall int, opts *RequestOptions) error {
	r, err := c.transport.request(method, path, body, typeCall, opts)
	if err != nil {
		return err
	}
	return json.Unmarshal(r, res)
}
|
/**
* Licensed to Jasig under one or more contributor license
* agreements. See the NOTICE file distributed with this work
* for additional information regarding copyright ownership.
* Jasig licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a
* copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.jasig.ssp.util.importer.job.validation.map.metadata.validation.violation;
import java.util.List;
import org.jasig.ssp.util.importer.job.validation.map.metadata.utils.MapReference;
import org.jasig.ssp.util.importer.job.validation.map.metadata.validation.MapViolation;
// MapViolation raised when an imported file's header row lacks one or
// more primary-key columns for its target table.
public class MissingPrimaryKeyViolation implements MapViolation {

    String violation;          // free-form violation text (set via setViolation)
    MapReference mapReference; // table/file this violation refers to
    List<String> missingKeys;  // key columns absent from the header

    public MissingPrimaryKeyViolation(MapReference mapReference, List<String> missingKeys){
        this.mapReference = mapReference;
        this.missingKeys = missingKeys;
        //super(mapReference, stringBuilder.toString(), "Header missing key column, unable to process");
    }

    @Override
    public String getViolation() {
        // TODO Auto-generated method stub
        return violation;
    }

    @Override
    public void setViolation(String violation) {
        this.violation = violation;
    }

    public String getTableName(){
        return mapReference.getTableName();
    }

    @Override
    public String buildMessage() {
        return "Header missing key column, unable to process for table: "
                + mapReference.getTableName() + " " + buildColumnList();
    }

    // Renders the missing columns as "missing columns: a:b:c:".
    private String buildColumnList(){
        StringBuilder stringBuilder = new StringBuilder("missing columns: ");
        for(String missingKey:missingKeys ){
            stringBuilder.append(missingKey + ":");
        }
        return stringBuilder.toString();
    }

    @Override
    public String buildShortMessage() {
        // Short and long forms are identical for this violation.
        return buildMessage();
    }

    @Override
    public Boolean isTableViolation() {
        // TODO Auto-generated method stub
        return true;
    }
}
|
// Return the elements of arr1 that also occur in arr2, preserving
// arr1's order and duplicates.
function getCommonElements(arr1, arr2) {
  // Perf: the original re-scanned arr2 for every element (O(n*m)).
  // Set.has is O(1) and uses the same SameValueZero comparison as
  // Array#includes, so results — including NaN handling — are identical.
  const lookup = new Set(arr2);
  return arr1.filter((element) => lookup.has(element));
}
<gh_stars>10-100
package org.chojin.spark.lineage
import java.util.Properties
import grizzled.slf4j.Logger
import scala.collection.JavaConversions._
// Configuration loader: reads /lineage.properties from the classpath and
// reflectively instantiates reporter classes named therein.
object Config {
  private lazy val LOGGER = Logger[this.type]
  // All lineage keys live under this prefix.
  private final val prefix = "org.chojin.spark.lineage"

  // Loaded once; falls back to empty Properties when the resource is absent.
  private lazy val properties = {
    Option(getClass.getResourceAsStream("/lineage.properties"))
      .map({ stream =>
        val props = new Properties()
        props.load(stream)
        stream.close()
        props
      })
      .getOrElse(new Properties())
  }

  // Raw lookup; null when the key is missing (java.util.Properties semantics).
  def get(name: String): String = properties.getProperty(name)

  // Comma-separated property -> Seq of trimmed entries; empty/missing -> Seq().
  def getList(name: String): Seq[String] = Option.apply(properties.getProperty(name))
    .flatMap(p => if (p.isEmpty) None else Some(p))
    .map(p => p.split("\\s*,\\s*").toSeq)
    .getOrElse(Seq())

  // Instantiate the single class named by property "<prefix>.<suffix>".
  def createInstanceOf[T](suffix: String): T = {
    val propPrefix = s"$prefix.$suffix"
    val className = get(propPrefix)
    createInstance(className, propPrefix)
  }

  // Instantiate every class listed under "<prefix>.<suffix>s" (plural).
  def createInstancesOf[T](suffix: String): List[T] = {
    val propPrefix = s"$prefix.$suffix"
    getList(s"${propPrefix}s").map(className => {
      createInstance[T](className, propPrefix)
    }).toList
  }

  // Reflectively construct `className`, passing its Map[String, String]
  // constructor the properties scoped under
  // "<prefix>.<simple-class-name minus 'Reporter', lowercased>." with
  // that scope prefix stripped from the keys.
  private def createInstance[T](className: String, prefix: String): T = {
    try {
      def clazz = getClass.getClassLoader.loadClass(className)
      val configKey = clazz.getSimpleName.replaceFirst("Reporter$", "").toLowerCase
      val clazzPrefix = s"$prefix.$configKey"
      val props = properties
        .toMap
        .filter({ case (k, _) => k.startsWith(s"$clazzPrefix.")})
        .map({ case (k, v) => k.substring(clazzPrefix.length + 1) -> v})
      LOGGER.debug(s"Properties -> $props")
      clazz
        .getConstructor(classOf[Map[String, String]])
        .newInstance(props)
        .asInstanceOf[T]
    } catch {
      case e: Throwable => {
        // Log with context, then propagate — callers decide how to fail.
        LOGGER.error(s"Unable to create instance of $className", e)
        throw e
      }
    }
  }
}
|
# Ensure an auditd rule for the fchownat syscall exists, picking the
# rules file location that is present on this system.
if [ -e /etc/audit/audit.rules ]; then
    AUDIT_RULES_FILE="/etc/audit/audit.rules"
    AUDIT_TAG="-k perm_mod"
elif [ -e /etc/audit.rules ]; then
    # Legacy location. NOTE(review): the -k tag is dropped here —
    # presumably the older format rejects it; confirm.
    AUDIT_RULES_FILE="/etc/audit.rules"
    AUDIT_TAG=""
else
    # No auditd rules file at all; nothing to do.
    exit
fi
# Append a rule only when no active (non-comment) fchownat rule exists,
# choosing the arch flag from the processor type.
if [ "`grep -v '#' ${AUDIT_RULES_FILE} | grep -c ' -S fchownat '`" = "0" ]; then
    if [ "`uname -p`" = "x86_64" ]; then
        echo "-a exit,always -F arch=b64 -S fchownat ${AUDIT_TAG}" >>${AUDIT_RULES_FILE}
    else
        echo "-a exit,always -F arch=b32 -S fchownat ${AUDIT_TAG}" >>${AUDIT_RULES_FILE}
    fi
fi
# Reload the rules.
service auditd restart 1>/dev/null
|
import React from 'react';
import ReactDOM from "react-dom";
import { Router, Route, browserHistory, IndexRoute } from 'react-router';
//main layout
import Master from './Master';
import Login from './Login';
import Register from './Register';
import Second from './second';
import Passcode from './Passcode';
import UserProfile from "./userprofile";
import WidgetList from "./widget";
import UserList from './userlist';
import Email from './Email';
// Top-level react-router (v3-style) route table. Master is the app shell;
// Login is the index page. /users and /widgets nest list views inside the
// shared Second layout.
export default class Routes extends React.Component {
    render() {
        return (
            <Router history={browserHistory}>
                <Route component={Master}>
                    <Route path="/" component={Login} />
                    <Route path="/register" component={Register} />
                    <Route path="/passcode/:email" component={Passcode} />
                    <Route path="/forgot" component={Email} />
                    <Route path="/users">
                        <Route component={Second}>
                            <IndexRoute component={UserList} />
                        </Route>
                        {/* /users/:userId renders OUTSIDE the Second layout */}
                        <Route path=":userId" component={UserProfile} />
                    </Route>
                    <Route path="/widgets">
                        <Route component={Second}>
                            <IndexRoute component={WidgetList} />
                        </Route>
                    </Route>
                </Route>
            </Router>
        );
    }
}
if (document.getElementById('root')) {
ReactDOM.render(<Routes/>, document.getElementById('root'));
}
|
# Mirror devtron to a target remote (e.g. Gitee), rewriting GitHub URLs in
# the installation manifests to their Gitee equivalents, then force-push.
git clone https://github.com/devtron-labs/devtron.git
cd devtron
# Strip the scheme from the target URL and embed credentials for the push.
git remote add target "https://${INPUT_TARGET_USERNAME}:${INPUT_TARGET_TOKEN}@${INPUT_TARGET_URL#https://}"
git checkout main
git remote -v
sed -i "s/LTAG=.*/LTAG=\"main\";/" manifests/installation-script
# Fix: the original replacement contained "REPO_RAW_URL= https..." — the space
# after '=' makes the generated line an invalid shell assignment.
sed -i "s/REPO_RAW_URL=.*/REPO_RAW_URL=https:\/\/gitee.com\/devtron-labs\/devtron\/raw/" manifests/installation-script
sed -i "s/url:.*/url: https:\/\/gitee.com\/devtron-labs\/devtron\/raw\/main\/manifests\/installation-script/" manifests/install/devtron-installer.yaml
sed -i "29 s/value:.*/value: https:\/\/gitee.com\/devtron-labs\/devtron.git/" manifests/yamls/migrator.yaml
sed -i "65 s/value:.*/value: https:\/\/gitee.com\/devtron-labs\/devtron.git/" manifests/yamls/migrator.yaml
sed -i "103 s/value:.*/value: https:\/\/gitee.com\/devtron-labs\/git-sensor.git/" manifests/yamls/migrator.yaml
sed -i "141 s/value:.*/value: https:\/\/gitee.com\/devtron-labs\/lens.git/" manifests/yamls/migrator.yaml
# Quote identity values in case they contain spaces.
git config --global user.email "${INPUT_TARGET_USEREMAIL}"
git config --global user.name "${INPUT_TARGET_USERNAME}"
git add .
git commit -m "updated github link to gitee"
git push -f --all target
|
const fs = require('fs')
const nps = require('path')
const utils = require('util')
const ruleMatch = require('./rule-match')
/**
 * Recursively describes `file` relative to `root`.
 * A matching file yields {type:'file', file, lastmod}; a non-matching file
 * yields undefined (callers drop it); a directory yields {type:'dir', ...}
 * whose children are computed with the directory itself as the new root.
 * Throws when `file` does not exist.
 */
function filesToTree(root, file, filter) {
  if (!fs.existsSync(file)) throw new Error('Not found file: ' + file)
  const stats = fs.statSync(file)
  const relative = file.substring(root.length).replace(/^\//, '')
  if (stats.isFile()) {
    if (!ruleMatch(filter, file)) return
    return { type: 'file', file: relative, lastmod: stats.mtime }
  }
  const children = fs.readdirSync(file)
    .map(name => filesToTree(file, nps.join(file, name), filter))
    .filter(Boolean)
  return { type: 'dir', file: relative, lastmod: stats.mtime, files: children }
}
/**
 * Entry point: resolves `root` to an absolute path, builds its tree, and
 * rewrites the root node's `file` to the absolute path.
 */
function file2Tree(root, filter) {
  const absRoot = nps.resolve(root)
  const tree = filesToTree(absRoot, absRoot, filter)
  if (tree) tree.file = absRoot
  return tree
}
module.exports = file2Tree
// Guard renamed: identifiers beginning with an underscore followed by an
// uppercase letter (_MR_GUI_H_) are reserved to the implementation.
#ifndef MR_GUI_H_
#define MR_GUI_H_

#include <mobrend/renderer.h>

namespace mr
{

/// Thin wrapper around the immediate-mode GUI layer: construction performs
/// setup against the window/renderer, and each frame is bracketed by
/// BeginFrame()/EndFrame().
class Gui
{
public:
	typedef struct
	{
		void *windowHandle;                                 // native window the GUI attaches to
		struct Renderer::gui_init_info_s *rendererInitInfo; // renderer-specific init data
	} CreateParams;

public:
	Gui(CreateParams params);
	~Gui();

	void BeginFrame();
	void EndFrame();
};

} // namespace mr

#endif // MR_GUI_H_
|
<reponame>henrytao-me/download-manager
/*
* Copyright 2016 "<NAME> <<EMAIL>>"
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package me.henrytao.downloadmanager.sample.ui.home;
import android.content.Context;
import android.databinding.ObservableField;
import android.net.Uri;
import android.os.Environment;
import java.util.Locale;
import java.util.concurrent.TimeUnit;
import me.henrytao.downloadmanager.DownloadManager;
import me.henrytao.downloadmanager.Request;
import me.henrytao.downloadmanager.sample.App;
import me.henrytao.downloadmanager.sample.ui.base.BaseViewModel;
import me.henrytao.mvvmlifecycle.rx.UnsubscribeLifeCycle;
import rx.android.schedulers.AndroidSchedulers;
import rx.schedulers.Schedulers;
/**
* Created by henrytao on 7/1/16.
*/
public class HomeViewModel extends BaseViewModel {

  // Application context (not an Activity) — safe to hold for the VM lifetime.
  private final Context mContext;

  // Human-readable progress line bound to the UI via data binding.
  public ObservableField<String> progress = new ObservableField<>();

  // Id of the most recently enqueued download; used by pause/resume.
  private long mDownloadId;

  public HomeViewModel() {
    mContext = App.getInstance().getApplicationContext();
  }

  @Override
  public void onCreateView() {
    super.onCreateView();
  }

  /**
   * Enqueues the sample APK download and subscribes to its progress stream:
   * events are debounced (500 ms), mapped to a display string, de-duplicated,
   * and delivered on the main thread. The subscription is released on
   * DESTROY_VIEW via manageSubscription.
   */
  public void onDownloadClicked() {
    mDownloadId = new Request.Builder(Uri.parse("http://download.mysquar.com.s3.amazonaws.com/apk/mychat/mychat.apk"))
        .setDestPath(Uri.fromFile(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DOWNLOADS)))
        .setTempPath(Uri.fromFile(mContext.getCacheDir()))
        .build()
        .enqueue();
    manageSubscription("download-progress", DownloadManager.getInstance().observe(mDownloadId)
        .debounce(500, TimeUnit.MILLISECONDS)
        .map(info -> {
          // Guard against division by zero while the content length is unknown.
          int percentage = info.getContentLength() > 0 ? (int) ((100 * info.getBytesRead()) / info.getContentLength()) : 0;
          return String.format(Locale.US, "Progress %s | %d%%", info.getId(), percentage);
        })
        .distinctUntilChanged()
        .subscribeOn(Schedulers.computation())
        .observeOn(AndroidSchedulers.mainThread())
        .subscribe(percentage -> {
          progress.set(percentage);
        }, Throwable::printStackTrace), UnsubscribeLifeCycle.DESTROY_VIEW);
  }

  /** Pauses the current download, if any. */
  public void onPauseClicked() {
    DownloadManager.getInstance().pause(mDownloadId);
  }

  /** Resumes the current download, if any. */
  public void onResumeClicked() {
    DownloadManager.getInstance().resume(mDownloadId);
  }
}
|
#!/bin/bash
# SLURM batch job: single-core run of the activation-function experiment
# ("selu", trial 1). #SBATCH lines are directives read by the scheduler.
#SBATCH -J Act_selu_1
#SBATCH --mail-user=eger@ukp.informatik.tu-darmstadt.de
#SBATCH --mail-type=FAIL
#SBATCH -e /work/scratch/se55gyhe/log/output.err.%j
#SBATCH -o /work/scratch/se55gyhe/log/output.out.%j
#SBATCH -n 1 # Number of cores
#SBATCH --mem-per-cpu=2000
#SBATCH -t 23:59:00 # Hours, minutes and seconds, or '#SBATCH -t 10' -only mins
#module load intel python/3.5
# Positional args are hyperparameters consumed by meta.py (activation,
# trial, optimizer, ...) — presumably; confirm against meta.py's argparser.
python3 /home/se55gyhe/Act_func/progs/meta.py selu 1 Adadelta 4 0.7100767098788312 31 0.9661654078128048 rnormal PE-infersent
|
# Run the CNN experiment suite in order; each entry is "<label>|<script>".
for entry in \
    "MNIST 5|./experiments_mnist5_cnn.py" \
    "MNIST 10|./experiments_mnist10_cnn.py" \
    "CIFAR 10|./experiments_cifar10.py" \
    "CIFAR 100|./experiments_cifar100.py" \
    "CIFAR 80|./experiments_cifar80.py"
do
    echo "${entry%%|*}"
    ipython "${entry#*|}"
done
|
import java.util.Date;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
@Path("currenttime")
public class MyResource {
@GET
@Produces(MediaType.TEXT_PLAIN)
public String getCurrentTime() {
Date date = new Date();
return date.toString();
}
} |
/*
* Copyright © 2021 Apple Inc. and the ServiceTalk project authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.servicetalk.utils.internal;
import java.time.Duration;
import javax.annotation.Nullable;
import static java.time.Duration.ZERO;
import static java.time.Duration.ofNanos;
import static java.util.Objects.requireNonNull;
/**
* Helper utilities for {@link Duration}.
*/
/**
 * Helper utilities for {@link Duration}.
 */
public final class DurationUtils {

    /** Largest duration representable as a signed 64-bit nanosecond count. */
    private static final Duration LONG_MAX_NANOS = ofNanos(Long.MAX_VALUE);

    /** Most negative duration representable as a signed 64-bit nanosecond count. */
    private static final Duration LONG_MIN_NANOS = ofNanos(Long.MIN_VALUE);

    private DurationUtils() {
        // Utility class; never instantiated.
    }

    /**
     * Checks if the duration is positive, excluding zero.
     *
     * @param duration the {@link Duration} to validate
     * @return {@code true} if the passed duration is greater than {@link Duration#ZERO}, {@code false} otherwise
     */
    public static boolean isPositive(final Duration duration) {
        return duration.compareTo(ZERO) > 0;
    }

    /**
     * Ensures the duration is positive, excluding zero.
     *
     * @param duration the {@link Duration} to validate
     * @param name name of the {@link Duration} variable
     * @return the passed duration if all checks pass
     * @throws NullPointerException if the passed duration is {@code null}
     * @throws IllegalArgumentException if the passed duration is not greater than {@link Duration#ZERO}
     */
    public static Duration ensurePositive(final Duration duration, final String name) {
        requireNonNull(duration, name);
        if (!isPositive(duration)) {
            throw new IllegalArgumentException(name + ": " + duration + " (expected > 0)");
        }
        return duration;
    }

    /**
     * Checks if the duration is considered "infinite".
     *
     * @param duration the {@link Duration} to validate
     * @param maxDuration the max {@link Duration} limit
     * @return {@code true} if the passed duration is {@code null} or exceeds the {@code maxDuration}, {@code false}
     * otherwise
     */
    public static boolean isInfinite(@Nullable final Duration duration, final Duration maxDuration) {
        return duration == null || duration.compareTo(maxDuration) > 0;
    }

    /**
     * Converts a {@code Duration} to nanoseconds, saturating at
     * {@code Long.MIN_VALUE}/{@code Long.MAX_VALUE} when the value does not
     * fit in a signed 64-bit integer.
     *
     * @param duration the duration to convert
     * @return the converted nanoseconds value
     */
    public static long toNanos(final Duration duration) {
        if (duration.compareTo(LONG_MAX_NANOS) >= 0) {
            return Long.MAX_VALUE;
        }
        if (duration.compareTo(LONG_MIN_NANOS) <= 0) {
            return Long.MIN_VALUE;
        }
        return duration.toNanos();
    }
}
|
# Find Maximum and Minimum using python
def maxMin(list):
    """Return a (maximum, minimum) tuple for a non-empty sequence.

    Raises:
        ValueError: if the sequence is empty (the original raised an
            opaque IndexError instead).
    """
    if not list:
        raise ValueError("maxMin() requires a non-empty sequence")
    # Built-in max()/min() replace the hand-rolled scan, which also
    # redundantly compared the first element against itself on iteration 0.
    return max(list), min(list)
# Main Program: demonstrate maxMin on a sample list.
values = [5, 2, 8, 9, 3, 6, 1]
largest, smallest = maxMin(values)
print("Maximum number is: {}".format(largest))
print("Minimum number is: {}".format(smallest))
<reponame>neuling/fso-livetest-chrome-extension<filename>src/public/default-rules/static-head-open-graph-title.js
// FSO livetest rule (loaded by the extension, hence the anonymous function
// expression): reports the Open Graph title found in the page's static head.
function(page,done)
{
// `this` is the rule runner; capture it for use inside the closure.
let that = this;
let dom = page.getStaticDom();
// Matches e.g. <meta property="og:title" content="...">.
let og_title = dom.querySelector('meta[property="og:title"]');
if(og_title && og_title.content)
{
// Report an informational result; done() must be called exactly once.
done(that.createResult('HEAD', 'Open Graph (Facebook) title: "'+og_title.content+'"'+that.partialCodeLink(og_title), 'info'));return;
}
// No OG title present: complete with no result.
done();
}
// Ambient module declarations so non-TS assets and untyped packages can be
// imported without compile errors.

// Static assets handled by the bundler.
declare module '*.css';
declare module '*.less';
declare module '*.png';
declare module '*-icon.svg';

// Third-party packages that ship no type definitions.
declare module 'vconsole';
declare module 'postcss-px-to-viewport';
declare module 'less-var-parse';
|
"use strict";
import { Client, Message, MessageEmbed } from "discord.js";
import { MessageMentions } from "discord.js";
import ee from "../../botconfig/embed.json";
import Logger from "../../lib/logger";
module.exports = {
  name: "say",
  category: "Fun",
  cooldown: 2,
  usage: "say <TEXT>",
  description: "Resends your Text",
  /**
   * Echoes the invoker's text back into the channel.
   * Rejects empty input and any user/role mentions (mass-ping protection).
   */
  run: async (client: Client, message: Message, args: string[]) => {
    const logger = new Logger("Commands - Say");
    // Fix: join ALL arguments — the original resent only the first word.
    const text = args.join(" ");
    try {
      if (!args[0])
        return message.channel.send({
          embeds: [
            new MessageEmbed()
              .setColor("RED")
              .setFooter(ee.footertext, ee.footericon)
              // Fixed grammar of the user-facing error title.
              .setTitle(`❌ ERROR | You didn't provide a Text`)
              .setDescription(`Usage: \`say <TEXT>\``),
          ],
        });
      // Refuse to relay user/role mentions so the bot can't mass-ping.
      if (
        MessageMentions.USERS_PATTERN.test(text) ||
        MessageMentions.ROLES_PATTERN.test(text)
      )
        return message.reply("I am not allowed to mention roles or users");
      message.channel.send(text);
    } catch (e: any) {
      logger.error(String(e.stack));
      return message.channel.send({
        embeds: [
          new MessageEmbed()
            .setColor("RED")
            .setFooter(ee.footertext, ee.footericon)
            .setTitle(`❌ ERROR | An error occurred`)
            .setDescription(`\`\`\`${e.stack}\`\`\``),
        ],
      });
    }
  },
};
/** Template by Tomato#6966 | https://github.com/Tomato6966/Discord-Js-Handler-Template */
|
import React from 'react'
const WrapperContainer = ({ children }) => {
return (
<div className="bg-base-200 w-full">
<div className="w-full max-w-[1360px] mx-auto my-5 md:my-10">
{children}
</div>
</div>
)
}
export default WrapperContainer
|
#!/bin/sh
#===============================================================================
# Copyright 2016-2018 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#===============================================================================
MKLURLROOT="https://github.com/intel/mkl-dnn/releases/download/v0.13/"
MKLVERSION="2018.0.2.20180127"

# Pick the package matching the host OS.
os=`uname`
if [ "$os" = "Linux" ]; then
MKLPACKAGE="mklml_lnx_${MKLVERSION}.tgz"
elif [ "$os" = "Darwin" ]; then
MKLPACKAGE="mklml_mac_${MKLVERSION}.tgz"
else
echo "Cannot identify operating system. Try downloading package manually."
exit 1
fi

MKLURL=${MKLURLROOT}${MKLPACKAGE}
DST=`dirname $0`/../external
mkdir -p $DST
DST=`cd $DST;pwd`

# Download with curl or wget, whichever is available.
if [ -x "$(command -v curl)" ]; then
curl -L -o "${DST}/${MKLPACKAGE}" "$MKLURL"
elif [ -x "$(command -v wget)" ]; then
wget -O "${DST}/${MKLPACKAGE}" "$MKLURL"
else
echo "curl or wget not available"
exit 1
fi
# Fix: the original tested 'if [ \! $? ]', a string test that is always false,
# so download failures were never reported. Test the exit status explicitly.
if [ $? -ne 0 ]; then
echo "Download from $MKLURL to $DST failed"
exit 1
fi

tar -xzf "$DST/${MKLPACKAGE}" -C $DST
echo "Downloaded and unpacked Intel(R) MKL small libraries to $DST"
|
public class UserProfile implements User {
private String id;
private String name;
private String email;
private String imageUrl;
public UserProfile(String id, String name, String email, String imageUrl) {
this.id = id;
this.name = name;
this.email = email;
this.imageUrl = imageUrl;
}
@Override
public String getId() {
return id;
}
@Override
public String getName() {
return name;
}
@Override
public String getEmail() {
return email;
}
@Override
public String getImageUrl() {
return imageUrl;
}
} |
#!/bin/bash
# © Copyright IBM Corporation 2019, 2020
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -e
# Normalize architecture naming: Docker uses "amd64" where uname says "x86_64".
if [ "$(uname -m)" = "x86_64" ] ; then export ARCH="amd64" ; else export ARCH=$(uname -m) ; fi
# if DOCKER_USER is set, authenticate with docker.io to mitigate rate limit (https://www.docker.com/increase-rate-limits)
if [ -n "$DOCKER_USER" ] ; then echo 'Authenticating with docker.io...' && docker login -u $DOCKER_USER -p $DOCKER_PASS docker.io ; fi
# Fast path: only assemble and push the multi-arch manifest from the cached
# tags, then clean up and exit (travis_fold markers group the CI log output).
if [ "$PUSH_MANIFEST_ONLY" = true ] ; then
echo 'Retrieving remote tagcache' && echo -en 'travis_fold:start:retrieve-tag-cache\\r'
./travis-build-scripts/artifact-util.sh -c ${CACHE_PATH} -u ${REPOSITORY_USER} -p ${REPOSITORY_CREDENTIAL} -f cache/${TAGCACHE_FILE} -l ./.tagcache --get
echo -en 'travis_fold:end:retrieve-tag-cache\\r'
make push-manifest
./travis-build-scripts/cleanup-cache.sh
exit 0
fi
echo 'Downgrading Docker (if necessary)...' && echo -en 'travis_fold:start:docker-downgrade\\r'
eval "$DOCKER_DOWNGRADE"
echo -en 'travis_fold:end:docker-downgrade\\r'
## Build images
./travis-build-scripts/build.sh
## Test images
./travis-build-scripts/test.sh
## Push images
# BUILD_INTERNAL_LEVEL selects developer vs production pushes; absent it,
# BUILD_ALL pushes both flavors.
if [ -z "$BUILD_INTERNAL_LEVEL" ] ; then
if [ "$BUILD_ALL" = true ] ; then
./travis-build-scripts/push.sh developer
./travis-build-scripts/push.sh production
fi
else
if [[ "$BUILD_INTERNAL_LEVEL" == *".DE"* ]]; then
./travis-build-scripts/push.sh developer
else
./travis-build-scripts/push.sh production
fi
fi
# Long-term-support builds always get a production push as well.
if [ "$LTS" = true ] ; then
./travis-build-scripts/push.sh production
fi
|
from keras.preprocessing import image
from keras.applications.vgg19 import preprocess_input, decode_predictions
import numpy as np
def model_predict(img_path, model):
    """Classify one image with a VGG19-style Keras model.

    Loads the image at 224x224, preprocesses it into a single-item batch,
    and returns the top-3 predictions as a list of (label, confidence).
    """
    img = image.load_img(img_path, target_size=(224, 224))
    batch = np.expand_dims(image.img_to_array(img), axis=0)
    batch = preprocess_input(batch)
    top3 = decode_predictions(model.predict(batch), top=3)[0]
    return [(label, confidence) for _, label, confidence in top3]
from django import forms
from common.models import Officer
class OfficerForm(forms.ModelForm):
    """ModelForm for creating/editing Officer records."""

    class Meta:
        model = Officer
        # Explicit field whitelist; note 'id' is exposed so submissions can
        # reference an existing record.
        fields = ('birth_year', 'star', 'officer_last', 'id', 'officer_first', 'gender', 'rank', 'unit', 'appt_date',
                  'race',)
|
<reponame>vupham79/banana
/**
* Angular wrapper for Bryntum slider Widget
*/
import { Component, ElementRef, EventEmitter, Input, OnDestroy, OnInit, Output } from '@angular/core';
// UMD bundle is used to support IE11 browser. If you don't need it just use "import { ... } from 'bryntum-gantt'" instead
import { WidgetHelper } from 'bryntum-gantt/gantt.umd.js';
@Component({
selector : 'bry-slider',
template : ''
})
export class SliderComponent implements OnInit {
private elementRef : ElementRef;
public slider : any;
@Input() max : Number = 100;
@Input() min : Number = 0;
@Input() onChange : Function = () => {};
@Input() showTooltip : Boolean;
@Input() showValue : Boolean;
@Input() step : Number = 5;
@Input() text : String = 'Slider';
@Input() value : Number = 0;
@Output() change : EventEmitter<any> = new EventEmitter;
/**
* Saves element to have container to render the button to
* @param element
*/
constructor(element : ElementRef) {
this.elementRef = element;
}
ngOnInit() {
const slider = WidgetHelper.createWidget({
type : 'slider',
appendTo : this.elementRef.nativeElement,
max : this.max,
min : this.min,
onChange : this.onChange,
showTooltip : this.showTooltip,
showValue : this.showValue,
step : this.step,
text : this.text,
value : this.value
});
this.slider = slider;
}
/**
* Destroys the Bryntum slider
*/
ngOnDestroy() {
if (this.slider) {
this.slider.destroy();
}
}
}
|
#!/bin/bash
# Tear down the kops cluster named by $CLUSTER_NAME.
# (The original also ran `set -e`, which duplicates `set -o errexit`.)
set -o errexit
set -o pipefail
set -x

err_report() {
    echo "Error on line $1"
}
trap 'err_report $LINENO' ERR

# Fail fast with a clear message when the cluster name is missing or empty.
: "${CLUSTER_NAME:?CLUSTER_NAME must be set}"
kops delete cluster "$CLUSTER_NAME" --yes
|
#!/bin/sh
# Prove the simple-arithmetic spec with the Kore prover. KORE_EXEC must point
# at the kore-exec binary (the :? expansion aborts with an error when unset);
# extra CLI flags can be appended via "$@".
${KORE_EXEC:?} test-simple-arithmetic-vdefinition.kore --module KWASM-LEMMAS --prove test-simple-arithmetic-spec.kore --spec-module SIMPLE-ARITHMETIC-SPEC "$@"
|
import { IDecorator, Named } from "..";
/**
 * Serializable representation of a decorator: a name (from Named) plus an
 * argument list.
 */
export class Decorator extends Named implements IDecorator {
    private _arguments: any[] = [];

    /** Read-only view of the decorator's arguments. */
    get Arguments() {
        return this._arguments as ReadonlyArray<any>;
    }

    /** Parses plain IDecorator objects into Decorator instances. */
    static parseObjects(objs: ArrayLike<IDecorator>) {
        return super.genericParseObjects(Decorator, objs);
    }

    /** Serializes this instance to a plain IDecorator object. */
    ToObject(): IDecorator {
        return {
            Arguments: this.Arguments,
            Name: this.Name
        };
    }

    /** Populates this instance from a plain IDecorator object. */
    ParseObject(obj: IDecorator) {
        this
            .SetName(obj.Name)
            .AddArgument(...Array.from(obj.Arguments));
        return this;
    }

    /** Appends one or more arguments; returns `this` for chaining. */
    AddArgument(...args: any[]) {
        return this.addToArray(this._arguments, ...args);
    }

    /**
     * Removes the first occurrence of `argument`, if present.
     * Fix: when the argument was absent, the original called splice(-1, 1)
     * and silently deleted the LAST argument instead of doing nothing.
     */
    RemoveArgument(argument: any) {
        const foundArgIndex = this._arguments.findIndex((arg) => argument === arg);
        if (foundArgIndex !== -1) {
            this._arguments.splice(foundArgIndex, 1);
        }
        return this;
    }
}
|
<reponame>nksafelab/calligraphy
package com.jinke.calligraphy.app.branch;
import com.jinke.single.BitmapCount;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Paint;
public class ShareLogo {

  // Lazily-initialized singleton logo bitmap. volatile + the nested null
  // checks below form the double-checked locking idiom, so the reference is
  // published safely across threads.
  private volatile static Bitmap logoBitmap;

  private ShareLogo() {}

  /**
   * Returns the shared logo bitmap, decoding it from disk on first use.
   * ARGB_4444 halves memory versus the (commented-out) ARGB_8888 variant at
   * the cost of color fidelity.
   */
  public static Bitmap getInstance() {
    if(logoBitmap == null) {
      synchronized(ShareLogo.class) {
        if(logoBitmap == null) {
          // logoBitmap = BitmapFactory.decodeFile(MyView.FILE_PATH_HEADER + "/logo.png").
          // copy(Bitmap.Config.ARGB_8888, true);
          // NOTE(review): decodeFile returns null when the file is missing or
          // unreadable, which would NPE on copy() — confirm logo.png always exists.
          logoBitmap = BitmapFactory.decodeFile(MyView.FILE_PATH_HEADER + "/logo.png").
          copy(Bitmap.Config.ARGB_4444, true);
          BitmapCount.getInstance().createBitmap("ShareLogo decode logo.png");
        }
      }
    }
    return logoBitmap;
  }

  /**
   * Draws the logo into the bottom-right corner of dst (10px inset) and
   * returns dst; dst must be a mutable bitmap.
   */
  public static Bitmap addLogo(Bitmap dst) {
    Canvas c = new Canvas(dst);
    int xPos = dst.getWidth() - getInstance().getWidth() - 10;
    int yPos = dst.getHeight() - getInstance().getHeight() - 10;
    c.drawBitmap(getInstance(), xPos, yPos, new Paint());
    return dst;
  }
}
|
/*
** EPITECH PROJECT, 2020
** engine
** File description:
** multiple calculation for physics handling
*/
#include "../../include/dependancies.h"
/*
** Returns the length of `vector` when its truncated integer magnitude is
** below 50; otherwise returns 1.0f.
** NOTE(review): the magnitude is computed twice (once truncated for the
** threshold, once returned), and the 1.0f fallback for "long" vectors looks
** like a clamp for the physics step — confirm the intent before changing.
*/
float internal__get_magnitude(sfVector2f vector)
{
    if (abs((int)sqrt(pow(vector.x, 2) + pow(vector.y, 2))) < 50)
        return (sqrt(pow(vector.x, 2) + pow(vector.y, 2)));
    return (1.0f);
}
/*
** Angle of the vector from spot_1 to spot_2 in degrees, sign-flipped
** (screen Y grows downward).
** NOTE(review): 57 approximates 180/pi (57.2958); the coarse constant
** introduces ~0.5% error — confirm whether that is deliberate.
*/
float internal__vector_angle(sfVector2f spot_1, sfVector2f spot_2)
{
    float angle = 0.0f;
    sfVector2f vector;

    vector.x = spot_2.x - spot_1.x;
    vector.y = spot_2.y - spot_1.y;
    angle = atan2(vector.y, vector.x) * 57 * -1;
    return (angle);
}
/*
** "Dot product" of the two spots via |a| * |b| * cos(angle).
** NOTE(review): internal__vector_angle returns DEGREES but cos() expects
** RADIANS, and the magnitudes above are clamped to 1.0f past 50 — so this
** does not compute a true dot product; confirm callers rely on the current
** behavior before fixing.
*/
float internal__dot_product(sfVector2f spot_1, sfVector2f spot_2)
{
    float product = 0.0f;
    float angle = internal__vector_angle(spot_1, spot_2);

    product = internal__get_magnitude(spot_1)*
    internal__get_magnitude(spot_2) * cos(angle);
    return (product);
}
|
def median_arr(arr):
    """Return the median of a non-empty sequence of numbers.

    Works on a sorted copy so the caller's list is NOT mutated (the
    original sorted `arr` in place — a surprising side effect).

    Raises:
        ValueError: if `arr` is empty.
    """
    if not arr:
        raise ValueError("median of an empty sequence")
    ordered = sorted(arr)
    mid = len(ordered) // 2
    if len(ordered) % 2 == 0:
        # Even count: mean of the two middle elements.
        return 0.5 * (ordered[mid - 1] + ordered[mid])
    return ordered[mid]
# Count from a starting value up to 10 (inclusive), printing each number.
# Fix: the original referenced `starting_number` without ever defining it
# (NameError) — presumably it was assigned in surrounding tutorial text.
starting_number = 1
number = starting_number
while number <= 10:
    print(number)
    number += 1
#!/bin/bash
# Build this package's Haddock docs and upload them to Hackage.
# Usage: scripts/hackage-docs.sh HACKAGE_USER   (run from the package root)
set -e

if [ "$#" -ne 1 ]; then
  echo "Usage: scripts/hackage-docs.sh HACKAGE_USER"
  exit 1
fi
user=$1

# The .cabal file marks the package root and names the package.
cabal_file=$(find . -maxdepth 1 -name "*.cabal" -print -quit)
if [ ! -f "$cabal_file" ]; then
  echo "Run this script in the top-level package directory"
  exit 1
fi

# Read name/version from the .cabal file (keys matched case-insensitively).
pkg=$(awk -F ":[[:space:]]*" 'tolower($1)=="name" { print $2 }' < "$cabal_file")
ver=$(awk -F ":[[:space:]]*" 'tolower($1)=="version" { print $2 }' < "$cabal_file")
if [ -z "$pkg" ]; then
  echo "Unable to determine package name"
  exit 1
fi
if [ -z "$ver" ]; then
  echo "Unable to determine package version"
  exit 1
fi
echo "Detected package: $pkg-$ver"

# Stage the docs in a temp dir that is removed on exit.
dir=$(mktemp -d build-docs.XXXXXX)
trap 'rm -r "$dir"' EXIT

# cabal haddock --hoogle --hyperlink-source --html-location='/package/$pkg-$version/docs' --contents-location='/package/$pkg-$version'
stack haddock
# NOTE(review): this path hard-codes x86_64-linux and Cabal-1.18.1.5 — it will
# break on other platforms/resolvers; confirm before reuse.
cp -R .stack-work/dist/x86_64-linux/Cabal-1.18.1.5/doc/html/$pkg/ $dir/$pkg-$ver-docs
# Hackage requires a ustar-format tarball named "<pkg>-<ver>-docs".
tar cvz -C $dir --format=ustar -f $dir/$pkg-$ver-docs.tar.gz $pkg-$ver-docs
# PUT the gzipped tarball to the package's /docs endpoint.
curl -X PUT \
  -H 'Content-Type: application/x-tar' \
  -H 'Content-Encoding: gzip' \
  -u "$user" \
  --data-binary "@$dir/$pkg-$ver-docs.tar.gz" \
  "https://hackage.haskell.org/package/$pkg-$ver/docs"
|
class Room:
    """An encrypted room record: name, sector id, and declared checksum."""

    def __init__(self, name, sector_id, checksum):
        self.name = name
        self.sector_id = sector_id
        self.checksum = checksum

    def is_real(self):
        """A room is real when the five most frequent letters of its name
        (ties broken alphabetically) equal the declared checksum."""
        counts = {}
        for ch in self.name:
            if ch != '-' and ch.isalpha():
                counts[ch] = counts.get(ch, 0) + 1
        # Sort letters by descending frequency, then alphabetically.
        ranked = sorted(counts, key=lambda letter: (-counts[letter], letter))
        return ''.join(ranked[:5]) == self.checksum
def decrypt_rooms(datafile):
    """Parse rooms from `datafile`, print each real room's shift-decrypted
    name, and return the sum of real rooms' sector ids."""

    def decrypt_name(name, shift):
        # Caesar-shift each lowercase letter by `shift`; other chars pass through.
        decrypted_name = ''
        for char in name:
            if char.isalpha():
                decrypted_name += chr(((ord(char) - ord('a') + shift) % 26) + ord('a'))
            else:
                decrypted_name += char
        return decrypted_name

    def load_rooms(datafile):
        # Each line looks like "aaaaa-bbb-z-y-x-123[abxyz]".
        rooms = []
        with open(datafile, 'r') as file:
            for line in file:
                parts = line.strip().split('-')
                name = '-'.join(parts[:-1])
                sector_checksum = parts[-1].split('[')
                sector_id = int(sector_checksum[0].split('-')[-1])
                # Fix: the line is already stripped, so only the trailing ']'
                # remains — drop ONE character. The original's [:-2] truncated
                # the 5-letter checksum to 4, making every room look fake.
                checksum = sector_checksum[1][:-1]
                rooms.append(Room(name, sector_id, checksum))
        return rooms

    rooms = load_rooms(datafile)
    sector_sum = 0
    for room in rooms:
        if room.is_real():
            sector_sum += room.sector_id
            decrypted_name = decrypt_name(room.name, room.sector_id)
            print(f"Decrypted name for {room.name} is {decrypted_name}")
    return sector_sum
# Example usage: expects an Advent-of-Code-style room list in room_data.txt
# (the printed total depends entirely on that file's contents).
datafile = 'room_data.txt'
print(decrypt_rooms(datafile))  # Output: 1514
#!/bin/bash
# Copyright 2015 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -eu
# Main deploy functions for the continous build system
# Just source this file and use the various method:
#   bazel_build build bazel and run all its test
#   bazel_release use the artifact generated by bazel_build and push
#     them to github for a release and to GCS for a release candidate.
#     Also prepare an email for announcing the release.

# Load common.sh
SCRIPT_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
source $(dirname ${SCRIPT_DIR})/release/common.sh

# Overridable configuration; ":=" keeps any value already set by the caller.
# %release_name% and %rc% below are placeholders expanded by
# generate_from_template.
: ${GIT_REPOSITORY_URL:=https://github.com/bazelbuild/bazel}
: ${GCS_BASE_URL:=https://storage.googleapis.com}
: ${GCS_BUCKET:=bucket-o-bazel}
: ${GCS_APT_BUCKET:=bazel-apt}
: ${EMAIL_TEMPLATE_RC:=${SCRIPT_DIR}/rc_email.txt}
: ${EMAIL_TEMPLATE_RELEASE:=${SCRIPT_DIR}/release_email.txt}
: ${RELEASE_CANDIDATE_URL:="${GCS_BASE_URL}/${GCS_BUCKET}/%release_name%/rc%rc%/index.html"}
# NOTE(review): this line uses ${VAR=...} (no colon) unlike its siblings, so a
# set-but-empty RELEASE_URL is preserved — confirm that is intended.
: ${RELEASE_URL="${GIT_REPOSITORY_URL}/releases/tag/%release_name%"}
: ${BOOTSTRAP_BAZEL:=bazel}

# Lowercased kernel name: linux / darwin / freebsd.
PLATFORM="$(uname -s | tr 'A-Z' 'a-z')"
# Platform-dependent sha256 helper: macOS/FreeBSD ship `shasum`, Linux has
# `sha256sum`. Both variants cd into the file's directory first so the
# printed filename is bare (no leading path).
if [[ ${PLATFORM} == "darwin" ]] || [[ ${PLATFORM} == "freebsd" ]] ; then
  function checksum() {
    (cd "$(dirname "$1")" && shasum -a 256 "$(basename "$1")")
  }
else
  function checksum() {
    (cd "$(dirname "$1")" && sha256sum "$(basename "$1")")
  }
fi
# Appends android_sdk_repository (and optionally android_ndk_repository)
# stanzas to WORKSPACE when ANDROID_SDK_PATH / ANDROID_NDK_PATH are set.
# The original WORKSPACE is backed up and restored on exit via trap.
function setup_android_repositories() {
  if [ ! -f WORKSPACE.bak ] && [ -n "${ANDROID_SDK_PATH-}" ]; then
    cp WORKSPACE WORKSPACE.bak
    trap '[ -f WORKSPACE.bak ] && rm WORKSPACE && mv WORKSPACE.bak WORKSPACE' \
      EXIT
    # Make sure that WORKSPACE ends with a newline, otherwise we'll end up with
    # a syntax error.
    echo >>WORKSPACE
    cat >>WORKSPACE <<EOF
android_sdk_repository(
name = "androidsdk",
path = "${ANDROID_SDK_PATH}",
)
EOF
    if [ -n "${ANDROID_NDK_PATH-}" ]; then
      cat >>WORKSPACE <<EOF
android_ndk_repository(
name = "androidndk",
path = "${ANDROID_NDK_PATH}",
)
EOF
    fi
  fi
}
# Main entry point for building bazel.
# It sets the embed label to the release name if any, calls the whole
# test suite, compile the various packages, then copy the artifacts
# to the folder in $1
function bazel_build() {
  local release_label="$(get_full_release_name)"
  local embed_label_opts=
  if [ -n "${release_label}" ]; then
    export EMBED_LABEL="${release_label}"
  fi
  # JDK7 builds get a "-jdk7" suffixed label; everything else targets 1.8.
  if [[ "${JAVA_VERSION-}" =~ ^(1\.)?7$ ]]; then
    JAVA_VERSION=1.7
    release_label="${release_label}-jdk7"
  else
    JAVA_VERSION=1.8
  fi
  # Build the packages
  # On macOS, enable the iOS bits only when an iphonesimulator SDK is present.
  local ARGS=
  if [[ $PLATFORM == "darwin" ]] && \
      xcodebuild -showsdks 2> /dev/null | grep -q '\-sdk iphonesimulator'; then
    ARGS="--define IPHONE_SDK=1"
  fi
  # FreeBSD cannot build the site/packaging/benchmark targets.
  local OPTIONAL_TARGETS="//site:jekyll-tree //scripts/packages //src/tools/benchmark/webapp:site"
  if [[ $PLATFORM =~ "freebsd" ]] ; then
    OPTIONAL_TARGETS=
  fi
  ${BOOTSTRAP_BAZEL} --bazelrc=${BAZELRC:-/dev/null} --nomaster_bazelrc build \
      --embed_label=${release_label} --stamp \
      --workspace_status_command=scripts/ci/build_status_command.sh \
      --define JAVA_VERSION=${JAVA_VERSION} \
      ${ARGS} \
      //src:bazel \
      ${OPTIONAL_TARGETS} || exit $?
  if [ -n "${1-}" ]; then
    # Copy the results to the output directory
    mkdir -p $1/packages
    cp bazel-bin/src/bazel $1/bazel
    # The version with a bundled JDK may not exist on all platforms.
    if [ "${JAVA_VERSION}" = "1.8" -a -e "bazel-bin/scripts/packages/with-jdk/install.sh" ]; then
      cp bazel-bin/scripts/packages/with-jdk/install.sh $1/bazel-${release_label}-installer.sh
      cp bazel-bin/scripts/packages/without-jdk/install.sh $1/bazel-${release_label}-without-jdk-installer.sh
    else
      cp bazel-bin/scripts/packages/without-jdk/install.sh $1/bazel-${release_label}-installer.sh
    fi
    # Debian packages and the full distribution zip are Linux-only artifacts.
    if [ "$PLATFORM" = "linux" ]; then
      cp bazel-bin/scripts/packages/debian/bazel-debian.deb $1/bazel_${release_label}.deb
      cp -f bazel-genfiles/scripts/packages/debian/bazel.dsc $1/bazel.dsc
      cp -f bazel-genfiles/scripts/packages/debian/bazel.tar.gz $1/bazel.tar.gz
      if [ "${JAVA_VERSION}" = "1.8" ]; then
        cp bazel-genfiles/bazel-distfile.zip $1/bazel-${release_label}-dist.zip
      fi
    fi
    cp bazel-genfiles/site/jekyll-tree.tar $1/docs.bazel.build.tar
    cp bazel-bin/src/tools/benchmark/webapp/site.tar $1/perf.bazel.build.tar.nobuild
    cp bazel-genfiles/scripts/packages/README.md $1/README.md
  fi
}
# Generate a string from a template and a list of substitutions.
# $1 is the template; each following pair of arguments is a
# (placeholder, replacement) couple, applied in order.
function generate_from_template() {
  local result="$1"
  shift
  # Consume (pattern, replacement) pairs until fewer than two args remain.
  while (( $# >= 2 )); do
    result="${result//$1/$2}"
    shift 2
  done
  echo "${result}"
}
# Generate the email for the release.
# The first line of the output will be the recipient, the second line
# the mail subjects and the subsequent lines the mail, its content.
# If no planed release, then this function output will be empty.
function generate_email() {
  local release_name=$(get_release_name)
  local rc=$(get_release_candidate)
  # Substitution pairs shared by both templates.
  local args=(
      "%release_name%" "${release_name}"
      "%rc%" "${rc}"
      "%relnotes%" "# $(get_full_release_notes)"
  )
  # A non-empty rc means release candidate; a bare release name means final
  # release; neither means no planned release (empty output).
  if [ -n "${rc}" ]; then
    args+=(
        "%url%"
        "$(generate_from_template "${RELEASE_CANDIDATE_URL}" "${args[@]}")"
    )
    generate_from_template "$(cat ${EMAIL_TEMPLATE_RC})" "${args[@]}"
  elif [ -n "${release_name}" ]; then
    args+=(
        "%url%"
        "$(generate_from_template "${RELEASE_URL}" "${args[@]}")"
    )
    generate_from_template "$(cat ${EMAIL_TEMPLATE_RELEASE})" "${args[@]}"
  fi
}
# Deploy a github release using a third party tool:
#   https://github.com/c4milo/github-release
# This methods expects the following arguments:
#   $1..$n files generated by package_build (should not contains the README file)
# Please set GITHUB_TOKEN to talk to the Github API and GITHUB_RELEASE
# for the path to the https://github.com/c4milo/github-release tool.
# This method is also affected by GIT_REPOSITORY_URL which should be the
# URL to the github repository (defaulted to https://github.com/bazelbuild/bazel).
function release_to_github() {
  local url="${GIT_REPOSITORY_URL}"
  local release_name=$(get_release_name)
  local rc=$(get_release_candidate)
  local release_tool="${GITHUB_RELEASE:-$(which github-release 2>/dev/null || true)}"
  # Fix: the backticks in this double-quoted string were previously
  # unescaped, so `bazel license` and `bazel info java-runtime` executed as
  # command substitutions at assignment time instead of rendering as
  # markdown code spans.
  local gpl_warning="
_Notice_: Bazel installers contain binaries licensed under the GPLv2 with
Classpath exception. Those installers should always be redistributed along with
the source code.
Some versions of Bazel contain a bundled version of OpenJDK. The license of the
bundled OpenJDK and other open-source components can be displayed by running
the command \`bazel license\`. The vendor and version information of the bundled
OpenJDK can be displayed by running the command \`bazel info java-runtime\`.
The binaries and source-code of the bundled OpenJDK can be
[downloaded from our mirror server](https://bazel-mirror.storage.googleapis.com/openjdk/index.html).
_Security_: All our binaries are signed with our
[public key](https://bazel.build/bazel-release.pub.gpg) 48457EE0.
"
  if [ ! -x "${release_tool}" ]; then
    echo "Please set GITHUB_RELEASE to the path to the github-release binary." >&2
    echo "This probably means you haven't installed https://github.com/c4milo/github-release " >&2
    echo "on this machine." >&2
    return 1
  fi
  # Extract "owner/repo" from the repository URL.
  local github_repo="$(echo "$url" | sed -E 's|https?://github.com/([^/]*/[^/]*).*$|\1|')"
  # Only publish final releases (a release name with no rc suffix).
  if [ -n "${release_name}" ] && [ -z "${rc}" ]; then
    mkdir -p "${tmpdir}/to-github"
    cp "${@}" "${tmpdir}/to-github"
    "${GITHUB_RELEASE}" "${github_repo}" "${release_name}" "" "# $(git_commit_msg) ${gpl_warning}" "${tmpdir}/to-github/"'*'
  fi
}
# Creates an index of the files contained in folder $1 in markdown format.
# Output goes to stdout: the folder's README.md (if any), followed by a list
# of the released files, linking each artifact to its checksum and signature.
function create_index_md() {
  # First, add the README.md
  if [ -f "$1/README.md" ]; then
    cat "$1/README.md"
  fi
  # Then, add the list of files
  echo
  echo "## Index of files"
  echo
  # Security notice
  echo "_Security_: All our binaries are signed with our"
  echo "[public key](https://bazel.build/bazel-release.pub.gpg) 48457EE0."
  echo
  # One entry per artifact; iterate over the .sha256 files so only artifacts
  # that were checksummed (i.e. actually released) are listed.
  for f in "$1"/*.sha256; do
    local filename=$(basename "$f" .sha256)
    # Was echoing literal "$(unknown)" placeholders, which also invoked a
    # nonexistent `unknown` command; link the artifact name instead.
    echo " - [${filename}](${filename}) [[SHA-256](${filename}.sha256)] [[SIG](${filename}.sig)]"
  done
}
# Creates an index of the files contained in folder $1 in HTML format
# It supposes hoedown (https://github.com/hoedown/hoedown) is on the path,
# if not, set the HOEDOWN environment variable to the good path.
function create_index_html() {
local hoedown="${HOEDOWN:-$(which hoedown 2>/dev/null || true)}"
# The markdown from create_index_md is massaged before conversion:
#  - a blank line is inserted after any "Baseline..." line (second sed line)
#    to trick hoedown into behaving like GitHub's renderer,
#  - "  + " bullets become "  - " bullets,
#  - underscores are escaped so hoedown does not read them as emphasis.
create_index_md "${@}" \
| sed -E 's/^(Baseline.*)$/\1\
/' | sed 's/^  + /  - /' | sed 's/_/\\_/g' \
| "${hoedown}"
}
# Print the path to the gsutil binary, or fail with a hint if it cannot be
# found. Overridable through the GSUTIL environment variable.
function get_gsutil() {
  # The stray " -m" that used to be appended inside the default value made
  # the -x test below always fail and broke quoted invocations of "${gs}";
  # callers pass -m explicitly where parallel transfers are wanted.
  local gs="${GSUTIL:-$(which gsutil 2>/dev/null || true)}"
  if [ ! -x "${gs}" ]; then
    echo "Please set GSUTIL to the path to the gsutil binary." >&2
    echo "gsutil (https://cloud.google.com/storage/docs/gsutil/) is the" >&2
    echo "command-line interface to google cloud." >&2
    exit 1
  fi
  echo "${gs}"
}
# Deploy a release candidate to Google Cloud Storage.
# It requires to have gsutil installed. You can force the path to gsutil
# by setting the GSUTIL environment variable. The GCS_BUCKET should be the
# name of the Google cloud bucket to deploy to.
# This methods expects the following arguments:
#   $1..$n files generated by package_build
function release_to_gcs() {
local gs="$(get_gsutil)"
local release_name="$(get_release_name)"
local rc="$(get_release_candidate)"
if [ -z "${GCS_BUCKET-}" ]; then
echo "Please set GCS_BUCKET to the name of your Google Cloud Storage bucket." >&2
return 1
fi
# Only do anything when a release is actually planned.
if [ -n "${release_name}" ]; then
# Final releases land under <name>/release, candidates under <name>/rc<N>.
local release_path="${release_name}/release"
if [ -n "${rc}" ]; then
release_path="${release_name}/rc${rc}"
fi
# Make a temporary folder with the desired structure
local dir="$(mktemp -d ${TMPDIR:-/tmp}/tmp.XXXXXXXX)"
local prev_dir="$PWD"
# Clean up the scratch dir even if a command below fails.
trap "{ cd ${prev_dir}; rm -fr ${dir}; }" EXIT
mkdir -p "${dir}/${release_path}"
cp "${@}" "${dir}/${release_path}"
# Add a index.html file:
create_index_html "${dir}/${release_path}" \
>"${dir}/${release_path}"/index.html
# Upload the whole tree in one recursive, publicly-readable copy.
cd ${dir}
"${gs}" -m cp -a public-read -r . "gs://${GCS_BUCKET}"
cd "${prev_dir}"
rm -fr "${dir}"
trap - EXIT
fi
}
# Import the APT signing key (APT_GPG_KEY_PATH) into the local gpg keyring
# if the key id (APT_GPG_KEY_ID) is not already present, and force gpg to
# sign with SHA-256.
function ensure_gpg_secret_key_imported() {
(gpg --list-secret-keys | grep "${APT_GPG_KEY_ID}" > /dev/null) || \
gpg --allow-secret-key-import --import "${APT_GPG_KEY_PATH}"
# Make sure we use a stronger digest algorithm.
# We use reprepro to generate the debian repository,
# but there's no way to pass flags to gpg using reprepro, so writing it into
# ~/.gnupg/gpg.conf
(grep "digest-algo sha256" ~/.gnupg/gpg.conf > /dev/null) || \
echo "digest-algo sha256" >> ~/.gnupg/gpg.conf
}
# Build and upload a signed APT repository with reprepro.
# Expects to run inside a scratch directory already containing the packages.
# Arguments:
#   $1 distribution to publish to ("stable" or "testing")
#   $2 path to the jdk8 .deb package
#   $3 path to the jdk7 .deb package
#   $4 path to the .dsc source descriptor
# Also reads APT_GPG_KEY_ID and GCS_APT_BUCKET from the environment.
function create_apt_repository() {
  mkdir conf
  # reprepro requires stanzas in conf/distributions to be separated by a
  # blank line; without it the file is a single invalid stanza.
  cat > conf/distributions <<EOF
Origin: Bazel Authors
Label: Bazel
Codename: stable
Architectures: amd64 source
Components: jdk1.7 jdk1.8
Description: Bazel APT Repository
DebOverride: override.stable
DscOverride: override.stable
SignWith: ${APT_GPG_KEY_ID}

Origin: Bazel Authors
Label: Bazel
Codename: testing
Architectures: amd64 source
Components: jdk1.7 jdk1.8
Description: Bazel APT Repository
DebOverride: override.testing
DscOverride: override.testing
SignWith: ${APT_GPG_KEY_ID}
EOF
  cat > conf/options <<EOF
verbose
ask-passphrase
basedir .
EOF
  # TODO(#2264): this is a quick workaround #2256, figure out a correct fix.
  cat > conf/override.stable <<EOF
bazel Section contrib/devel
bazel Priority optional
EOF
  cat > conf/override.testing <<EOF
bazel Section contrib/devel
bazel Priority optional
EOF
  ensure_gpg_secret_key_imported
  local distribution="$1"
  local deb_pkg_name_jdk8="$2"
  local deb_pkg_name_jdk7="$3"
  local deb_dsc_name="$4"
  # Resolve gsutil here instead of relying on a dynamically-scoped "gs"
  # local leaking in from the caller.
  local gs="$(get_gsutil)"
  debsign -k ${APT_GPG_KEY_ID} "${deb_dsc_name}"
  reprepro -C jdk1.8 includedeb "${distribution}" "${deb_pkg_name_jdk8}"
  reprepro -C jdk1.8 includedsc "${distribution}" "${deb_dsc_name}"
  reprepro -C jdk1.7 includedeb "${distribution}" "${deb_pkg_name_jdk7}"
  reprepro -C jdk1.7 includedsc "${distribution}" "${deb_dsc_name}"
  # Publish the generated repository layout.
  "${gs}" -m cp -a public-read -r dists "gs://${GCS_APT_BUCKET}/"
  "${gs}" -m cp -a public-read -r pool "gs://${GCS_APT_BUCKET}/"
}
# Deploy the debian packages of the current release to the APT repository
# bucket (GCS_APT_BUCKET), signed with APT_GPG_KEY_ID. Release candidates go
# to the "testing" distribution, final releases to "stable".
# Reads the packages from the global ${tmpdir} populated by bazel_release.
function release_to_apt() {
local gs="$(get_gsutil)"
local release_name="$(get_release_name)"
local rc="$(get_release_candidate)"
if [ -z "${GCS_APT_BUCKET-}" ]; then
echo "Please set GCS_APT_BUCKET to the name of your GCS bucket for apt repository." >&2
return 1
fi
if [ -z "${APT_GPG_KEY_ID-}" ]; then
echo "Please set APT_GPG_KEY_ID for apt repository." >&2
return 1
fi
if [ -n "${release_name}" ]; then
# Make a temporary folder with the desired structure
local dir="$(mktemp -d ${TMPDIR:-/tmp}/tmp.XXXXXXXX)"
local prev_dir="$PWD"
# Clean up the scratch dir even if a command below fails.
trap "{ cd ${prev_dir}; rm -fr ${dir}; }" EXIT
mkdir -p "${dir}/${release_name}"
local release_label="$(get_full_release_name)"
# Repository-relative package paths, versioned with the full release label.
local deb_pkg_name_jdk8="${release_name}/bazel_${release_label}-linux-x86_64.deb"
local deb_pkg_name_jdk7="${release_name}/bazel_${release_label}-jdk7-linux-x86_64.deb"
local deb_dsc_name="${release_name}/bazel_${release_label}.dsc"
local deb_tar_name="${release_name}/bazel_${release_label}.tar.gz"
cp "${tmpdir}/bazel_${release_label}-linux-x86_64.deb" "${dir}/${deb_pkg_name_jdk8}"
cp "${tmpdir}/bazel_${release_label}-jdk7-linux-x86_64.deb" "${dir}/${deb_pkg_name_jdk7}"
cp "${tmpdir}/bazel.dsc" "${dir}/${deb_dsc_name}"
cp "${tmpdir}/bazel.tar.gz" "${dir}/${deb_tar_name}"
cd "${dir}"
# Release candidates go to "testing", final releases to "stable".
if [ -n "${rc}" ]; then
create_apt_repository testing "${deb_pkg_name_jdk8}" "${deb_pkg_name_jdk7}" "${deb_dsc_name}"
else
create_apt_repository stable "${deb_pkg_name_jdk8}" "${deb_pkg_name_jdk7}" "${deb_dsc_name}"
fi
cd "${prev_dir}"
rm -fr "${dir}"
trap - EXIT
fi
}
# A wrapper around the release deployment methods.
# $@: every artifact file produced by the release build.
function deploy_release() {
  local i
  local github_args=()
  # GitHub release: filter out README.md, the debian source archive and
  # descriptor, and *.nobuild artifacts. Dots are escaped so the regexes
  # match literal filenames instead of any character (e.g. "READMEXmd"
  # previously matched "README.md$").
  for i in "$@"; do
    if ! ( [[ "$i" =~ README\.md$ ]] || [[ "$i" =~ bazel\.dsc ]] || [[ "$i" =~ bazel\.tar\.gz ]] || [[ "$i" =~ \.nobuild$ ]] ) ; then
      github_args+=("$i")
    fi
  done
  local gcs_args=()
  # GCS release: only the *.nobuild files are excluded.
  for i in "$@"; do
    if ! [[ "$i" =~ \.nobuild$ ]] ; then
      gcs_args+=("$i")
    fi
  done
  release_to_github "${github_args[@]}"
  release_to_gcs "${gcs_args[@]}"
  release_to_apt
}
# A wrapper for the whole release phase:
#   Compute the SHA-256, and arrange the input
#   Sign every binary using gpg and generating .sig files
#   Deploy the release
#   Generate the email
# Input: $1 $2 [$3 $4 [$5 $6 ...]]
#   Each pair denotes a couple (platform, folder) where the platform
#   is the platform built for and the folder is the folder where the
#   artifacts for this platform are.
# Outputs:
#   RELEASE_EMAIL_RECIPIENT: who to send a mail to
#   RELEASE_EMAIL_SUBJECT: the subject of the email to be sent
#   RELEASE_EMAIL_CONTENT: the content of the email to be sent
function bazel_release() {
local README=$2/README.md
# Intentionally not "local": tmpdir is read by deploy_release and friends.
tmpdir=$(mktemp -d ${TMPDIR:-/tmp}/tmp.XXXXXXXX)
trap 'rm -fr ${tmpdir}' EXIT
ensure_gpg_secret_key_imported
# Consume (platform, folder) argument pairs.
while (( $# > 1 )); do
local platform=$1
local folder=$2
shift 2
for file in $folder/*; do
local filename=$(basename $file)
if [ "$filename" != README.md ]; then
# Debian artifacts and the dist zip keep their name; everything else is
# suffixed with the platform (before the extension when there is one).
if [ "$filename" == "bazel.dsc" ] || [ "$filename" == "bazel.tar.gz" ] \
|| [[ "$filename" =~ bazel-(.*)-dist\.zip ]] ; then
local destfile=${tmpdir}/$filename
elif [[ "$file" =~ /([^/]*)(\.[^\./]+)$ ]]; then
local destfile=${tmpdir}/${BASH_REMATCH[1]}-${platform}${BASH_REMATCH[2]}
else
local destfile=${tmpdir}/$filename-${platform}
fi
# bazel.tar.gz is duplicated under different platforms;
# if the file is already there, skip signing and checksumming it again.
if [ ! -f "$destfile" ]; then
mv $file $destfile
checksum $destfile > $destfile.sha256
gpg --no-tty --detach-sign -u "${APT_GPG_KEY_ID}" "$destfile"
fi
fi
done
done
deploy_release $README $(find ${tmpdir} -type f)
# First line: recipient; second line: subject; rest: body.
export RELEASE_EMAIL="$(generate_email)"
export RELEASE_EMAIL_RECIPIENT="$(echo "${RELEASE_EMAIL}" | head -1)"
export RELEASE_EMAIL_SUBJECT="$(echo "${RELEASE_EMAIL}" | head -2 | tail -1)"
export RELEASE_EMAIL_CONTENT="$(echo "${RELEASE_EMAIL}" | tail -n +3)"
}
# Use jekyll build to build the site and then gsutil to copy it to GCS
# Input: $1 tarball to the jekyll site
#        $2 name of the bucket to deploy the site to
#        $3 "nobuild" if only publish without build
# It requires to have gsutil installed. You can force the path to gsutil
# by setting the GSUTIL environment variable
function build_and_publish_site() {
tmpdir=$(mktemp -d ${TMPDIR:-/tmp}/tmp.XXXXXXXX)
trap 'rm -fr ${tmpdir}' EXIT
local gs="$(get_gsutil)"
local site="$1"
local bucket="$2"
local nobuild="$3"
if [ ! -f "${site}" ] || [ -z "${bucket}" ]; then
echo "Usage: build_and_publish_site <site-tarball> <bucket>" >&2
return 1
fi
local prod_dir="${tmpdir}"
# CNAME is excluded so the unpacked tree does not override the bucket's
# domain configuration.
tar xf "${site}" --exclude=CNAME -C "${tmpdir}"
if [ "$nobuild" != "nobuild" ]; then
jekyll build -s "${tmpdir}" -d "${tmpdir}/production"
prod_dir="${tmpdir}/production"
fi
# Rsync:
#   -r: recursive
#   -c: compute checksum even though the input is from the filesystem
"${gs}" rsync -r -c "${prod_dir}" "gs://${bucket}"
# Configure the bucket for website serving, then make it world-readable.
"${gs}" web set -m index.html -e 404.html "gs://${bucket}"
"${gs}" -m acl ch -R -u AllUsers:R "gs://${bucket}"
}
# Push json file to perf site, also add to file_list
# Input: $1 json file to push
#        $2 name of the bucket to deploy the site to
function push_benchmark_output_to_site() {
tmpdir=$(mktemp -d ${TMPDIR:-/tmp}/tmp.XXXXXXXX)
trap 'rm -fr ${tmpdir}' EXIT
local gs="$(get_gsutil)"
local output_file="$1"
local output_file_basename="$(basename ${output_file})"
local bucket="$2"
if [ ! -f "${output_file}" ] || [ -z "${bucket}" ]; then
echo "Usage: push_benchmark_output_to_site <json-file-name> <bucket>" >&2
return 1
fi
# Upload json file
"${gs}" cp "${output_file}" "gs://${bucket}/data/${output_file_basename}"
# Download file_list (it might not exist)
"${gs}" cp "gs://${bucket}/file_list" "${tmpdir}" || true
# Update file_list by appending the new entry, then re-upload it.
local list_file="${tmpdir}/file_list"
echo "${output_file_basename}" >> "${list_file}"
"${gs}" cp "${list_file}" "gs://${bucket}/file_list"
"${gs}" -m acl ch -R -u AllUsers:R "gs://${bucket}"
}
|
<filename>meta.js
// vue-cli template metadata: the interactive prompts asked when the project
// is scaffolded, and the message printed once generation completes.
// {{destDirName}} is interpolated by vue-cli with the target directory name.
module.exports = {
"prompts": {
// Required project name; becomes the package/app name.
"name": {
"type": "string",
"required": true,
"message": "Project name"
},
"description": {
"type": "string",
"required": false,
"message": "Project description",
"default": "A webpack project"
},
// Optional; vue-cli typically pre-fills this from git config.
"author": {
"type": "string",
"message": "Author"
},
},
"completeMessage": "To get started:\n\n  cd {{destDirName}}\n  npm install\n  npm run dev"
};
|
def get_max_depth(tree):
    """Return the maximum depth (number of levels) of a binary tree.

    Performs an iterative breadth-first traversal, tracking the level of
    each node. An empty tree has depth 0; a single node has depth 1.

    Args:
        tree: root node with ``left``/``right`` attributes, or None.

    Returns:
        int: the depth of the deepest node.
    """
    if tree is None:
        return 0
    from collections import deque
    max_depth = 0
    # deque gives O(1) popleft; list.pop(0) was O(n) per dequeue.
    queue = deque([(tree, 1)])
    while queue:
        node, level = queue.popleft()
        # Only non-None nodes are ever enqueued, so no None check is needed.
        max_depth = max(max_depth, level)
        if node.left:
            queue.append((node.left, level + 1))
        if node.right:
            queue.append((node.right, level + 1))
    return max_depth
#!/bin/bash
# Run a program from the target's binary directory through ssh_wrapper.
# Usage: <script> <program-name>
. ./configuration.sh  # presumably defines TARGET_BIN_DIR -- TODO confirm
. ./functions.sh  # presumably defines ssh_wrapper -- TODO confirm
PROGRAM=$1
# NOTE(review): expansions are unquoted; word splitting may be intentional
# (e.g. passing extra arguments inside $PROGRAM) -- confirm before quoting.
ssh_wrapper $TARGET_BIN_DIR/$PROGRAM
<reponame>Grasea/Grandroid2<filename>grandroid-actions/src/main/java/com/grasea/grandroid/actions/ThreadAction.java<gh_stars>1-10
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package com.grasea.grandroid.actions;
import android.app.ProgressDialog;
import android.content.Context;
import android.os.Handler;
import android.os.Message;
import android.util.Log;
/**
 * An action whose {@code execute()} body runs on a freshly started background
 * thread, optionally after a delay, with an optional {@link ProgressDialog}
 * that is dismissed on the UI thread when the work finishes.
 *
 * NOTE(review): every constructor starts the thread before the subclass
 * constructor body has run ("this" escapes during construction) -- confirm
 * subclasses do not read their own fields inside execute().
 *
 * @author Rovers
 */
public abstract class ThreadAction extends ContextAction implements Runnable {
/**
 * Delay before executing, in milliseconds (0 = run immediately).
 */
protected long delayMSecond = 0;// milliseconds
/**
 * Posts completion back to the thread that built this action (dismisses the
 * progress dialog and runs the callback); only non-null for the
 * progress-dialog constructor.
 */
protected Handler handler;
/**
 * The background thread executing this action.
 */
protected Thread th;
/**
 * Starts the action immediately with an empty action name.
 *
 * @param context android context forwarded to ContextAction
 */
public ThreadAction(Context context) {
this(context, "");
}
/**
 * Starts the action immediately.
 *
 * @param context android context forwarded to ContextAction
 * @param actionName name forwarded to ContextAction
 */
public ThreadAction(Context context, String actionName) {
super(context, actionName);
th = new Thread(this);
th.start();
}
/**
 * Starts the action with a non-cancelable progress dialog and a no-op
 * completion callback.
 *
 * @param context android context
 * @param actionName name forwarded to ContextAction
 * @param message text shown in the progress dialog
 */
public ThreadAction(Context context, String actionName, String message) {
this(context, actionName, message, new Action());
}
/**
 * Starts the action with a non-cancelable progress dialog.
 *
 * @param context android context
 * @param actionName name forwarded to ContextAction
 * @param message text shown in the progress dialog
 * @param callback executed on the UI thread after execute() completes
 */
public ThreadAction(Context context, String actionName, String message, final Action callback) {
this(context, actionName, message, false, callback);
}
/**
 * Starts the action with a progress dialog that is dismissed when the
 * background work completes, then runs the callback.
 *
 * @param context android context
 * @param actionName name forwarded to ContextAction
 * @param message text shown in the progress dialog
 * @param cancelable whether the user may dismiss the dialog
 * @param callback executed on the UI thread after execute() completes
 */
public ThreadAction(Context context, String actionName, String message, boolean cancelable, final Action callback) {
super(context, actionName);
//TODO when cancelable is false, a timeout mechanism should be added
final ProgressDialog progress = new ProgressDialog(context);
progress.setCancelable(cancelable);
progress.setMessage(message);
// Handler is created on the constructing thread; the empty message sent
// from run() dismisses the dialog and fires the callback there.
handler = new Handler() {
@Override
public void handleMessage(Message msg) {
try {
progress.dismiss();
callback.execute();
} catch (Exception ex) {
Log.e("grandroid", null, ex);
}
}
};
progress.show();
th = new Thread(this);
th.start();
}
/**
 * Starts the action after the given delay, with no dialog or callback.
 *
 * @param context android context
 * @param delayMSecond delay before execution, in milliseconds
 */
public ThreadAction(Context context, long delayMSecond) {
super(context);
this.delayMSecond = delayMSecond;
th = new Thread(this);
th.start();
}
/**
 * Background-thread body: optional sleep, then execute(), then notify the
 * handler (if any). NOTE(review): only InterruptedException is caught here;
 * other runtime exceptions from execute() would kill the thread silently.
 */
public void run() {
try {
if (delayMSecond > 0) {
Thread.sleep(delayMSecond);
}
this.execute();
if (handler != null) {
handler.sendEmptyMessage(0);
}
} catch (InterruptedException ex) {
Log.e("grandroid", null, ex);
}
}
/**
 * Interrupts the background thread.
 */
public void interrupt() {
th.interrupt();
}
/**
 * @return whether the background thread has been interrupted
 */
public boolean isInterrupt() {
return th.isInterrupted();
}
}
|
// Assuming the existence of a TextView with id "textViewMessage" in the fragment_my.xml layout file
// NOTE(review): loose snippet -- presumably the body of a Fragment's
// onCreateView(inflater, container, savedInstanceState); confirm placement.
View view = inflater.inflate(R.layout.fragment_my, container, false);
// NOTE(review): getArguments() may be null if the fragment was created
// without arguments -- confirm callers always supply a "text" argument.
String text = getArguments().getString("text");
// Find the TextView in the inflated view and set the text
TextView textViewMessage = view.findViewById(R.id.textViewMessage);
textViewMessage.setText(text);
import React from "react";
import styled from "emotion/react";
import PropTypes from "prop-types";
import Island from "../misc/island";
import Title from "../misc/title";
// Styled building blocks for the payment success / error cards.
const WithdrawLayout = styled(Island)`
width: 350px;
display: flex;
flex-direction: column;
background-color: #353536;
position: relative;
color: #fff;
box-sizing: border-box;
`;
const WithdrawLayoutError = styled(Island)`
width: 350px;
display: flex;
flex-direction: column;
background-color: #7f0d00;
position: relative;
color: #fff;
box-sizing: border-box;
`;
const CheckIcom = styled.div`
width: 48px;
height: 48px;
background-image: url(/assets/round-check.svg);
position: absolute;
top: 14px;
right: 20px;
`;
const ErrorIcom = styled.div`
width: 48px;
height: 48px;
background-image: url(/assets/round-error.svg);
background-size: 48px;
position: absolute;
top: 14px;
right: 20px;
-webkit-filter: invert(100%);
filter: invert(100%);
`;
const Header = styled(Title)`
color: #fff;
`;
const SectionGroup = styled.div`
margin-bottom: 20px;
`;
const Section = styled.div`
margin-bottom: 20px;
width: 100%;
`;
const SectionLabel = styled.div`
font-size: 13px;
text-align: left;
`;
const SectionValue = styled.div`
font-size: 13px;
letter-spacing: 0.6px;
`;
const RepeatPayment = styled.button`
font-size: 13px;
background-color: rgba(0, 0, 0, 0.08);
height: 42px;
display: flex;
justify-content: center;
align-items: center;
border: none;
width: 100%;
position: absolute;
left: 0;
bottom: 0;
cursor: pointer;
text-transform: uppercase;
`;
/**
 * Success card shown after a card-to-card transfer completes.
 * Displays the destination card, the transferred sum with the active card's
 * currency sign, and a button to repeat the payment.
 */
export const PaymentSuccess = ({ activeCard, transaction, repeatPayment }) => {
const { sum, to } = transaction;
return (
<WithdrawLayout>
<CheckIcom />
<SectionGroup>
<Header>Перевод на карту выполнен</Header>
<Section>
<SectionLabel>Название платежа:</SectionLabel>
<SectionValue>Перевод на привязанную карту</SectionValue>
</Section>
<Section>
<SectionLabel>Карта на которую переводили:</SectionLabel>
<SectionValue>{to}</SectionValue>
</Section>
<Section>
<SectionLabel>Сумма:</SectionLabel>
<SectionValue>
{sum} {activeCard.currencySign}
</SectionValue>
</Section>
</SectionGroup>
<RepeatPayment onClick={repeatPayment}>
Отправить еще один перевод
</RepeatPayment>
</WithdrawLayout>
);
};
PaymentSuccess.propTypes = {
activeCard: PropTypes.object,
transaction: PropTypes.shape({
sum: PropTypes.string,
// The component destructures `to` (destination card) from the
// transaction; the shape previously declared a non-existent `number`.
to: PropTypes.string
}).isRequired,
repeatPayment: PropTypes.func.isRequired
};
/**
 * Error card shown when a card-to-card transfer fails.
 * Mirrors the success card but adds the error text and uses the red layout.
 */
export const PaymentError = ({
activeCard,
transaction,
repeatPayment,
error
}) => {
const { sum, to } = transaction;
return (
<WithdrawLayoutError>
<ErrorIcom />
<SectionGroup>
<Header>Ошибка</Header>
<Section>
<SectionLabel>Название платежа:</SectionLabel>
<SectionValue>Перевод на привязанную карту</SectionValue>
</Section>
<Section>
<SectionLabel>Карта на которую переводили:</SectionLabel>
<SectionValue>{to}</SectionValue>
</Section>
<Section>
<SectionLabel>Сумма:</SectionLabel>
<SectionValue>
{sum} {activeCard.currencySign}
</SectionValue>
</Section>
<Section>
<SectionLabel>Ошибка:</SectionLabel>
<SectionValue>{error}</SectionValue>
</Section>
</SectionGroup>
<RepeatPayment onClick={repeatPayment}>
Отправить еще один перевод
</RepeatPayment>
</WithdrawLayoutError>
);
};
PaymentError.propTypes = {
activeCard: PropTypes.object,
transaction: PropTypes.shape({
sum: PropTypes.string,
// The component destructures `to` (destination card) from the
// transaction; the shape previously declared a non-existent `number`.
to: PropTypes.string
}).isRequired,
repeatPayment: PropTypes.func.isRequired,
error: PropTypes.string.isRequired
};
|
package com.ramusthastudio.mypreloaddata;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.SQLException;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteStatement;
import java.util.ArrayList;
import static android.provider.BaseColumns._ID;
import static com.ramusthastudio.mypreloaddata.DatabaseContract.MahasiswaColumns.NAMA;
import static com.ramusthastudio.mypreloaddata.DatabaseContract.MahasiswaColumns.NIM;
import static com.ramusthastudio.mypreloaddata.DatabaseContract.TABLE_NAME;
/**
 * Data-access helper for the "mahasiswa" (student) table.
 * Call {@link #open()} before any query and {@link #close()} when done.
 */
public final class MahasiswaHelper {
    private Context context;
    private DatabaseHelper dataBaseHelper;
    private SQLiteDatabase database;

    public MahasiswaHelper(Context context) {
        this.context = context;
    }

    /** Opens a writable database connection; must be called before queries. */
    public MahasiswaHelper open() throws SQLException {
        dataBaseHelper = new DatabaseHelper(context);
        database = dataBaseHelper.getWritableDatabase();
        return this;
    }

    /** Closes the underlying database helper. */
    public void close() {
        dataBaseHelper.close();
    }

    /**
     * Returns every row whose name matches {@code nama} via SQL LIKE,
     * ordered by id.
     * NOTE(review): no wildcards are added here, so this is effectively an
     * exact match unless the caller embeds '%' in {@code nama} -- confirm
     * with callers.
     */
    public ArrayList<MahasiswaModel> getDataByName(String nama) {
        Cursor cursor = database.query(TABLE_NAME, null, NAMA + " LIKE ?", new String[] {nama}, null, null, _ID + " ASC", null);
        return readAll(cursor);
    }

    /** Returns every row in the table, ordered by id. */
    public ArrayList<MahasiswaModel> getAllData() {
        Cursor cursor = database.query(TABLE_NAME, null, null, null, null, null, _ID + " ASC", null);
        return readAll(cursor);
    }

    /**
     * Maps a cursor to model objects and always closes it, even if a column
     * lookup throws. Shared by the two query methods above.
     */
    private ArrayList<MahasiswaModel> readAll(Cursor cursor) {
        ArrayList<MahasiswaModel> arrayList = new ArrayList<>();
        try {
            if (cursor.moveToFirst()) {
                do {
                    MahasiswaModel mahasiswaModel = new MahasiswaModel();
                    mahasiswaModel.setId(cursor.getInt(cursor.getColumnIndexOrThrow(_ID)));
                    mahasiswaModel.setName(cursor.getString(cursor.getColumnIndexOrThrow(NAMA)));
                    mahasiswaModel.setNim(cursor.getString(cursor.getColumnIndexOrThrow(NIM)));
                    arrayList.add(mahasiswaModel);
                } while (cursor.moveToNext());
            }
        } finally {
            cursor.close();
        }
        return arrayList;
    }

    /** Inserts a row; returns the new row id, or -1 on error. */
    public long insert(MahasiswaModel mahasiswaModel) {
        ContentValues initialValues = new ContentValues();
        initialValues.put(NAMA, mahasiswaModel.getName());
        initialValues.put(NIM, mahasiswaModel.getNim());
        return database.insert(TABLE_NAME, null, initialValues);
    }

    public void beginTransaction() {
        database.beginTransaction();
    }

    public void setTransactionSuccess() {
        database.setTransactionSuccessful();
    }

    public void endTransaction() {
        database.endTransaction();
    }

    /**
     * Fast insert through a compiled statement; intended for bulk loads
     * inside begin/endTransaction.
     */
    public void insertTransaction(MahasiswaModel mahasiswaModel) {
        String sql = "INSERT INTO " + TABLE_NAME + " (" + NAMA + ", " + NIM
            + ") VALUES (?, ?)";
        SQLiteStatement stmt = database.compileStatement(sql);
        stmt.bindString(1, mahasiswaModel.getName());
        stmt.bindString(2, mahasiswaModel.getNim());
        stmt.execute();
        stmt.clearBindings();
    }

    /** Updates the row with the model's id; returns the affected row count. */
    public int update(MahasiswaModel mahasiswaModel) {
        ContentValues args = new ContentValues();
        args.put(NAMA, mahasiswaModel.getName());
        args.put(NIM, mahasiswaModel.getNim());
        // Parameterized selection instead of concatenating the id into SQL.
        return database.update(TABLE_NAME, args, _ID + " = ?", new String[] {String.valueOf(mahasiswaModel.getId())});
    }

    /** Deletes the row with the given id; returns the affected row count. */
    public int delete(int id) {
        return database.delete(TABLE_NAME, _ID + " = ?", new String[] {String.valueOf(id)});
    }
}
|
<gh_stars>0
package com.algorand.starter.demo.model;
/**
 * Value object describing a wallet-to-wallet transfer request: a
 * client-chosen idempotency key plus source, destination and amount.
 */
public class WalletTransfer {

    private String idempotencyKey;
    private TransferEntity source;
    private TransferEntity destination;
    private Amount amount;

    /**
     * @param idempotencyKey client-chosen key used to de-duplicate retries
     * @param source         entity the funds are taken from
     * @param destination    entity the funds are delivered to
     * @param amount         the amount to transfer
     */
    public WalletTransfer(String idempotencyKey, TransferEntity source, TransferEntity destination, Amount amount) {
        this.idempotencyKey = idempotencyKey;
        this.source = source;
        this.destination = destination;
        this.amount = amount;
    }

    public String getIdempotencyKey() {
        return idempotencyKey;
    }

    public void setIdempotencyKey(String idempotencyKey) {
        this.idempotencyKey = idempotencyKey;
    }

    public Amount getAmount() {
        return amount;
    }

    public void setAmount(Amount amount) {
        this.amount = amount;
    }

    public TransferEntity getSource() {
        return source;
    }

    public void setSource(TransferEntity source) {
        this.source = source;
    }

    public TransferEntity getDestination() {
        return destination;
    }

    public void setDestination(TransferEntity destination) {
        this.destination = destination;
    }
}
|
/**
 * Convert a time to a properly formatted string ("hh:mm:ss" or "d'd' hh:mm"
 * depending on length). Null will be converted to '0:00'.
 *
 * @param time the time (in seconds) to convert to a string
 * @returns {string} the formatted time
 */
export const convertToTimeString = (time: number) => {
  if (!time) return '0:00';
  const days = Math.floor(time / 86400);
  const hours = Math.floor((time % 86400) / 3600);
  const minutes = Math.floor((time % 3600) / 60);
  const seconds = time % 60;
  // Zero-pad a component to two characters.
  const pad = (n: number) => `${n < 10 ? '0' : ''}${n}`;
  // TODO leave it like this or adjust font size to fit and use 'd:hh:mm:ss'
  if (days === 0) {
    // "h:mm:ss" when there are hours, otherwise "m:ss".
    return hours ? `${hours}:${pad(minutes)}:${pad(seconds)}` : `${minutes}:${pad(seconds)}`;
  }
  return `${days}d ${hours}:${pad(minutes)}`;
};
/**
 * Normalize values in array (between 0 and 1).
 *
 * Non-integer entries are excluded from the result but still take part in
 * the min/max computation (pre-existing behavior, kept for compatibility).
 * When all values are equal (zero range), every normalized value is 0
 * instead of the previous NaN.
 *
 * @param values the array to normalize
 * @param useZeroAsMin whether to use zero or the lowest value from array as minimum; default: false (lowest value)
 * @returns {*[]} the array with the normalized values
 */
export const getNormalizedValues = (values: number[], useZeroAsMin: boolean = false) => {
  const min = useZeroAsMin ? 0 : Math.min(...values);
  const max = Math.max(...values);
  const range = max - min;
  const normalized: number[] = [];
  for (const value of values) {
    if (Number.isInteger(value)) {
      // Guard the zero-range case that previously produced NaN.
      normalized.push(range === 0 ? 0 : (value - min) / range);
    }
  }
  return normalized;
};
/**
 * Run a task at a certain interval.
 * If it is already running, stop the old interval first.
 *
 * @param intervalId the intervalId for this task
 * @param task the task/function to execute
 * @param interval the interval to run the task (in seconds)
 * @returns {number} the id of the newly scheduled interval
 */
export const startTask = (intervalId: NodeJS.Timer, task: () => void, interval: number): NodeJS.Timer => {
  // Cancel any previous schedule so the task never runs doubled.
  stopTask(intervalId);
  const millis = interval * 1000;
  return setInterval(task, millis);
};
/**
 * Stop the task/interval with the given id.
 * A falsy id is ignored, making it safe to call unconditionally.
 *
 * @param intervalId the id of the interval to stop
 * @returns {null} null so callers can reset their stored intervalId
 */
export const stopTask = (intervalId: NodeJS.Timer): null => {
  if (intervalId) clearInterval(intervalId);
  return null;
};
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.