text stringlengths 1 1.05M |
|---|
package host
import (
"bytes"
"errors"
"fmt"
"io"
"os"
"os/exec"
"strconv"
"strings"
"github.com/abiosoft/colima/util/terminal"
"github.com/abiosoft/colima/cli"
"github.com/abiosoft/colima/environment"
)
// New creates a new host environment.
func New() environment.Host {
	return &hostEnv{}
}

// compile-time assertion that hostEnv implements environment.Host.
var _ environment.Host = (*hostEnv)(nil)

// hostEnv runs commands directly on the host machine.
type hostEnv struct {
	// env holds extra environment variables appended to os.Environ()
	// for every command this environment runs.
	env []string
}
// WithEnv returns a copy of this host environment with the given
// environment variables appended after the existing ones.
func (h hostEnv) WithEnv(env ...string) environment.HostActions {
	combined := make([]string, 0, len(h.env)+len(env))
	combined = append(combined, h.env...)
	combined = append(combined, env...)
	return hostEnv{env: combined}
}
// Run executes the command on the host, streaming combined stdout and
// stderr through a scrolling verbose writer.
func (h hostEnv) Run(args ...string) error {
	if len(args) == 0 {
		return errors.New("args not specified")
	}
	cmd := cli.Command(args[0], args[1:]...)
	// inherit the host environment plus any vars added via WithEnv
	cmd.Env = append(os.Environ(), h.env...)
	lineHeight := 6
	if cli.Settings.Verbose {
		lineHeight = -1 // disable scrolling
	}
	out := terminal.NewVerboseWriter(lineHeight)
	cmd.Stdout = out
	cmd.Stderr = out
	err := cmd.Run()
	if err == nil {
		// close (flush) the writer only on success; on failure the
		// captured output is deliberately left visible for debugging
		return out.Close()
	}
	return err
}
// RunQuiet executes the command, discarding stdout; stderr is captured
// and surfaced only through the returned error on failure.
func (h hostEnv) RunQuiet(args ...string) error {
	if len(args) == 0 {
		return errors.New("args not specified")
	}
	cmd := cli.Command(args[0], args[1:]...)
	cmd.Env = append(os.Environ(), h.env...)

	var stderr bytes.Buffer
	cmd.Stdout = nil
	cmd.Stderr = &stderr

	if err := cmd.Run(); err != nil {
		return errCmd(cmd.Args, stderr, err)
	}
	return nil
}
// RunOutput executes the command and returns its whitespace-trimmed
// stdout. On failure, captured stderr is folded into the returned error.
func (h hostEnv) RunOutput(args ...string) (string, error) {
	if len(args) == 0 {
		return "", errors.New("args not specified")
	}
	cmd := cli.Command(args[0], args[1:]...)
	cmd.Env = append(os.Environ(), h.env...)

	var stdout, stderr bytes.Buffer
	cmd.Stdout = &stdout
	cmd.Stderr = &stderr

	if err := cmd.Run(); err != nil {
		return "", errCmd(cmd.Args, stderr, err)
	}
	return strings.TrimSpace(stdout.String()), nil
}
// errCmd builds a descriptive error for a failed command, combining the
// command arguments, the first line of captured stderr, and the raw error.
func errCmd(args []string, stderr bytes.Buffer, err error) error {
	// this is going to be part of a log output,
	// reading the first line of the error should suffice
	output, _ := stderr.ReadString('\n')
	// Trim the delimiter only when present. The previous unconditional
	// output[:len(output)-1] dropped the final character whenever stderr
	// did not end with a newline.
	output = strings.TrimSuffix(output, "\n")
	return fmt.Errorf("error running %v, output: %s, err: %s", args, strconv.Quote(output), strconv.Quote(err.Error()))
}
// RunInteractive executes the command attached to the user's terminal
// (stdin/stdout/stderr are wired up by cli.CommandInteractive).
func (h hostEnv) RunInteractive(args ...string) error {
	if len(args) == 0 {
		return errors.New("args not specified")
	}
	cmd := cli.CommandInteractive(args[0], args[1:]...)
	// inherit the host environment plus any vars added via WithEnv
	cmd.Env = append(os.Environ(), h.env...)
	return cmd.Run()
}
// RunWith executes the command interactively using the provided stdin and
// stdout; stderr is captured and reported through the returned error.
func (h hostEnv) RunWith(stdin io.Reader, stdout io.Writer, args ...string) error {
	if len(args) == 0 {
		return errors.New("args not specified")
	}
	cmd := cli.CommandInteractive(args[0], args[1:]...)
	cmd.Env = append(os.Environ(), h.env...)
	cmd.Stdin = stdin
	cmd.Stdout = stdout

	var stderr bytes.Buffer
	cmd.Stderr = &stderr

	if err := cmd.Run(); err != nil {
		return errCmd(cmd.Args, stderr, err)
	}
	return nil
}
// Env returns the value of the named environment variable on the host.
func (h hostEnv) Env(s string) string {
	return os.Getenv(s)
}

// Read returns the contents of the named file as a string.
func (h hostEnv) Read(fileName string) (string, error) {
	b, err := os.ReadFile(fileName)
	return string(b), err
}

// Write writes body to the named file, creating it with 0644 permissions.
func (h hostEnv) Write(fileName, body string) error {
	return os.WriteFile(fileName, []byte(body), 0644)
}

// Stat returns file information for the named file.
func (h hostEnv) Stat(fileName string) (os.FileInfo, error) {
	return os.Stat(fileName)
}
// IsInstalled checks if dependencies are installed.
// It returns nil when every dependency resolves on PATH; otherwise the
// error lists the missing binaries plus a brew command to install them.
func IsInstalled(dependencies environment.Dependencies) error {
	var missing []string
	for _, bin := range dependencies.Dependencies() {
		if _, err := exec.LookPath(bin); err != nil {
			missing = append(missing, bin)
		}
	}
	if len(missing) == 0 {
		return nil
	}
	return fmt.Errorf("%s not found, run 'brew install %s' to install", strings.Join(missing, ", "), strings.Join(missing, " "))
}
|
#!/bin/bash -l
#
# Sets up the continuum-subtraction job for the case where the
# continuum is represented by the clean model image from the continuum
# imaging
#
# @copyright (c) 2017 CSIRO
# Australia Telescope National Facility (ATNF)
# Commonwealth Scientific and Industrial Research Organisation (CSIRO)
# PO Box 76, Epping NSW 1710, Australia
# atnf-enquiries@csiro.au
#
# This file is part of the ASKAP software distribution.
#
# The ASKAP software distribution is free software: you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the License,
# or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# @author Matthew Whiting <Matthew.Whiting@csiro.au>
#
setContsubFilenames

# If we're here, then CONTSUB_METHOD=CleanModel
# In this bit, we use the clean model from the continuum imaging
# as the input to ccontsubtract
ContsubModelDefinition="# The model definition
CContsubtract.imagetype = ${IMAGETYPE_CONT}
CContsubtract.sources.names = [lsm]
CContsubtract.sources.lsm.direction = \${modelDirection}
CContsubtract.sources.lsm.model = ${contsubCleanModelImage%%.taylor.0}
CContsubtract.sources.lsm.nterms = ${NUM_TAYLOR_TERMS}"

# For multi-term continuum imaging, add MFS visibility weights with the
# appropriate reference frequency (user-supplied, or the band centre).
if [ "${NUM_TAYLOR_TERMS}" -gt 1 ]; then
    if [ "$MFS_REF_FREQ" == "" ]; then
        freq="\${centreFreq}"
    else
        freq=${MFS_REF_FREQ}
    fi
    ContsubModelDefinition="$ContsubModelDefinition
CContsubtract.visweights = MFS
CContsubtract.visweights.MFS.reffreq = ${freq}"
fi

cat > "$sbatchfile" <<EOFOUTER
#!/bin/bash -l
${SLURM_CONFIG}
#SBATCH --time=${JOB_TIME_SPECTRAL_CONTSUB}
#SBATCH --ntasks=1
#SBATCH --ntasks-per-node=1
#SBATCH --job-name=${jobname}
${exportDirective}
#SBATCH --output="$slurmOut/slurm-contsubSLsci-%j.out"
${askapsoftModuleCommands}
BASEDIR=${BASEDIR}
cd $OUTPUT
. ${PIPELINEDIR}/utils.sh
# Make a copy of this sbatch file for posterity
sedstr="s/sbatch/\${SLURM_JOB_ID}\.sbatch/g"
thisfile="$sbatchfile"
cp "\$thisfile" "\$(echo "\$thisfile" | sed -e "\$sedstr")"
# The MS metadata file is needed both for the model direction (when
# DIRECTION is not given) and for the centre frequency below, so define it
# unconditionally. Previously it was set only in the else-branch, leaving
# \$msMetadata empty for the centreFreq lookup whenever DIRECTION was set.
msMetadata="${MS_METADATA}"
if [ "${DIRECTION}" != "" ]; then
    modelDirection="${DIRECTION}"
else
    ra=\$(python "${PIPELINEDIR}/parseMSlistOutput.py" --file="\$msMetadata" --val=RA)
    dec=\$(python "${PIPELINEDIR}/parseMSlistOutput.py" --file="\$msMetadata" --val=Dec)
    epoch=\$(python "${PIPELINEDIR}/parseMSlistOutput.py" --file="\$msMetadata" --val=Epoch)
    modelDirection="[\${ra}, \${dec}, \${epoch}]"
fi
centreFreq="\$(python "${PIPELINEDIR}/parseMSlistOutput.py" --file="\$msMetadata" --val=Freq)"
parset="${parsets}/contsub_spectralline_${FIELDBEAM}_\${SLURM_JOB_ID}.in"
log="${logs}/contsub_spectralline_${FIELDBEAM}_\${SLURM_JOB_ID}.log"
cat > "\$parset" <<EOFINNER
# The measurement set name - this will be overwritten
CContSubtract.dataset = ${msSciSL}
${ContsubModelDefinition}
# The gridding parameters
CContSubtract.gridder.snapshotimaging = ${GRIDDER_SNAPSHOT_IMAGING}
CContSubtract.gridder.snapshotimaging.wtolerance = ${GRIDDER_SNAPSHOT_WTOL}
CContSubtract.gridder.snapshotimaging.longtrack = ${GRIDDER_SNAPSHOT_LONGTRACK}
CContSubtract.gridder.snapshotimaging.clipping = ${GRIDDER_SNAPSHOT_CLIPPING}
CContSubtract.gridder = WProject
CContSubtract.gridder.WProject.wmax = ${GRIDDER_WMAX}
CContSubtract.gridder.WProject.nwplanes = ${GRIDDER_NWPLANES}
CContSubtract.gridder.WProject.oversample = ${GRIDDER_OVERSAMPLE}
CContSubtract.gridder.WProject.maxfeeds = 1
CContSubtract.gridder.WProject.maxsupport = ${GRIDDER_MAXSUPPORT}
CContSubtract.gridder.WProject.frequencydependent = true
CContSubtract.gridder.WProject.variablesupport = true
CContSubtract.gridder.WProject.offsetsupport = true
EOFINNER
NCORES=1
NPPN=1
srun --export=ALL --ntasks=\${NCORES} --ntasks-per-node=\${NPPN} ${ccontsubtract} -c "\${parset}" > "\${log}"
err=\$?
rejuvenate ${msSciSL}
extractStats "\${log}" \${NCORES} "\${SLURM_JOB_ID}" \${err} ${jobname} "txt,csv"
if [ \$err != 0 ]; then
    exit \$err
else
    touch "$CONT_SUB_CHECK_FILE"
fi
EOFOUTER
|
<filename>modules/jooby-pac4j/src/test/java/org/jooby/pac4j/AuthSessionStoreTest.java
package org.jooby.pac4j;
import static org.easymock.EasyMock.expect;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import java.util.Optional;
import javax.inject.Provider;
import org.jooby.Mutant;
import org.jooby.Session;
import org.jooby.internal.pac4j.AuthSerializer;
import org.jooby.test.MockUnit;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.pac4j.core.profile.CommonProfile;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
@RunWith(PowerMockRunner.class)
@PrepareForTest({AuthSessionStore.class, AuthSerializer.class })
public class AuthSessionStoreTest {

  /** Constructing the store must only capture the provider, not touch the session. */
  @SuppressWarnings("unchecked")
  @Test
  public void defaults() throws Exception {
    new MockUnit(Provider.class)
        .run(unit -> {
          new AuthSessionStore<>(unit.get(Provider.class));
        });
  }

  /** get(id) reads "pac4jUserProfile.&lt;id&gt;" from the session and deserializes it. */
  @SuppressWarnings({"rawtypes", "unchecked" })
  @Test
  public void get() throws Exception {
    CommonProfile profile = new CommonProfile();
    new MockUnit(Provider.class, Session.class)
        .expect(unit -> {
          Provider provider = unit.get(Provider.class);
          expect(provider.get()).andReturn(unit.get(Session.class));
        })
        .expect(unit -> {
          // static serializer is mocked so we control the round-trip value
          unit.mockStatic(AuthSerializer.class);
          expect(AuthSerializer.strToObject("serialized")).andReturn(profile);
          Mutant ser = unit.mock(Mutant.class);
          expect(ser.toOptional()).andReturn(Optional.of("serialized"));
          Session session = unit.get(Session.class);
          expect(session.get("pac4jUserProfile.1")).andReturn(ser);
        })
        .run(unit -> {
          CommonProfile result = (CommonProfile) new AuthSessionStore(unit.get(Provider.class))
              .get("1").get();
          assertEquals(profile, result);
        });
  }

  /** A missing session attribute yields Optional.empty(), not an exception. */
  @SuppressWarnings({"rawtypes", "unchecked" })
  @Test
  public void getNotFound() throws Exception {
    new MockUnit(Provider.class, Session.class)
        .expect(unit -> {
          Provider provider = unit.get(Provider.class);
          expect(provider.get()).andReturn(unit.get(Session.class));
        })
        .expect(unit -> {
          Mutant ser = unit.mock(Mutant.class);
          expect(ser.toOptional()).andReturn(Optional.empty());
          Session session = unit.get(Session.class);
          expect(session.get("pac4jUserProfile.2")).andReturn(ser);
        })
        .run(unit -> {
          Optional<CommonProfile> profile = new AuthSessionStore(unit.get(Provider.class))
              .get("2");
          assertFalse(profile.isPresent());
        });
  }

  /** set(profile) serializes the profile and stores it under the profile's id. */
  @SuppressWarnings({"rawtypes", "unchecked" })
  @Test
  public void set() throws Exception {
    CommonProfile profile = new CommonProfile();
    profile.setId("1");
    profile.addAttribute("username", "test");
    profile.addAttribute("email", "<EMAIL>");
    profile.addPermission("p1");
    profile.addPermission("p2");
    profile.addRole("r1");
    new MockUnit(Provider.class, Session.class)
        .expect(unit -> {
          Provider provider = unit.get(Provider.class);
          expect(provider.get()).andReturn(unit.get(Session.class));
        })
        .expect(unit -> {
          unit.mockStatic(AuthSerializer.class);
          expect(AuthSerializer.objToStr(profile)).andReturn("serialized");
          Session session = unit.get(Session.class);
          expect(session.set("pac4jUserProfile.1", "serialized")).andReturn(session);
        })
        .run(unit -> {
          new AuthSessionStore(unit.get(Provider.class)).set(profile);
        });
  }

  /** unset(id) removes the attribute and returns the previously stored profile. */
  @SuppressWarnings({"unchecked", "rawtypes" })
  @Test
  public void unset() throws Exception {
    CommonProfile profile = new CommonProfile();
    new MockUnit(Provider.class, Session.class)
        .expect(unit -> {
          Provider provider = unit.get(Provider.class);
          expect(provider.get()).andReturn(unit.get(Session.class));
        })
        .expect(unit -> {
          unit.mockStatic(AuthSerializer.class);
          expect(AuthSerializer.strToObject("serialized")).andReturn(profile);
          Mutant ser = unit.mock(Mutant.class);
          expect(ser.toOptional()).andReturn(Optional.of("serialized"));
          Session session = unit.get(Session.class);
          expect(session.unset("pac4jUserProfile.1")).andReturn(ser);
        })
        .run(unit -> {
          CommonProfile result = (CommonProfile) new AuthSessionStore(unit.get(Provider.class))
              .unset("1").get();
          assertEquals(profile, result);
        });
  }
}
|
#!/bin/bash
# Tear down any previous deployment: stop lingering test processes,
# kill the running cluster, and remove build/deploy artifacts.
function cleanup() {
  killall go # Kill test process if still running.
  make kill
  make clean
}
# Best-effort login to Docker Hub using DOCKER_HUB_USER/DOCKER_HUB_PASSWORD.
# Never fails the build: missing docker, missing credentials, and a failed
# login all just print a message and continue.
function docker_login() {
  if docker -v ; then
    if [[ -z "$DOCKER_HUB_USER" ]] || [[ -z "$DOCKER_HUB_PASSWORD" ]]; then
      echo "Docker Hub login or password env variable empty"
    else
      docker login -u "$DOCKER_HUB_USER" -p "$DOCKER_HUB_PASSWORD" || echo "Failed to login to Docker Hub"
    fi
  else
    echo "docker binary not found, proceeding without logging in"
  fi
}
# Sanity-check a deployment: wait, then verify at least $1 aisnode
# processes are running; exits 1 otherwise.
# $1 - expected minimum number of aisnode processes
function post_deploy() {
  echo "sleep 10 seconds before checking AIStore processes"
  sleep 10
  # count running aisnode processes (one line per pid)
  nodes=$(ps -C aisnode -o pid= | wc -l)
  echo "number of started aisprocs: $nodes"
  if [[ $nodes -lt $1 ]]; then
    echo "some of the aisnodes did not start properly"
    exit 1
  fi
  echo "working with build: $(git rev-parse --short HEAD)"
  echo "run tests with cloud bucket: ${BUCKET}"
}
# $1 - num_targets; $2 - num_proxies; $3 - num_mountpaths; $4 - $7 - cloud; $8 loopback_mpaths
# Cleans up any previous deployment, builds the binaries, then feeds the
# interactive `make deploy` prompts from the positional arguments and
# verifies the expected number of nodes came up.
function deploy() {
  cleanup
  echo "build required binaries"
  make cli aisfs aisloader
  targets=$1
  proxies=$2
  # answers to the interactive deploy prompts, in prompt order
  { echo $targets; echo $proxies; echo $3; echo $4; echo $5; echo $6; echo $7; echo $8; } | MODE="debug" make deploy
  # exported for the test runs that follow
  export NUM_PROXY=$proxies
  export NUM_TARGET=$targets
  post_deploy $((targets + proxies))
}
set -o xtrace
source /etc/profile.d/aispaths.sh
source aws.env
source gcs.env
source run.env

cd $AISSRC && cd ..

# Check out the requested branch (default: origin/master).
git fetch --all
branch=${BRANCH:-"origin/master"}
echo "working on branch ${branch}"
git checkout $branch
git reset --hard $branch
git status
git log | head -5

# Accumulates exit codes across all test runs. Initialize explicitly
# instead of relying on bash treating an unset variable as 0 in arithmetic.
result=0

if [[ "${TEST_ETL,,}" == y* ]]; then
  # Setting up minikube for the running kubernetes based tests.
  pushd deploy/dev/k8s
  echo "Deploying Minikube ========================================================"
  { echo y; echo y; } | ./utils/deploy_minikube.sh
  # Login to Docker Hub to avoid pulls rate limiting in ETL tests.
  docker_login
  echo "Deploying AIS on Minikube"
  target_cnt=5
  proxy_cnt=1
  # NOTE: (4 remote providers + aws cache directory + local registry + datascience stack)
  { echo $target_cnt; echo $proxy_cnt; echo 1; echo 6; echo y; echo n; echo n; echo n; echo "$HOME/aws.env"; echo y; echo n; } | MODE="debug" ./utils/deploy_ais.sh
  echo "AIS on Minikube deployed"
  popd
  kubectl logs -f --max-log-requests $(( target_cnt + proxy_cnt )) -l 'type in (aisproxy,aistarget)' & # Send to background, don't show ETL logs.
  # Running kubernetes based tests
  echo "----- RUNNING K8S TESTS -----"
  AIS_ENDPOINT="$(minikube ip):8080" BUCKET="aws://ais-jenkins" RE="TestETL|TestConfig|TestMountpath" make test-run
  exit_code=$?
  result=$((result + exit_code))
  echo "----- K8S TESTS FINISHED WITH: ${exit_code} -----"
  # Deleting minikube cluster
  ./deploy/dev/k8s/stop.sh
  # Clean docker images cache - it takes tens of GBs if not cleaned regularly.
  if docker -v ; then
    docker image prune -a -f
  fi
fi

# Running long tests
deploy ${TARGET_COUNT:-6} ${PROXY_COUNT:-6} ${MPATH_COUNT:-4} ${USE_AWS:-y} ${USE_GCP:-y} ${USE_AZURE:-n} ${USE_HDFS:-n} ${USE_LOOPBACK:-y}
for bucket in ${CLOUD_BCKS}; do
  echo "----- RUNNING LONG TESTS WITH: ${bucket} -----"
  BUCKET=${bucket} make test-long && make test-aisloader
  exit_code=$?
  result=$((result + exit_code))
  echo "----- LONG TESTS FINISHED WITH: ${exit_code} -----"
done

# NOTE: Only the logs from the last make test-long run survive - see function deploy above.
make kill
if [[ $result -ne 0 ]]; then
  echo "tests failed"
fi
exit $result
|
import { Component, OnInit } from '@angular/core';
import { Order, OrdersService } from 'app/orders.service';
import { ShoppingCartService } from 'app/shopping-cart.service';
import { Router } from '@angular/router';
declare const $: any;
/**
* Defines the component responsible to manage the order page.
*/
@Component({
  selector: 'order',
  templateUrl: './order.component.html'
})
export class OrderComponent implements OnInit {
  // jQuery-validate wrapper around the #order-form element (assigned in ngOnInit).
  orderForm: any;
  // Order assembled from the form fields and the shopping cart contents.
  private order: Order;
  // Fields bound to the order form inputs.
  public firstName: string = "";
  public lastName: string = "";
  public email: string = "";
  public phone: string = "";

  constructor(public ordersService: OrdersService, public shoppingCartService: ShoppingCartService, private router:Router){}

  /**
   * Occurs when the component is initialized.
   * Registers a custom MM/YY credit-card-expiry validator and wires up
   * jQuery-validate rules for the phone, card number, and expiry fields.
   */
  ngOnInit() {
    // Initializes the validation of the form. This is the ONLY place where jQuery usage is allowed.
    this.orderForm = $('#order-form');
    $.validator.addMethod('ccexp', function(value) {
      if (!value) {
        return false;
      }
      // accepts "M/YY" or "MM/YY" with month 1-12 and a two-digit year
      const regEx = /^(0?[1-9]|1[0-2])\/(0?[1-9]|[1-9][0-9])$/g;
      return regEx.test(value);
    }, 'La date d\'expiration de votre carte de crédit est invalide.');
    this.orderForm.validate({
      rules: {
        'phone': {
          required: true,
          phoneUS: true
        },
        'credit-card': {
          required: true,
          creditcard: true
        },
        'credit-card-expiry': {
          ccexp: true
        }
      }
    });
  }

  /**
   * Submits the order form.
   * Validates the form, builds the order from the cart contents, sends it,
   * then empties the cart and navigates to the confirmation page on success.
   */
  async submit() {
    if (!this.orderForm.valid()) {
      return;
    }
    try{
      const id: number = await this.ordersService.getNewId();
      const products: {productId: number, quantity: number}[] = await this.shoppingCartService.getCart();
      // reshape cart entries to the {id, quantity} format the API expects
      const productsToSend: {id: number, quantity: number}[] = products.map((product) => {
        return {id: product.productId,quantity: product.quantity}
      })
      this.order = {
        id,
        firstName: this.firstName,
        lastName: this.lastName,
        email: this.email,
        phone: this.phone,
        products: productsToSend,
      }
      const result: boolean = await this.ordersService.sendOrder(this.order);
      if(result){
        // total item count is needed to clear the cart badge
        let cartsize = 0;
        products.forEach((product) => cartsize += product.quantity)
        this.shoppingCartService.deleteAllProducts(cartsize);
        this.router.navigate(['/confirmation']);
      }
    } catch(err){
      // NOTE(review): failures are only logged; the user gets no feedback —
      // confirm whether an error message should be surfaced here.
      console.log(err)
    }
  }
}
|
"""
Author: <NAME>
Python Version 3.8
"""
import logging
from pathlib import Path
import statistics
import plotly
import plotly.express as px
import pandas as pd
Path("../logs/").mkdir(parents=True, exist_ok=True)
logging.basicConfig(filename='../logs/side_by_side_plot.log',
level=0, filemode='w', format='')
def replace_to(value):
    """Convert a subtype label such as ``mom2dad`` to ``mom-to-dad``.

    Every occurrence of the character ``2`` is replaced with ``-to-``.

    :param value: subtype label string
    :return: the converted string
    """
    return value.replace("2", "-to-")
def make_median_list(df):
    """Expand (read length, count) rows into a sorted list of read lengths.

    Each read length is repeated ``count`` times so the result can be fed
    directly to ``statistics.median``.

    :param df: DataFrame (or mapping of column name to sequence) with
        "Read Length" and "Count" columns
    :return: sorted list of int read lengths, one entry per counted read
    """
    median_list = []
    for read_length, count in zip(df["Read Length"], df["Count"]):
        # Repeat the value `count` times in one step; the original inner
        # for-loop with a trailing no-op `continue` did the same thing
        # one append at a time.
        median_list.extend([int(read_length)] * count)
    median_list.sort()
    return median_list
def build_median_graph_vlines(full_data_df):
    """Build plotly ``shapes`` dicts drawing a vertical median line per facet.

    For each incorrectly-sorted subtype, computes the median read length for
    each of the two crosses and creates a vertical line spanning from 0 to
    that facet's maximum count, anchored to the facet's x/y axes.

    NOTE(review): assumes full_data_df contains exactly two crosses — only
    crosses[0] and crosses[1] are used; confirm for other inputs.

    :param full_data_df: DataFrame with "Cross", "Subtype", "Read Length",
        and "Count" columns
    :return: list of dicts suitable for plotly ``layout.shapes``
    """
    lines = []
    crosses = [c for c in full_data_df["Cross"].unique()]
    subtypes = [
        # "correctly-sorted",
        "mom-to-dad",
        "dad-to-mom",
        "mom-to-unknown",
        "dad-to-unknown",
        "unknown-to-mom",
        "unknown-to-dad",
    ]
    logging.info(f"Crosses: {crosses}")
    cross1 = crosses[0]
    cross2 = crosses[1]
    # xref-labels go from bottom to top (1-4), so need to assign x_axis backwards
    x_axis = len(crosses) * len(subtypes)
    for subtype in subtypes:
        # median read length for cross 1 within this subtype
        cross1_data = full_data_df[(full_data_df["Cross"] == cross1) & (
            (full_data_df["Subtype"] == subtype))]
        sorted_cross1_data = make_median_list(cross1_data)
        cross1_median = statistics.median(sorted_cross1_data)
        # median read length for cross 2 within this subtype
        cross2_data = full_data_df[(full_data_df["Cross"] == cross2) & (
            (full_data_df["Subtype"] == subtype))]
        sorted_cross2_data = make_median_list(cross2_data)
        cross2_median = statistics.median(sorted_cross2_data)
        cross1_logging_info = f"Cross: {cross1}, Subtype: {subtype}, Median: {cross1_median}, DF_len: {len(cross1_data)}"
        cross2_logging_info = f"Cross: {cross2}, Subtype: {subtype}, Median: {cross2_median}, Df_len: {len(cross2_data)}"
        logging.info(cross1_logging_info)
        logging.info(cross2_logging_info)
        # vertical line for cross 1 (left facet column: odd axis index)
        lines.append(
            dict(
                type="line",
                yref=f'y{x_axis - 1}',
                y0=0,
                y1=cross1_data["Count"].max(),
                xref=f'x{x_axis - 1}',
                x0=cross1_median,
                x1=cross1_median,
                opacity=0.8,
                line=dict(
                    color="black",
                    width=1,
                )
            )
        )
        # vertical line for cross 2 (right facet column: even axis index)
        lines.append(
            dict(
                type="line",
                yref=f'y{x_axis}',
                y0=0,
                y1=cross2_data["Count"].max(),
                xref=f'x{x_axis}',
                x0=cross2_median,
                x1=cross2_median,
                opacity=0.8,
                line=dict(
                    color="black",
                    width=1,
                )
            )
        )
        # move up one facet row (two axes per row: one per cross)
        x_axis -= 2
    return lines
def make_side_by_side_plot():
    """
    This script takes in read length distribution data from two crosses
    and plots them side-by-side, faceted by incorrectly sorted subtypes.

    Reads the two hard-coded TSV files, concatenates them with a "Cross"
    column, renames the subtype labels (2 -> -to-), and writes the faceted
    scatter plot to ../plots/ as an SVG (via kaleido).
    """
    # ---- Input variables ----
    cross_1 = "LilBubxPbe53"
    file_1 = "../misc_data/LilBubxPbe53_read_length_dist_data.tsv"
    cross_2 = "Fca508xPbe14"
    file_2 = "../misc_data/Fca508xPbe14_read_length_dist_data.tsv"
    # Make output directory for plot if not already made
    Path("../plots/").mkdir(parents=True, exist_ok=True)
    # Load in files into df
    df_1 = pd.read_csv(file_1, sep="\t")
    df_2 = pd.read_csv(file_2, sep="\t")
    # Add cross names to dataframes
    df_1["Cross"] = [str(cross_1)] * len(df_1)
    df_2["Cross"] = [str(cross_2)] * len(df_2)
    # Concatenate the dataframes
    concat_df = pd.concat([df_1, df_2])
    # replace '2' with '-to-'
    concat_df["Subtype"] = concat_df["Subtype"].apply(replace_to)
    # Rename columns
    concat_df.columns = ['Read Length', 'Count', 'Subtype', 'Cross']
    # Create median line shapes -- NOT USED
    # fig_median_lines = build_median_graph_vlines(concat_df)
    # Create plot and update settings
    fig = px.scatter(
        concat_df,
        x="Read Length",
        y="Count",
        color="Subtype",
        size_max=2,
        facet_col="Cross",
        facet_row="Subtype",
        facet_row_spacing=0.03,
        template="simple_white",
        height=800,
        width=1000,
        render_mode='svg',
        category_orders={
            "Subtype": [
                "mom-to-dad",
                "dad-to-mom",
                "mom-to-unknown",
                "dad-to-unknown",
                "unknown-to-mom",
                "unknown-to-dad",
            ],
        },
    )
    # horizontal legend centered below the plot
    fig.update_layout(
        legend=dict(
            orientation="h",
            yanchor="top",
            y=-0.1,
            xanchor="center",
            x=0.5,
            title="",
        ),
        font=dict(
            family="Arial, monospace",
            size=12,
        ),
        margin=dict(
            l=5,
            r=5,
        ),
        # shapes=fig_median_lines,
    )
    fig.update_traces(marker=dict(size=4))
    # fixed x-axis ticks every 20 kb up to 160 kb
    fig.update_xaxes(
        tickmode='linear',
        tick0=0,
        dtick=20000,
        range=[0, 160000]
    )
    # let each facet scale its own y-axis
    fig.update_yaxes(matches=None)
    # strip the "column=" prefix from facet annotation labels
    fig.for_each_annotation(lambda a: a.update(text=a.text.split("=")[-1]))
    # fig.show()
    fig.write_image(
        f"../plots/{cross_1}_{cross_2}_Read_Length_Distribution.svg",
        format='svg',
        engine="kaleido",
    )
    return
if __name__ == "__main__":
make_side_by_side_plot()
|
<reponame>kandarpck/leetcode<filename>trees/count_islands.py
class Solution:
    def numIslands(self, grid):
        """
        Count connected groups of 1-cells (4-directional) in the grid.

        :type grid: List[List[int]] -- cells are compared against the int 1
            (the original docstring said List[List[str]], which does not
            match the comparisons below); the grid is destructively marked.
        :rtype: int
        Time: O(n*m)
        Space: O(h) space of the internal dfs stack
        """
        count = 0
        for i in range(len(grid)):
            for j in range(len(grid[0])):
                if grid[i][j] == 1:
                    # sink this island, then count it once
                    self.dfs(grid, i, j)
                    count += 1
        return count

    def dfs(self, grid, i, j):
        """Flood-fill from (i, j), marking visited land cells with '#'."""
        # stop at grid edges, water cells, and already-marked cells
        if i < 0 or i >= len(grid) or j < 0 or j >= len(grid[0]) or grid[i][j] != 1:
            return
        grid[i][j] = '#'
        # removed leftover debug print(grid) that ran once per land cell
        self.dfs(grid, i - 1, j)
        self.dfs(grid, i + 1, j)
        self.dfs(grid, i, j - 1)
        self.dfs(grid, i, j + 1)
class Solution2:
    def numIslandsNonDestructive(self, grid):
        """
        Count connected groups of 1-cells (4-directional) without mutating
        the input grid, using a separate visited matrix.

        :type grid: List[List[int]] -- cells are compared against the int 1
        :rtype: int
        Time: O(n*m)
        Space: O(n*m) for the visited matrix plus O(h) dfs stack
            (the original docstring claimed O(h) only)
        """
        def dfs(i, j):
            # stop at grid edges, water cells, and already-visited cells
            if i < 0 or i >= len(grid) or j < 0 or j >= len(grid[0]) or not grid[i][j] or visited[i][j]:
                return
            visited[i][j] = True
            # removed leftover debug print(visited) that ran per land cell
            dfs(i - 1, j)
            dfs(i + 1, j)
            dfs(i, j - 1)
            dfs(i, j + 1)

        count = 0
        visited = [[False for _ in range(len(grid[0]))] for _ in range(len(grid))]
        for i in range(len(grid)):
            for j in range(len(grid[0])):
                if grid[i][j] == 1 and not visited[i][j]:
                    dfs(i, j)
                    count += 1
        return count
if __name__ == '__main__':
    # Both variants should report 3 islands for this grid: column 0,
    # the cluster around columns 3-4, and the lone cell at (0, 6).
    print(Solution().numIslands([[1, 0, 0, 0, 1, 0, 1],
                                 [1, 0, 0, 1, 1, 0, 0]]))
    print(Solution2().numIslandsNonDestructive([[1, 0, 0, 0, 1, 0, 1],
                                                [1, 0, 0, 1, 1, 0, 0]]))
|
<reponame>bitbrain/braingdx<filename>core/src/main/java/de/bitbrain/braingdx/util/Updateable.java
package de.bitbrain.braingdx.util;
/**
* Indicates that the implementation of this interface is updateable per frame.
*/
public interface Updateable {

   /**
    * Updates this object for the current frame.
    *
    * @param delta time elapsed since the previous frame, in seconds
    */
   void update(float delta);
}
|
<reponame>leongaban/redux-saga-exchange
export { default as BalanceLayout } from './BalanceLayout/BalanceLayout';
export { default as MBalanceLayout } from './BalanceLayout/mobile/MBalanceLayout';
export { default as ConfirmWithdrawalLayout } from './ConfirmWithdrawalLayout/ConfirmWithdrawalLayout';
export { default as MOperationHistoryLayout } from './MOperationHistoryLayout/MOperationHistoryLayout';
|
def show_keyword_help(cur, arg):
    """
    Call the built-in "show <command>", to display help for an SQL keyword.
    :param cur: cursor
    :param arg: string
    :return: list of one (title, cur, headers, status) result tuple, or
        None when the help query produced no rows
    """
    # strip surrounding double then single quotes from the keyword
    keyword = arg.strip('"').strip("'")
    # NOTE(review): the keyword is interpolated directly into the query
    # string; acceptable for an interactive help command, but confirm
    # upstream input cannot contain a quote that breaks the statement.
    query = "help '{0}'".format(keyword)
    log.debug(query)
    cur.execute(query)
    if cur.description and cur.rowcount > 0:
        headers = [x[0] for x in cur.description]
        return [(None, cur, headers, '')]
    else:
        # Handle the case when the query does not return any results
        return None  # no help rows found for this keyword
#! /usr/bin/bash
# Classic fizzbuzz over 1..20: multiples of 15 print "fizzbuzz",
# multiples of 3 print "fizz", multiples of 5 print "buzz",
# everything else prints the number itself.
for ((n = 1; n <= 20; n++)); do
  if ((n % 15 == 0)); then
    echo "fizzbuzz"
  elif ((n % 3 == 0)); then
    echo "fizz"
  elif ((n % 5 == 0)); then
    echo "buzz"
  else
    echo $n
  fi
done
#!/bin/bash
# Launches HRL4IN training on the Tiago tabletop task. Every knob below is
# baked into the experiment folder name so runs are self-describing; the
# inline "# a, b" comments list the values previously swept.

gpu="0"
reward_type="dense"
pos="fixed"
irs="30.0" # intrinsic reward scaling
sgr="0.0" # subgoal achieved reward
lr="1e-4"
meta_lr="1e-5" # 1e-4, 1e-5
fr_lr="0" # 0, 100
death="30.0"
init_std_dev_xy="0.6" # 0.6, 1.2
init_std_dev_z="0.1"
failed_pnt="0.0" # 0.0, -0.2
num_steps="1024"
ext_col="0.0" # 0.0, 0.5, 1.0, 2.0
name="exp"
run="0"

# Checkpoint/log folder name encodes all hyperparameters above.
log_dir="hrl4in_tiago_tabletop_hrl4in_ss_hrl_reward_"$reward_type"_pos_"$pos"_sgm_arm_world_irs_"$irs"_sgr_"$sgr"_lr_"$lr"_meta_lr_"$meta_lr"_fr_lr_"$fr_lr"_death_"$death"_init_std_"$init_std_dev_xy"_"$init_std_dev_xy"_"$init_std_dev_z"_failed_pnt_"$failed_pnt"_nsteps_"$num_steps"_ext_col_"$ext_col"_6x6_from_scr_"$name"_run_"$run
echo $log_dir

# -u: unbuffered output so logs stream to the console/scheduler in real time.
python -u train_hrl4in_tabletop.py \
--use-gae \
--sim-gpu-id $gpu \
--pth-gpu-id $gpu \
--lr $lr \
--meta-lr $meta_lr \
--freeze-lr-n-updates $fr_lr \
--clip-param 0.1 \
--value-loss-coef 0.5 \
--num-train-processes 1 \
--num-eval-processes 1 \
--num-steps $num_steps \
--num-mini-batch 1 \
--num-updates 50000 \
--use-linear-lr-decay \
--use-linear-clip-decay \
--entropy-coef 0.01 \
--log-interval 1 \
--experiment-folder "ckpt/"$log_dir \
--time-scale 50 \
--intrinsic-reward-scaling $irs \
--subgoal-achieved-reward $sgr \
--subgoal-init-std-dev $init_std_dev_xy $init_std_dev_xy $init_std_dev_z \
--subgoal-failed-penalty $failed_pnt \
--use-action-masks \
--meta-agent-normalize-advantage \
--extrinsic-collision-reward-weight $ext_col \
--meta-gamma 0.99 \
--checkpoint-interval 10 \
--checkpoint-index -1 \
--config-file "tiago_tabletop_hrl4in.yaml" \
--num-eval-episodes 1
|
<reponame>redditech/buildspace-nft-course-starter
import './styles/App.css';
import twitterLogo from './assets/twitter-logo.svg';
import React, { useEffect, useState } from "react";
import { ethers } from 'ethers';
import myEpicNft from './utils/MyEpicNFT.json';
// Constants
const BUILDSPACE_TWITTER_HANDLE = '_buildspace';
const BUILDSPACE_TWITTER_LINK = `https://twitter.com/${BUILDSPACE_TWITTER_HANDLE}`;
const TWITTER_HANDLE = 'redditech';
const TWITTER_LINK = `https://twitter.com/${TWITTER_HANDLE}`;
const OPENSEA_LINK = '';
const TOTAL_MINT_COUNT = 50;
// I moved the contract address to the top for easy access.
const CONTRACT_ADDRESS = "0xC2dc48aEcAc1Bf412bb89dc833d3E9f6fd538edd";
const App = () => {
//state variable to store user's public wallet
const [currentAccount, setCurrentAccount] = useState("");
const checkIfWalletIsConnected = async () => {
// First make sure we have access to window.ethereum
const { ethereum } = window;
if (!ethereum) {
console.log("Make sure you have metamask!");
return;
} else {
console.log("We have the ethereum object", ethereum);
}
// Check we're authorised to access the user's wallet
const accounts = await ethereum.request({ method: 'eth_accounts' });
// Use can have multiple authorised accounts, grab the first one if it is there
if (accounts.length !== 0) {
const account = accounts[0];
console.log("Found an authorized account: ", account);
setCurrentAccount(account);
} else {
console.log("No authorized account found");
}
let chainId = await ethereum.request({ method: 'eth_chainId' });
console.log("Connected to chain " + chainId);
// String, hex code of the chainId of the Rinkebey test network
const rinkebyChainId = "0x4";
if (chainId !== rinkebyChainId) {
alert("You are not connected to the Rinkeby Test Network!");
}
}
// implement connect wallet method
const connectWallet = async () => {
try {
const { ethereum } = window;
if (!ethereum) {
alert("Get Metamask!");
return;
}
// request access to account.
const accounts = await ethereum.request({ method: "eth_requestAccounts" });
// This should print out the public address once we authorise Metamask
console.log("Connected", accounts[0]);
setCurrentAccount(accounts[0]);
// Setup listener, this is for the case where a user comes to our site
// and connected their wallet for the first time
setupEventListener();
} catch (error) {
console.log(error);
}
}
// Setup our listener.
const setupEventListener = async () => {
// Most of this looks the same as the function askContractToMintNFT
try {
const { ethereum } = window;
if (ethereum) {
// Same stuff again
const provider = new ethers.providers.Web3Provider(ethereum);
const signer = provider.getSigner();
const connectedContract = new ethers.Contract(CONTRACT_ADDRESS, myEpicNft.abi, signer);
// This captures our event when our contract throws it.
// very similar to webhooks
connectedContract.on("NewEpicNFTMinted", (from, tokenId) => {
console.log(from, tokenId.toNumber());
alert(`Hey there! We've minted your NFT and sent it to your wallet. It may be blank right now.` +
`It can take a max of 10 min to show up on OpenSea. Here's the link: https://testnets.opensea.io/assets/${CONTRACT_ADDRESS}/${tokenId.toNumber()}`);
});
console.log("Setup event listener!");
} else {
console.log("Ethereum object doesn't exist");
}
} catch (error) {
console.log(error);
}
}
const askContractToMintNFT = async () => {
const CONTRACT_ADDRESS = "0xC2dc48aEcAc1Bf412bb89dc833d3E9f6fd538edd";
try {
const { ethereum } = window;
if (ethereum) {
const provider = new ethers.providers.Web3Provider(ethereum);
const signer = provider.getSigner();
const connectedContract = new ethers.Contract(CONTRACT_ADDRESS, myEpicNft.abi, signer);
let nftTxn = await connectedContract.makeAnEpicNFT();
console.log("Mining...please wait.");
await nftTxn.wait();
console.log(`Mined see transaction: https://rinkeby.etherscan.io/tx/${nftTxn.hash}`);
} else {
console.log("Ethereum object doesn't exist!");
}
} catch (error) {
console.log(error);
}
}
// Render Methods
const renderNotConnectedContainer = () => (
<button className="cta-button connect-wallet-button" onClick={connectWallet}>
Connect to Wallet
</button>
);
const renderMintUI = () => (
<button onClick={askContractToMintNFT} className="cta-button connect-wallet-button">
Mint NFT
</button>
)
// Run our function when the page loads.
useEffect(() => {
checkIfWalletIsConnected();
}, [])
return (
<div className="App">
<div className="container">
<div className="header-container">
<p className="header gradient-text">My Weird Meals NFT Collection</p>
<p className="sub-text">
Each unique. Each beautiful. Discover your NFT today.
</p>
{currentAccount === "" ? renderNotConnectedContainer() : renderMintUI() }
</div>
<div className="footer-container">
<img alt="Twitter Logo" className="twitter-logo" src={twitterLogo} />
<a
className="footer-text"
href={TWITTER_LINK}
target="_blank"
rel="noreferrer"
>{`built by @${TWITTER_HANDLE}`}</a>
<span className="footer-text"> for my project on </span>
<img alt="Twitter Logo" className="twitter-logo" src={twitterLogo} />
<a
className="footer-text"
href={BUILDSPACE_TWITTER_LINK}
target="_blank"
rel="noreferrer"
>{`@${BUILDSPACE_TWITTER_HANDLE}`}</a>
<span className='footer-text'> : "Mint your own NFT collection and ship a Web3 app to show them off"</span>
</div>
</div>
</div>
);
};
export default App;
|
//
// Copyright © 2017-2020 Solus Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package index
import (
"crypto/sha1"
"encoding/xml"
"fmt"
"github.com/getsolus/libeopkg/shared"
"io"
"os"
"path/filepath"
)
// Index is downloaded on a per-repository basis to provide information about
// the repository's:
// - Packages
// - Metadata
type Index struct {
	// XMLName pins the XML root element to "PISI" (the legacy format name
	// used by eopkg index files).
	XMLName xml.Name `xml:"PISI"`
	// Distribution describes the distribution this index belongs to.
	Distribution Distribution
	// Packages holds one record per package in the repository.
	Packages []Package `xml:"Package"`
	// Components holds the component entries of the repository.
	Components []Component `xml:"Component"`
	// Groups holds the package group entries.
	Groups []Group `xml:"Group"`
}
// Load reads the index from the XML file at path and decodes it.
// Note: the returned *Index is always non-nil, even when an error occurred
// (matching the historical behaviour of this function).
func Load(path string) (*Index, error) {
	index := &Index{}
	xmlFile, err := os.Open(path)
	if err != nil {
		return index, err
	}
	defer xmlFile.Close()
	err = xml.NewDecoder(xmlFile).Decode(index)
	return index, err
}
// hashFile creates a sha1sum for a given file
func hashFile(path string) error {
iFile, err := os.Open(path)
if err != nil {
return err
}
defer iFile.Close()
h := sha1.New()
_, err = io.Copy(h, iFile)
if err != nil {
return err
}
oFile, err := os.Create(path + ".sha1sum")
if err != nil {
return err
}
defer oFile.Close()
fmt.Fprintf(oFile, "%x", h.Sum(nil))
return nil
}
// Save writes the index out to "eopkg-index.xml" inside path, compresses it
// with xz, and then generates ".sha1sum" hash files for both the plain and
// the compressed index.
func (i *Index) Save(path string) error {
	indexFile := filepath.Join(path, "eopkg-index.xml")
	xmlFile, err := os.Create(indexFile)
	if err != nil {
		return err
	}
	enc := xml.NewEncoder(xmlFile)
	enc.Indent(" ", " ")
	if err = enc.Encode(i); err != nil {
		xmlFile.Close()
		return err
	}
	// Fix: the Close error was previously ignored. A failed close on a file
	// we just wrote can mean a truncated or unflushed index on disk.
	if err = xmlFile.Close(); err != nil {
		return err
	}
	if err = shared.XzFile(indexFile, true); err != nil {
		return err
	}
	if err = hashFile(indexFile); err != nil {
		return err
	}
	return hashFile(indexFile + ".xz")
}
|
import numpy as np
def calculate_anomaly_scores(data):
    """Score each sample by how far its mean deviates from the grand mean.

    Parameters
    ----------
    data : array-like of shape (n_samples, n_features)
        Numeric samples; each row is one observation.

    Returns
    -------
    list of float
        Absolute deviation of every per-sample mean from the mean of all
        sample means.
    """
    per_sample_mean = np.asarray(data).mean(axis=1)
    deviations = np.abs(per_sample_mean - per_sample_mean.mean())
    return deviations.tolist()
/*
* Simple trackball-like motion adapted (ripped off) from projtex.c
* (written by <NAME> and <NAME>). See the SIGGRAPH '96
* Advanced OpenGL course notes.
*/
#include <math.h>
#include <assert.h>
#include <GL/glut.h>
#include "trackball.h"
/* Module-level trackball state. */
static GLuint tb_lasttime;               /* timestamp (ms) of the last motion event */
static GLfloat tb_lastposition[3];       /* last mouse position projected onto the sphere */
static GLfloat tb_angle = 0.0;           /* rotation angle for the current drag (degrees, fed to glRotatef) */
static GLfloat tb_axis[3];               /* rotation axis for the current drag */
static GLfloat tb_transform[4][4];       /* accumulated rotation matrix */
static GLuint tb_width;                  /* viewport width in pixels (set by tbReshape) */
static GLuint tb_height;                 /* viewport height in pixels (set by tbReshape) */
static GLint tb_button = -1;             /* mouse button driving the trackball; -1 means tbInit not called */
static GLboolean tb_tracking = GL_FALSE; /* GL_TRUE while a drag is in progress */
static GLboolean tb_animate = GL_TRUE;   /* keep redisplaying (spinning) after release */
/* Map a window coordinate (x, y) to a unit vector v on a virtual
 * hemisphere centered in the (width x height) viewport. Points outside the
 * unit circle are clamped to the sphere's rim via the (d < 1.0) test. */
static void
_tbPointToVector(int x, int y, int width, int height, float v[3])
{
  float d, a;

  /* project x, y onto a hemi-sphere centered within width, height. */
  v[0] = (2.0 * x - width) / width;
  v[1] = (height - 2.0 * y) / height;   /* flip y: window y grows downward */
  d = sqrt(v[0] * v[0] + v[1] * v[1]);
  v[2] = cos((3.14159265 / 2.0) * ((d < 1.0) ? d : 1.0));
  /* normalize to unit length */
  a = 1.0 / sqrt(v[0] * v[0] + v[1] * v[1] + v[2] * v[2]);
  v[0] *= a;
  v[1] *= a;
  v[2] *= a;
}
/* Idle callback installed while "spinning": keep requesting redisplays so
 * tbMatrix keeps applying the last drag's incremental rotation. */
static void GLUTCALLBACK _tbAnimate(void)
{
  glutPostRedisplay();
}
/* Begin a drag at window position (x, y). Records the time and the
 * projected sphere position so tbMotion can compute deltas.
 * The assert guards against use before tbInit set tb_button. */
void
_tbStartMotion(int x, int y, int button, int time)
{
  assert(tb_button != -1);

  tb_tracking = GL_TRUE;
  tb_lasttime = time;
  _tbPointToVector(x, y, tb_width, tb_height, tb_lastposition);
}
/* End a drag. If the release happened in the same millisecond as the last
 * motion event (i.e. the mouse was still moving) and animation is enabled,
 * install the idle callback so the model keeps spinning; otherwise zero the
 * angle and clear the idle callback. */
void
_tbStopMotion(int button, unsigned time)
{
  assert(tb_button != -1);

  tb_tracking = GL_FALSE;

  if (time == tb_lasttime && tb_animate) {
    glutIdleFunc(_tbAnimate);
  } else {
    tb_angle = 0.0;
    if (tb_animate)
      glutIdleFunc(0);
  }
}
/* Enable/disable the "keep spinning after release" behaviour. */
void
tbAnimate(GLboolean animate)
{
  tb_animate = animate;
}
/* Initialise the trackball: remember which mouse button drives it and reset
 * the accumulated transform to identity. Must be called before any other
 * tb* function (they assert tb_button != -1). Uses the GL matrix stack to
 * obtain an identity matrix, so a current GL context is required. */
void
tbInit(GLuint button)
{
  tb_button = button;
  tb_angle = 0.0;

  /* put the identity in the trackball transform */
  glPushMatrix();
  glLoadIdentity();
  glGetFloatv(GL_MODELVIEW_MATRIX, (GLfloat *)tb_transform);
  glPopMatrix();
}
/* Fold the current incremental rotation (tb_angle about tb_axis) into the
 * accumulated transform, then multiply that transform onto the caller's
 * current GL matrix. The push/pop pair keeps the accumulation step from
 * disturbing the caller's matrix stack. */
void
tbMatrix()
{
  assert(tb_button != -1);

  glPushMatrix();
  glLoadIdentity();
  glRotatef(tb_angle, tb_axis[0], tb_axis[1], tb_axis[2]);
  glMultMatrixf((GLfloat *)tb_transform);
  glGetFloatv(GL_MODELVIEW_MATRIX, (GLfloat *)tb_transform);
  glPopMatrix();

  glMultMatrixf((GLfloat *)tb_transform);
}
/* Record the viewport dimensions; call from the GLUT reshape callback so
 * _tbPointToVector projects using the current window size. */
void
tbReshape(int width, int height)
{
  assert(tb_button != -1);

  tb_width = width;
  tb_height = height;
}
/* GLUT mouse callback: start tracking on press of the configured trackball
 * button, stop on its release. Other buttons are ignored. */
void
tbMouse(int button, int state, int x, int y)
{
  assert(tb_button != -1);

  if (button != tb_button)
    return;

  if (state == GLUT_DOWN)
    _tbStartMotion(x, y, button, glutGet(GLUT_ELAPSED_TIME));
  else if (state == GLUT_UP)
    _tbStopMotion(button, glutGet(GLUT_ELAPSED_TIME));
}
/* GLUT motion callback: while a drag is active, derive the incremental
 * rotation from the movement between the last and current sphere-projected
 * mouse positions, then request a redisplay. */
void
tbMotion(int x, int y)
{
  GLfloat current_position[3], dx, dy, dz;

  assert(tb_button != -1);

  if (tb_tracking == GL_FALSE)
    return;

  _tbPointToVector(x, y, tb_width, tb_height, current_position);

  /* calculate the angle to rotate by (directly proportional to the
     length of the mouse movement) */
  dx = current_position[0] - tb_lastposition[0];
  dy = current_position[1] - tb_lastposition[1];
  dz = current_position[2] - tb_lastposition[2];
  tb_angle = 90.0 * sqrt(dx * dx + dy * dy + dz * dz);

  /* calculate the axis of rotation (cross product) */
  tb_axis[0] = tb_lastposition[1] * current_position[2] -
               tb_lastposition[2] * current_position[1];
  tb_axis[1] = tb_lastposition[2] * current_position[0] -
               tb_lastposition[0] * current_position[2];
  tb_axis[2] = tb_lastposition[0] * current_position[1] -
               tb_lastposition[1] * current_position[0];

  /* reset for next time */
  tb_lasttime = glutGet(GLUT_ELAPSED_TIME);
  tb_lastposition[0] = current_position[0];
  tb_lastposition[1] = current_position[1];
  tb_lastposition[2] = current_position[2];

  /* remember to draw new position */
  glutPostRedisplay();
}
|
import React, { Component } from 'react';
import Child from './Child.js';
/**
* 中文说明目标
*/
export default class Second extends Component {
state = {
value: 1
}
onClick = () => {
this.state.value = 2;
console.log('onClick', this.state.value);
this.forceUpdate();
}
render () {
console.log(this.state);
return (
<div onClick={this.onClick}>
Second Practice
{this.state.value}
<Child childName='Cchild'/>
</div>
);
}
}
|
<filename>injected.js<gh_stars>0
// Poll every 100 ms and strip context-menu/clipboard handlers that the host
// page may (re-)attach to <body>. Requires jQuery ($) to exist on the page.
// removeHandlers is referenced before its declaration via function hoisting.
var refreshTimer = setInterval(removeHandlers, 100);

// Unbind the page's contextmenu/copy/cut/paste handlers from <body>.
// NOTE(review): .unbind() is deprecated in jQuery 3+; presumably the target
// pages ship an older jQuery — confirm before migrating to .off().
function removeHandlers(){
  $("body").unbind("contextmenu copy cut paste");
}
|
<reponame>twinstone/open-anonymizer
package openanonymizer.core.hash;
import org.apache.commons.lang3.Validate;
import org.apache.log4j.Logger;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Base64;
import static java.nio.charset.StandardCharsets.UTF_8;
/**
 * Generates a hash for any {@link String} input using the MD5 algorithm.
 * <p>
 * Use the static methods of this class. Do not use the constructor.
 *
 * @version 0.1
 * @since Open Anonymizer 1.0.0
 */
public final class HashService {

    // NOTE: MD5 is cryptographically broken; it is acceptable only for
    // non-security fingerprinting. Consider SHA-256 if stronger guarantees
    // are ever required.
    private static final String ALGORITHM = "MD5";
    private static final Logger logger = Logger.getLogger(HashService.class);

    /**
     * Generates a MIME Base64 encoded MD5 digest of {@code input}
     * concatenated with {@code secret}.
     *
     * @param input  value to hash; must be non-empty
     * @param secret salt folded into the digest; must be non-empty
     * @return generated hash, or the unmodified input if the algorithm is
     *         unavailable (practically unreachable for MD5)
     */
    public static String generateHash(final String input, final String secret) {
        Validate.notEmpty(input, "Input must be not empty.");
        Validate.notEmpty(secret, "Secret must be not empty.");
        try {
            // Security fix: do not write the secret to the log. Logging the
            // salt alongside the value defeats the purpose of salting.
            logger.info(String.format("Generating new hash for input [%s].", input));
            MessageDigest digest = MessageDigest.getInstance(ALGORITHM);
            digest.update(input.getBytes(UTF_8));
            byte[] bytes = digest.digest(secret.getBytes(UTF_8));
            return new String(Base64.getMimeEncoder().encode(bytes));
        } catch (NoSuchAlgorithmException e) {
            // Best-effort fallback: the caller receives the raw,
            // un-anonymized input. Kept for backward compatibility.
            logger.error(String.format("Could not generate hash for input [%s].", input), e);
            return input;
        }
    }

    private HashService() {
        throw new IllegalStateException();
    }
}
|
#include <bits/stdc++.h>
using namespace std;
int main()
{
    // 13 mod 10 is 3; print the remainder exactly as the original did.
    const int remainder = 13 % 10;
    std::cout << remainder;
    return 0;
}
|
#!/bin/bash

: <<DOC
SPSS Statistics 28+ are installed into a folder named "IBM SPSS Statistics".
To continue to allow multiple versions to be installed concurrently, this script
moves the folder to "IBM SPSS Statistics MAJOR_VERSION".
DOC

# Only act when the unversioned folder actually contains the app bundle.
if [[ -d "/Applications/IBM SPSS Statistics/SPSS Statistics.app" ]]; then
    echo "Moving IBM SPSS Statistics app folder"
    # check the actual version of the app inside the folder
    # (major version only, e.g. "28" from "28.0.1")
    installed_version=$(/usr/libexec/PlistBuddy -c "Print :CFBundleShortVersionString" "/Applications/IBM SPSS Statistics/SPSS Statistics.app/Contents/Info.plist" | cut -d. -f1)
    # Rename the folder so future major versions can install side by side.
    if ! mv "/Applications/IBM SPSS Statistics" "/Applications/IBM SPSS Statistics $installed_version"; then
        echo "ERROR: Failed to move folder"
        exit 1
    else
        echo "'/Applications/IBM SPSS Statistics $installed_version' successfully created"
    fi
else
    echo "No existing installation present within '/Applications/IBM SPSS Statistics'"
fi
|
// In-memory stand-in for window.localStorage. Data is stored as plain
// properties on the object itself; the API methods are defined via
// Object.defineProperty with enumerable:false so Object.keys() (used by
// `length` and `clear`) only ever sees the stored data keys.
var localStorageMock = (function() {
  var localStorage = {
  };

  // getItem: mirror the real API by returning null (not undefined)
  // for missing keys.
  Object.defineProperty(localStorage, "getItem", {
    value: function (key) {
      return (localStorage[key] === undefined) ? null : localStorage[key];
    },
    writable: false,
    configurable: false,
    enumerable: false
  });

  // setItem: non-object values are coerced to strings (like the real
  // localStorage); objects are stored as-is, which the real API does NOT do.
  Object.defineProperty(localStorage, "setItem", {
    value: function (sKey, sValue) {
      if (typeof sValue !== "object") {
        localStorage[sKey] = sValue + "";
      } else {
        localStorage[sKey] = sValue;
      }
    },
    writable: false,
    configurable: false,
    enumerable: false
  });

  // removeItem: no-op for falsy keys, otherwise deletes the stored property.
  Object.defineProperty(localStorage, "removeItem", {
    value: function (sKey) {
      if (!sKey) {
        return;
      }
      delete localStorage[sKey]
    },
    writable: false,
    configurable: false,
    enumerable: false
  });

  // length: live count of stored keys (API methods are non-enumerable and
  // therefore excluded from Object.keys).
  Object.defineProperty(localStorage, "length", {
    get: function () {
      return Object.keys(localStorage).length;
    },
    configurable: false,
    enumerable: false
  });

  // clear: delete every stored key.
  Object.defineProperty(localStorage, "clear", {
    value: function () {
      Object.keys(localStorage).forEach(function (key) {
        delete localStorage[key];
      });
    },
    writable: false,
    configurable: false,
    enumerable: false
  });

  return localStorage;
})();
var asyncStorageMock = (function() {
var data = {}
var asyncStorage = {
getItem: function (key) {
return new Promise((resolve, reject) => {
resolve(data[key])
});
},
setItem: function (key, value) {
return new Promise((resolve, reject) => {
data[key] = value;
resolve();
});
},
removeItem: function (key) {
return new Promise((resolve, reject) => {
if(data[key]) {
delete data[key];
}
resolve();
});
}
};
return asyncStorage;
})();
// Install the mocks on the global window object so code under test that
// reads window.localStorage / window.asyncStorage gets the in-memory fakes.
Object.defineProperty(window, 'localStorage', { value: localStorageMock });
Object.defineProperty(window, 'asyncStorage', { value: asyncStorageMock });
/*! THIS FILE IS AUTO-GENERATED */
import { script_v1 } from './v1';
export declare const VERSIONS: {
'v1': typeof script_v1.Script;
};
export declare function script(version: 'v1'): script_v1.Script;
export declare function script(options: script_v1.Options): script_v1.Script;
|
I have been working with Java for a few months now, and it has been a great learning experience. The language is object-oriented, meaning it models real-world concepts: in Java, nearly everything is an object, and the language ensures that objects interact in consistent, well-defined ways. It also offers a rich set of standard libraries that any Java program can draw on to extend its functionality. Because Java is platform independent, the same program can run on desktop machines, servers, and the web, which makes it a good fit for a wide variety of applications. Along the way I have also learned a great deal about object-oriented programming and the overall development process. Based on my experience so far, I can say confidently that Java is a great language to learn and work with.
import * as express from "express";
import * as jwt from "express-jwt";
import { UserController } from "../controllers/UserController";
import * as dotenv from "dotenv";
dotenv.config();
const router = express["Router"]();
const auth = jwt({ secret: process.env.JWT_SECRET, userProperty: "payload" });
const userController = new UserController();
router.get("/users", auth, userController.readAllUsers);
router.get("/users/:userId", auth, userController.readOneUser);
export = router; |
#!/usr/bin/env bash
set -e

# Push the x86 Android build of isa-info onto the device and execute it
# there via adb.
adb push build/android/x86/isa-info /data/local/tmp/isa-info
adb shell /data/local/tmp/isa-info
|
package Entity;
import java.util.List;
/**
 * Plain data holder for a movie record.
 * <p>
 * Fix: the constructor now assigns fields directly instead of calling the
 * public setters — invoking overridable methods from a constructor is unsafe
 * if the class is ever subclassed (the override would run on a partially
 * constructed object).
 */
public class Movie {

    private int movieId;
    private String movieTitle;
    private int movieLength;   // length value; unit not specified here — presumably minutes, confirm with callers
    private String releaseDate;
    private String director;
    private String leadActor;
    private String revenue;    // kept as a String (e.g. formatted amount), matching the original schema
    private int genres;        // numeric genre code; semantics defined by callers
    private int ratings;       // numeric rating value; scale defined by callers

    public Movie (int movieId, String movieTitle, int movieLength, String releaseDate,
            String director, String leadActor, String revenue, int genres, int ratings) {
        this.movieId = movieId;
        this.movieTitle = movieTitle;
        this.movieLength = movieLength;
        this.releaseDate = releaseDate;
        this.director = director;
        this.leadActor = leadActor;
        this.revenue = revenue;
        this.genres = genres;
        this.ratings = ratings;
    }

    public int getMovieId() {
        return movieId;
    }

    public void setMovieId(int movieId) {
        this.movieId = movieId;
    }

    public String getMovieTitle() {
        return movieTitle;
    }

    public void setMovieTitle(String movieTitle) {
        this.movieTitle = movieTitle;
    }

    public int getMovieLength() {
        return movieLength;
    }

    public void setMovieLength(int movieLength) {
        this.movieLength = movieLength;
    }

    public String getReleaseDate() {
        return releaseDate;
    }

    public void setReleaseDate(String releaseDate) {
        this.releaseDate = releaseDate;
    }

    public String getDirector() {
        return director;
    }

    public void setDirector(String director) {
        this.director = director;
    }

    public String getLeadActor() {
        return leadActor;
    }

    public void setLeadActor(String leadActor) {
        this.leadActor = leadActor;
    }

    public String getRevenue() {
        return revenue;
    }

    public void setRevenue(String revenue) {
        this.revenue = revenue;
    }

    public int getGenres() {
        return genres;
    }

    public void setGenres(int genres) {
        this.genres = genres;
    }

    public int getRatings() {
        return ratings;
    }

    public void setRatings(int ratings) {
        this.ratings = ratings;
    }
}
|
#!/bin/bash
################################################################################
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
################################################################################
#
# Build OpenJDK - can be called directly but is typically called by
# docker-build.sh or native-build.sh.
#
# See bottom of the script for the call order and each function for further
# details.
#
# Calls 'configure' then 'make' in order to build OpenJDK
#
################################################################################
# Fail fast on any error and on use of unset variables.
set -eu

SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

# Pull in the shared helpers: workspace preparation, configuration parsing,
# constants and common utility functions.
# shellcheck source=sbin/prepareWorkspace.sh
source "$SCRIPT_DIR/prepareWorkspace.sh"
# shellcheck source=sbin/common/config_init.sh
source "$SCRIPT_DIR/common/config_init.sh"
# shellcheck source=sbin/common/constants.sh
source "$SCRIPT_DIR/common/constants.sh"
# shellcheck source=sbin/common/common.sh
source "$SCRIPT_DIR/common/common.sh"

# Shared state used throughout this script: the pipelines library dir plus
# the accumulating configure/make argument strings.
export LIB_DIR=$(crossPlatformRealPath "${SCRIPT_DIR}/../pipelines/")
export jreTargetPath
export CONFIGURE_ARGS=""
export ADDITIONAL_MAKE_TARGETS=""
export GIT_CLONE_ARGUMENTS=()

# Parse the CL arguments, defers to the shared function in common-functions.sh
function parseArguments() {
  parseConfigurationArguments "$@"
}
# Add an argument to the configure call.
#   $1 - configure flag including any trailing '=' (e.g. "--with-boot-jdk=")
#   $2 - value appended directly after the flag (may be empty)
# The flag is skipped when it already appears (substring match) in the
# platform configure args or the user-supplied configure args, so user
# overrides always win.
addConfigureArg()
{
  # Only add an arg if it is not overridden by a user-specified arg.
  if [[ ${BUILD_CONFIG[CONFIGURE_ARGS_FOR_ANY_PLATFORM]} != *"$1"* ]] && [[ ${BUILD_CONFIG[USER_SUPPLIED_CONFIGURE_ARGS]} != *"$1"* ]]; then
    CONFIGURE_ARGS="${CONFIGURE_ARGS} ${1}${2}"
  fi
}
# Add an argument to the configure call, but only when a value was supplied.
#   $1 - configure flag, $2 - value for the flag.
addConfigureArgIfValueIsNotEmpty()
{
  if [ -n "$2" ]; then
    addConfigureArg "$1" "$2"
  fi
}
# Pass the boot JDK location to configure (skipped when JDK_BOOT_DIR is unset).
configuringBootJDKConfigureParameter()
{
  addConfigureArgIfValueIsNotEmpty "--with-boot-jdk=" "${BUILD_CONFIG[JDK_BOOT_DIR]}"
}

# Configure the macOS codesigning identity, when one was supplied.
# (The original comment here said "Configure the boot JDK" — a copy/paste slip.)
configuringMacOSCodesignParameter()
{
  if [ ! -z "${BUILD_CONFIG[MACOSX_CODESIGN_IDENTITY]}" ]; then
    # This command needs to escape the double quotes because they are needed to preserve the spaces in the codesign cert name
    addConfigureArg "--with-macosx-codesign-identity=" "\"${BUILD_CONFIG[MACOSX_CODESIGN_IDENTITY]}\""
  fi
}
# Get the OpenJDK update version and build version.
# Refreshes git tags in the source checkout (removing a stale shallow lock if
# present), derives the version from the latest tag and echoes it. Exits the
# whole script if no tag can be detected.
getOpenJDKUpdateAndBuildVersion()
{
  cd "${BUILD_CONFIG[WORKSPACE_DIR]}/${BUILD_CONFIG[WORKING_DIR]}"

  if [ -d "${BUILD_CONFIG[OPENJDK_SOURCE_DIR]}/.git" ]; then
    # It does exist and it's a repo other than the AdoptOpenJDK one
    cd "${BUILD_CONFIG[OPENJDK_SOURCE_DIR]}" || return
    # A leftover shallow.lock usually means an earlier fetch was interrupted.
    if [ -f ".git/shallow.lock" ]
    then
      echo "Detected lock file, assuming this is an error, removing"
      rm ".git/shallow.lock"
    fi

    # shellcheck disable=SC2154
    echo "Pulling latest tags and getting the latest update version using git fetch -q --tags ${BUILD_CONFIG[SHALLOW_CLONE_OPTION]}"
    # shellcheck disable=SC2154
    echo "NOTE: This can take quite some time! Please be patient"
    git fetch -q --tags ${BUILD_CONFIG[SHALLOW_CLONE_OPTION]}
    local openJdkVersion=$(getOpenJdkVersion)
    if [[ "${openJdkVersion}" == "" ]] ; then
      # shellcheck disable=SC2154
      echo "Unable to detect git tag, exiting..."
      exit 1
    else
      echo "OpenJDK repo tag is $openJdkVersion"
    fi

    # Extract the update number from a jdk8-style tag (e.g. "131" from "jdk8u131-b11").
    local openjdk_update_version;
    openjdk_update_version=$(echo "${openJdkVersion}" | cut -d'u' -f 2 | cut -d'-' -f 1)

    # TODO dont modify config in build script
    echo "Version: ${openjdk_update_version} ${BUILD_CONFIG[OPENJDK_BUILD_NUMBER]}"
  fi

  cd "${BUILD_CONFIG[WORKSPACE_DIR]}"
}
# Echo the OpenJDK version string for the current build variant.
# Corretto derives it from the components of version.txt; everything else
# uses the configured TAG or the first tag from the git repo, with
# AdoptOpenJDK-specific tag suffixes/prefixes stripped.
getOpenJdkVersion() {
  local version;

  if [ "${BUILD_CONFIG[BUILD_VARIANT]}" == "${BUILD_VARIANT_CORRETTO}" ]; then
    local corrVerFile=${BUILD_CONFIG[WORKSPACE_DIR]}/${BUILD_CONFIG[WORKING_DIR]}/${BUILD_CONFIG[OPENJDK_SOURCE_DIR]}/version.txt

    local corrVersion="$(cut -d'.' -f 1 < ${corrVerFile})"
    if [ "${corrVersion}" == "8" ]; then
      # JDK8 layout: major.update.build.fix -> "jdk8u<update>-b<build>.<fix>"
      local updateNum="$(cut -d'.' -f 2 < ${corrVerFile})"
      local buildNum="$(cut -d'.' -f 3 < ${corrVerFile})"
      local fixNum="$(cut -d'.' -f 4 < ${corrVerFile})"
      version="jdk8u${updateNum}-b${buildNum}.${fixNum}"
    else
      # JDK9+ layout: major.minor.update.build.fix -> "jdk-<ver>+<build>.<fix>"
      local minorNum="$(cut -d'.' -f 2 < ${corrVerFile})"
      local updateNum="$(cut -d'.' -f 3 < ${corrVerFile})"
      local buildNum="$(cut -d'.' -f 4 < ${corrVerFile})"
      local fixNum="$(cut -d'.' -f 5 < ${corrVerFile})"
      version="jdk-${corrVersion}.${minorNum}.${updateNum}+${buildNum}.${fixNum}"
    fi
  else
    version=${BUILD_CONFIG[TAG]:-$(getFirstTagFromOpenJDKGitRepo)}
    # TODO remove pending #1016
    version=${version%_adopt}
    version=${version#aarch64-shenandoah-}
  fi

  echo ${version}
}
# Ensure that we produce builds with versions strings something like:
#
# openjdk version "1.8.0_131"
# OpenJDK Runtime Environment (build 1.8.0-adoptopenjdk-<user>_2017_04_17_17_21-b00)
# OpenJDK 64-Bit Server VM (build 25.71-b00, mixed mode)
#
# The three branches below handle the differing configure flags of
# JDK8, JDK9 and JDK10+ respectively; flag order matters, so the statement
# sequence should not be rearranged.
configuringVersionStringParameter()
{
  stepIntoTheWorkingDirectory

  local openJdkVersion=$(getOpenJdkVersion)
  echo "OpenJDK repo tag is ${openJdkVersion}"

  # --with-milestone=fcs deprecated at jdk11, removed at jdk12
  if [ "${BUILD_CONFIG[OPENJDK_FEATURE_NUMBER]}" -lt 12 ]; then
    addConfigureArg "--with-milestone=" "fcs"
  fi

  # Timestamp suffix used to mark nightly (non-release) builds.
  local dateSuffix=$(date -u +%Y%m%d%H%M)

  if [ "${BUILD_CONFIG[OPENJDK_CORE_VERSION]}" == "${JDK8_CORE_VERSION}" ]; then
    if [ "${BUILD_CONFIG[RELEASE]}" == "false" ]; then
      addConfigureArg "--with-user-release-suffix=" "${dateSuffix}"
    fi
    if [ "${BUILD_CONFIG[BUILD_VARIANT]}" == "${BUILD_VARIANT_HOTSPOT}" ]; then
      # No JFR support in AIX or zero builds (s390 or armv7l)
      if [ "${BUILD_CONFIG[OS_ARCHITECTURE]}" != "s390x" ] && [ "${BUILD_CONFIG[OS_KERNEL_NAME]}" != "aix" ] && [ "${BUILD_CONFIG[OS_ARCHITECTURE]}" != "armv7l" ]; then
        addConfigureArg "--enable-jfr" ""
      fi
      if [ ${BUILD_CONFIG[ADOPT_PATCHES]} == true ]; then
        addConfigureArg "--with-vendor-name=" "AdoptOpenJDK"
      fi
    fi

    # Set the update version (e.g. 131), this gets passed in from the calling script
    local updateNumber=${BUILD_CONFIG[OPENJDK_UPDATE_VERSION]}
    if [ -z "${updateNumber}" ]; then
      updateNumber=$(echo "${openJdkVersion}" | cut -f1 -d"-" | cut -f2 -d"u")
    fi
    addConfigureArgIfValueIsNotEmpty "--with-update-version=" "${updateNumber}"

    # Set the build number (e.g. b04), this gets passed in from the calling script
    local buildNumber=${BUILD_CONFIG[OPENJDK_BUILD_NUMBER]}
    if [ -z "${buildNumber}" ]; then
      buildNumber=$(echo "${openJdkVersion}" | cut -f2 -d"-")
    fi
    if [ "${buildNumber}" ] && [ "${buildNumber}" != "ga" ]; then
      addConfigureArgIfValueIsNotEmpty "--with-build-number=" "${buildNumber}"
    fi
  elif [ "${BUILD_CONFIG[OPENJDK_CORE_VERSION]}" == "${JDK9_CORE_VERSION}" ]; then
    # JDK9: build number comes after '+' in the tag.
    local buildNumber=${BUILD_CONFIG[OPENJDK_BUILD_NUMBER]}
    if [ -z "${buildNumber}" ]; then
      buildNumber=$(echo "${openJdkVersion}" | cut -f2 -d"+")
    fi
    if [ "${BUILD_CONFIG[RELEASE]}" == "false" ]; then
      addConfigureArg "--with-version-opt=" "${dateSuffix}"
    else
      addConfigureArg "--without-version-opt" ""
    fi
    addConfigureArg "--without-version-pre" ""
    addConfigureArgIfValueIsNotEmpty "--with-version-build=" "${buildNumber}"
  else
    # > JDK 9
    # Set the build number (e.g. b04), this gets passed in from the calling script
    local buildNumber=${BUILD_CONFIG[OPENJDK_BUILD_NUMBER]}
    if [ -z "${buildNumber}" ]; then
      # Get build number (eg.10) from tag of potential format "jdk-11.0.4+10_adopt"
      buildNumber=$(echo "${openJdkVersion}" | cut -d_ -f1 | cut -f2 -d"+")
    fi
    if [ "${BUILD_CONFIG[RELEASE]}" == "false" ]; then
      addConfigureArg "--with-version-opt=" "${dateSuffix}"
    else
      addConfigureArg "--without-version-opt" ""
    fi
    addConfigureArg "--without-version-pre" ""
    addConfigureArgIfValueIsNotEmpty "--with-version-build=" "${buildNumber}"
    # Vendor branding for JDK10+ builds.
    addConfigureArg "--with-vendor-version-string=" "AdoptOpenJDK"
    addConfigureArg "--with-vendor-url=" "https://adoptopenjdk.net/"
    addConfigureArg "--with-vendor-name=" "AdoptOpenJDK"
    addConfigureArg "--with-vendor-bug-url=" "https://github.com/AdoptOpenJDK/openjdk-support/issues"
    if [[ "${BUILD_CONFIG[BUILD_VARIANT]}" == "${BUILD_VARIANT_OPENJ9}" ]]; then
      addConfigureArg "--with-vendor-vm-bug-url=" "https://github.com/eclipse/openj9/issues"
    else
      addConfigureArg "--with-vendor-vm-bug-url=" "https://github.com/AdoptOpenJDK/openjdk-support/issues"
    fi
  fi
  echo "Completed configuring the version string parameter, config args are now: ${CONFIGURE_ARGS}"
}
# Construct the remaining 'configure' parameters: ccache when available,
# the freemarker jar for OpenJ9, and X11/ALSA locations for JDK8.
buildingTheRestOfTheConfigParameters()
{
  if [ ! -z "$(which ccache)" ]; then
    addConfigureArg "--enable-ccache" ""
  fi

  # Point-in-time dependency for openj9 only
  if [[ "${BUILD_CONFIG[BUILD_VARIANT]}" == "${BUILD_VARIANT_OPENJ9}" ]] ; then
    addConfigureArg "--with-freemarker-jar=" "${BUILD_CONFIG[WORKSPACE_DIR]}/${BUILD_CONFIG[WORKING_DIR]}/freemarker-${FREEMARKER_LIB_VERSION}/freemarker.jar"
  fi

  if [ "${BUILD_CONFIG[OPENJDK_CORE_VERSION]}" == "${JDK8_CORE_VERSION}" ] ; then
    addConfigureArg "--with-x=" "/usr/include/X11"
    addConfigureArg "--with-alsa=" "${BUILD_CONFIG[WORKSPACE_DIR]}/${BUILD_CONFIG[WORKING_DIR]}/installedalsa"
  fi
}
# Force a release-level build and strip native debug symbols where the
# JDK version / variant supports the relevant flags.
configureDebugParameters() {
  # We don't want any extra debug symbols - ensure it's set to release;
  # other options include fastdebug and slowdebug.
  addConfigureArg "--with-debug-level=" "release"

  if [ "${BUILD_CONFIG[OPENJDK_CORE_VERSION]}" == "${JDK8_CORE_VERSION}" ]; then
    # JDK8 uses the older flag names.
    addConfigureArg "--disable-zip-debug-info" ""
    if [[ "${BUILD_CONFIG[BUILD_VARIANT]}" != "${BUILD_VARIANT_OPENJ9}" ]]; then
      addConfigureArg "--disable-debug-symbols" ""
    fi
  else
    if [[ "${BUILD_CONFIG[BUILD_VARIANT]}" != "${BUILD_VARIANT_OPENJ9}" ]]; then
      addConfigureArg "--with-native-debug-symbols=" "none"
    fi
  fi
}
# Decide where 'configure' should look for freetype, unless the user already
# supplied a --with-freetype* flag of their own.
configureFreetypeLocation() {
  if [[ ! "${CONFIGURE_ARGS}" =~ "--with-freetype" ]]; then
    if [[ "${BUILD_CONFIG[FREETYPE]}" == "true" ]] ; then
      if [[ "$OSTYPE" == "cygwin" ]] || [[ "$OSTYPE" == "msys" ]] ; then
        # Windows: older JDKs build freetype from source; newer ones bundle it.
        case "${BUILD_CONFIG[OPENJDK_CORE_VERSION]}" in
          jdk8*|jdk9*|jdk10*) addConfigureArg "--with-freetype-src=" "${BUILD_CONFIG[WORKSPACE_DIR]}/libs/freetype" ;;
          *) freetypeDir=${BUILD_CONFIG[FREETYPE_DIRECTORY]:-bundled} ;;
        esac
      else
        # Bug fix: the previous line read
        #   local freetypeDir=BUILD_CONFIG[FREETYPE_DIRECTORY]
        # which assigned the literal string instead of expanding the array
        # entry. Declare with a proper expansion (the case below always
        # overwrites it, but the initial value should not be garbage).
        local freetypeDir="${BUILD_CONFIG[FREETYPE_DIRECTORY]}"
        case "${BUILD_CONFIG[OPENJDK_CORE_VERSION]}" in
          jdk8*|jdk9*|jdk10*) freetypeDir=${BUILD_CONFIG[FREETYPE_DIRECTORY]:-"${BUILD_CONFIG[WORKSPACE_DIR]}/${BUILD_CONFIG[WORKING_DIR]}/installedfreetype"} ;;
          *) freetypeDir=${BUILD_CONFIG[FREETYPE_DIRECTORY]:-bundled} ;;
        esac

        echo "setting freetype dir to ${freetypeDir}"
        addConfigureArg "--with-freetype=" "${freetypeDir}"
      fi
    fi
  fi
}
# Construct all of the 'configure' parameters by delegating to the
# per-concern helpers, then appending any user-supplied args (which win).
configureCommandParameters()
{
  configuringVersionStringParameter
  configuringBootJDKConfigureParameter
  configuringMacOSCodesignParameter
  configureDebugParameters

  if [[ "$OSTYPE" == "cygwin" ]] || [[ "$OSTYPE" == "msys" ]]; then
    echo "Windows or Windows-like environment detected, skipping configuring environment for custom Boot JDK and other 'configure' settings."
    if [[ "${BUILD_CONFIG[BUILD_VARIANT]}" == "${BUILD_VARIANT_OPENJ9}" ]] && [ "${BUILD_CONFIG[OPENJDK_CORE_VERSION]}" == "${JDK8_CORE_VERSION}" ]; then
      local addsDir="${BUILD_CONFIG[WORKSPACE_DIR]}/${BUILD_CONFIG[WORKING_DIR]}/${BUILD_CONFIG[OPENJDK_SOURCE_DIR]}/closed/adds"
      # This is unfortunately required as if the path does not start with "/cygdrive" the make scripts are unable to find the "/closed/adds" directory.
      if ! echo "$addsDir" | egrep -q "^/cygdrive/"; then
        # BUILD_CONFIG[WORKSPACE_DIR] does not seem to be an absolute path, prepend /cygdrive/c/cygwin64/
        echo "Prepending /cygdrive/c/cygwin64/ to BUILD_CONFIG[WORKSPACE_DIR]"
        addsDir="/cygdrive/c/cygwin64/$addsDir"
      fi
      echo "adding source route -with-add-source-root=${addsDir}"
      addConfigureArg "--with-add-source-root=" "${addsDir}"
    fi
  else
    echo "Building up the configure command..."
    buildingTheRestOfTheConfigParameters
  fi

  echo "Configuring jvm variants if provided"
  addConfigureArgIfValueIsNotEmpty "--with-jvm-variants=" "${BUILD_CONFIG[JVM_VARIANT]}"

  # Now we add any configure arguments the user has specified on the command line.
  CONFIGURE_ARGS="${CONFIGURE_ARGS} ${BUILD_CONFIG[USER_SUPPLIED_CONFIGURE_ARGS]}"

  configureFreetypeLocation

  # Fix: this closing log line previously repeated the "version string
  # parameter" message copy-pasted from configuringVersionStringParameter.
  echo "Completed configuring the command parameters, config args are now: ${CONFIGURE_ARGS}"
}
# Make sure we're in the source directory for OpenJDK now.
# Exits the script if the directory cannot be entered.
stepIntoTheWorkingDirectory() {
  cd "${BUILD_CONFIG[WORKSPACE_DIR]}/${BUILD_CONFIG[WORKING_DIR]}/${BUILD_CONFIG[OPENJDK_SOURCE_DIR]}" || exit

  # corretto nest their source under /src in their dir
  if [ "${BUILD_CONFIG[BUILD_VARIANT]}" == "${BUILD_VARIANT_CORRETTO}" ]; then
    cd "src";
  fi

  echo "Should have the source, I'm at $PWD"
}
# Render build.template into config/configure-and-build.sh, substituting the
# fully assembled configure and make command lines. The extra make targets
# (test-image, debug-image) depend on JDK version and build variant.
buildTemplatedFile() {
  echo "Configuring command and using the pre-built config params..."

  stepIntoTheWorkingDirectory

  echo "Currently at '${PWD}'"

  FULL_CONFIGURE="bash ./configure --verbose ${CONFIGURE_ARGS} ${BUILD_CONFIG[CONFIGURE_ARGS_FOR_ANY_PLATFORM]}"
  echo "Running ./configure with arguments '${FULL_CONFIGURE}'"

  # If it's Java 9+ then we also make test-image to build the native test libraries,
  # For openj9 add debug-image
  JDK_PREFIX="jdk"
  JDK_VERSION_NUMBER="${BUILD_CONFIG[OPENJDK_FEATURE_NUMBER]}"
  if [[ "${BUILD_CONFIG[BUILD_VARIANT]}" == "${BUILD_VARIANT_OPENJ9}" ]]; then
    ADDITIONAL_MAKE_TARGETS=" test-image debug-image"
  elif [ "$JDK_VERSION_NUMBER" -gt 8 ] || [ "${BUILD_CONFIG[OPENJDK_CORE_VERSION]}" == "${JDKHEAD_VERSION}" ]; then
    ADDITIONAL_MAKE_TARGETS=" test-image"
  fi

  FULL_MAKE_COMMAND="${BUILD_CONFIG[MAKE_COMMAND_NAME]} ${BUILD_CONFIG[MAKE_ARGS_FOR_ANY_PLATFORM]} ${BUILD_CONFIG[USER_SUPPLIED_MAKE_ARGS]} ${ADDITIONAL_MAKE_TARGETS}"

  # shellcheck disable=SC2002
  cat "$SCRIPT_DIR/build.template" | \
    sed -e "s|{configureArg}|${FULL_CONFIGURE}|" \
        -e "s|{makeCommandArg}|${FULL_MAKE_COMMAND}|" > "${BUILD_CONFIG[WORKSPACE_DIR]}/config/configure-and-build.sh"
}
# Run the generated configure-and-build.sh, translating its exit codes
# (1 = make failed, 2 = configure failed) into user-facing diagnostics.
executeTemplatedFile() {
  stepIntoTheWorkingDirectory

  echo "Currently at '${PWD}'"

  # Execute the build passing the workspace dir and target dir as params for configure.txt.
  # Fix: with 'set -eu' active, the previous 'cmd; exitCode=$?' pattern
  # aborted the whole script on failure BEFORE the exit code could be
  # inspected, making the branches below unreachable. '|| exitCode=$?'
  # captures the status without tripping errexit.
  exitCode=0
  bash "${BUILD_CONFIG[WORKSPACE_DIR]}/config/configure-and-build.sh" ${BUILD_CONFIG[WORKSPACE_DIR]} ${BUILD_CONFIG[TARGET_DIR]} || exitCode=$?

  if [ "${exitCode}" -eq 1 ]; then
    echo "Failed to make the JDK, exiting"
    exit 1;
  elif [ "${exitCode}" -eq 2 ]; then
    echo "Failed to configure the JDK, exiting"
    echo "Did you set the JDK boot directory correctly? Override by exporting JDK_BOOT_DIR"
    echo "For example, on RHEL you would do export JDK_BOOT_DIR=/usr/lib/jvm/java-1.7.0-openjdk-1.7.0.131-2.6.9.0.el7_3.x86_64"
    echo "Current JDK_BOOT_DIR value: ${BUILD_CONFIG[JDK_BOOT_DIR]}"
    exit 2;
  fi
}
# Echo a JAVA_HOME suitable for running gradle. Later checks override
# earlier ones, so the effective priority is:
# JDK11_BOOT_DIR (non-arm) > JDK8_BOOT_DIR > JAVA_HOME.
getGradleJavaHome() {
  local gradleJavaHome=""
  if [ ${JAVA_HOME+x} ] && [ -d "${JAVA_HOME}" ]; then
    gradleJavaHome=${JAVA_HOME}
  fi
  if [ ${JDK8_BOOT_DIR+x} ] && [ -d "${JDK8_BOOT_DIR}" ]; then
    gradleJavaHome=${JDK8_BOOT_DIR}
  fi

  # Special case arm because for some unknown reason the JDK11_BOOT_DIR that arm downloads is unable to form connection
  # to services.gradle.org
  if [ ${JDK11_BOOT_DIR+x} ] && [ -d "${JDK11_BOOT_DIR}" ] && [ "${ARCHITECTURE}" != "arm" ]; then
    gradleJavaHome=${JDK11_BOOT_DIR}
  fi

  if [ ! -d "$gradleJavaHome" ]; then
    echo "[WARNING] Unable to find java to run gradle with, this build may fail with /bin/java: No such file or directory. Set JAVA_HOME, JDK8_BOOT_DIR or JDK11_BOOT_DIR to squash this warning: $gradleJavaHome">&2
  fi

  echo $gradleJavaHome
}
# Echo the Gradle user home directory: an explicit
# BUILD_CONFIG[GRADLE_USER_HOME_DIR] wins, otherwise <workspace>/.gradle.
getGradleUserHome() {
  local gradleUserHome=""
  if [ -n "${BUILD_CONFIG[GRADLE_USER_HOME_DIR]}" ]; then
    gradleUserHome="${BUILD_CONFIG[GRADLE_USER_HOME_DIR]}"
  else
    gradleUserHome="${BUILD_CONFIG[WORKSPACE_DIR]}/.gradle"
  fi
  # Fix: quote the expansion so a path containing spaces is not word-split.
  echo "$gradleUserHome"
}
# Build the shared Java helper library (shadowJar) with gradle, retrying up
# to three times to ride out transient network/server outages, then smoke-test
# that the version parser actually runs.
buildSharedLibs() {
  cd "${LIB_DIR}"

  local gradleJavaHome=$(getGradleJavaHome)
  local gradleUserHome=$(getGradleUserHome)
  echo "Running gradle with $gradleJavaHome at $gradleUserHome"

  gradlecount=1
  while ! JAVA_HOME="$gradleJavaHome" GRADLE_USER_HOME="$gradleUserHome" bash ./gradlew --no-daemon clean shadowJar; do
    echo "RETRYWARNING: Gradle failed on attempt $gradlecount"
    sleep 120 # Wait before retrying in case of network/server outage ...
    gradlecount=$(( gradlecount + 1 ))
    [ $gradlecount -gt 3 ] && exit 1
  done

  # Test that the parser can execute as fail fast rather than waiting till after the build to find out
  "$gradleJavaHome"/bin/java -version 2>&1 | "$gradleJavaHome"/bin/java -cp "target/libs/adopt-shared-lib.jar" ParseVersion -s -f semver 1
}
# Echo the built JDK's version in openjdk-semver form by feeding the output
# of 'java -version' (from PRODUCT_HOME) through the shared ParseVersion tool.
parseJavaVersionString() {
  ADOPT_BUILD_NUMBER="${ADOPT_BUILD_NUMBER:-1}"
  local javaVersion=$(JAVA_HOME="$PRODUCT_HOME" "$PRODUCT_HOME"/bin/java -version 2>&1)
  cd "${LIB_DIR}"
  local gradleJavaHome=$(getGradleJavaHome)
  local version=$(echo "$javaVersion" | JAVA_HOME="$gradleJavaHome" "$gradleJavaHome"/bin/java -cp "target/libs/adopt-shared-lib.jar" ParseVersion -s -f openjdk-semver $ADOPT_BUILD_NUMBER | tr -d '\n')
  echo $version
}
# Print the version string so we know what we've produced.
# Locates PRODUCT_HOME inside the build images dir, verifies the java binary
# exists, and writes the version output to <target>/version.txt. riscv64 is
# cross compiled, so a synthetic version.txt is generated instead of running
# the binary. NOTE(review): 'exit -1' relies on bash coercing -1 to 255.
printJavaVersionString()
{
  stepIntoTheWorkingDirectory

  case "${BUILD_CONFIG[OS_KERNEL_NAME]}" in
  "darwin")
    # shellcheck disable=SC2086
    PRODUCT_HOME=$(ls -d ${PWD}/build/*/images/${BUILD_CONFIG[JDK_PATH]}/Contents/Home)
  ;;
  *)
    # shellcheck disable=SC2086
    PRODUCT_HOME=$(ls -d ${PWD}/build/*/images/${BUILD_CONFIG[JDK_PATH]})
  ;;
  esac

  if [[ -d "$PRODUCT_HOME" ]]; then
    echo "'$PRODUCT_HOME' found"
    if [ ! -r "$PRODUCT_HOME/bin/java" ]; then
      # Dump directory listings to make the failure diagnosable from logs.
      echo "===$PRODUCT_HOME===="
      ls -alh "$PRODUCT_HOME"

      echo "===$PRODUCT_HOME/bin/===="
      ls -alh "$PRODUCT_HOME/bin/"

      echo "Error 'java' does not exist in '$PRODUCT_HOME'."
      exit -1
    elif [ "${ARCHITECTURE}" == "riscv64" ]; then
      # riscv is cross compiled, so we cannot run it on the build system
      # This is a temporary plausible solution in the absence of another fix
      local jdkversion=$(getOpenJdkVersion)
      cat << EOT > "${BUILD_CONFIG[WORKSPACE_DIR]}/${BUILD_CONFIG[TARGET_DIR]}/version.txt"
openjdk version "${jdkversion%%+*}" "$(date +%Y-%m-%d)"
OpenJDK Runtime Environment AdoptOpenJDK (build ${jdkversion%%+*}+0-$(date +%Y%m%d%H%M))
Eclipse OpenJ9 VM AdoptOpenJDK (build master-000000000, JRE 11 Linux riscv-64-Bit Compressed References $(date +%Y%m%d)_00 (JIT disabled, AOT disabled)
OpenJ9 - 000000000
OMR - 000000000
JCL - 000000000 based on ${jdkversion})
EOT
    else
      # print version string around easy to find output
      # do not modify these strings as jenkins looks for them
      echo "=JAVA VERSION OUTPUT="
      "$PRODUCT_HOME"/bin/java -version 2>&1
      echo "=/JAVA VERSION OUTPUT="

      "$PRODUCT_HOME"/bin/java -version > "${BUILD_CONFIG[WORKSPACE_DIR]}/${BUILD_CONFIG[TARGET_DIR]}/version.txt" 2>&1
    fi
  else
    echo "'$PRODUCT_HOME' does not exist, build might have not been successful or not produced the expected JDK image at this location."
    exit -1
  fi
}
# Directory name under which the JDK image is archived.
# Currently just the upstream OpenJDK version string.
getJdkArchivePath() {
  # Todo: Set this to the outcome of https://github.com/AdoptOpenJDK/openjdk-build/issues/1016
  # local version="$(parseJavaVersionString)
  # echo "jdk-${version}"
  local version=$(getOpenJdkVersion)
  echo "$version"
}
# Archive directory name for the JRE image: the JDK archive name plus "-jre".
getJreArchivePath() {
  echo "$(getJdkArchivePath)-jre"
}
# Archive directory name for the test image: JDK archive name plus "-test-image".
getTestImageArchivePath() {
  echo "$(getJdkArchivePath)-test-image"
}
# Archive directory name for the debug image: JDK archive name plus "-debug-image".
getDebugImageArchivePath() {
  echo "$(getJdkArchivePath)-debug-image"
}
# Clean up
# Renames the built image directories to their archive names and strips files
# that should not ship: demo dirs and, for OpenJ9 builds (whose debug symbols
# already live in the debug image), platform-specific debug artifacts.
removingUnnecessaryFiles() {
  local jdkTargetPath=$(getJdkArchivePath)
  local jreTargetPath=$(getJreArchivePath)
  local testImageTargetPath=$(getTestImageArchivePath)
  local debugImageTargetPath=$(getDebugImageArchivePath)
  echo "Removing unnecessary files now..."
  stepIntoTheWorkingDirectory
  cd build/*/images || return
  echo "Currently at '${PWD}'"
  # Rename the JDK image directory to its archive name.
  local jdkPath=$(ls -d ${BUILD_CONFIG[JDK_PATH]})
  echo "moving ${jdkPath} to ${jdkTargetPath}"
  rm -rf "${jdkTargetPath}" || true
  mv "${jdkPath}" "${jdkTargetPath}"
  # Same for the JRE image, when one was produced.
  if [ -d "$(ls -d ${BUILD_CONFIG[JRE_PATH]})" ]
  then
    echo "moving $(ls -d ${BUILD_CONFIG[JRE_PATH]}) to ${jreTargetPath}"
    rm -rf "${jreTargetPath}" || true
    mv "$(ls -d ${BUILD_CONFIG[JRE_PATH]})" "${jreTargetPath}"
    # macOS keeps its content under Contents/Home.
    case "${BUILD_CONFIG[OS_KERNEL_NAME]}" in
    "darwin") dirToRemove="${jreTargetPath}/Contents/Home" ;;
    *) dirToRemove="${jreTargetPath}" ;;
    esac
    rm -rf "${dirToRemove}"/demo || true
  fi
  # Test image - check if the config is set and directory exists
  local testImagePath="${BUILD_CONFIG[TEST_IMAGE_PATH]}"
  if [ ! -z "${testImagePath}" ] && [ -d "${testImagePath}" ]
  then
    echo "moving ${testImagePath} to ${testImageTargetPath}"
    rm -rf "${testImageTargetPath}" || true
    mv "${testImagePath}" "${testImageTargetPath}"
  fi
  # Debug image - check if the config is set and directory exists
  local debugImagePath="${BUILD_CONFIG[DEBUG_IMAGE_PATH]}"
  if [ ! -z "${debugImagePath}" ] && [ -d "${debugImagePath}" ]
  then
    echo "moving ${debugImagePath} to ${debugImageTargetPath}"
    rm -rf "${debugImageTargetPath}" || true
    mv "${debugImagePath}" "${debugImageTargetPath}"
  fi
  # Remove files we don't need
  case "${BUILD_CONFIG[OS_KERNEL_NAME]}" in
  "darwin") dirToRemove="${jdkTargetPath}/Contents/Home" ;;
  *) dirToRemove="${jdkTargetPath}" ;;
  esac
  rm -rf "${dirToRemove}"/demo || true
  # In OpenJ9 builds, debug symbols are captured in the debug image:
  # we don't want another copy of them in the main JDK or JRE archives.
  # Builds for other variants don't normally include debug symbols,
  # but if they were explicitly requested via the configure option
  # '--with-native-debug-symbols=(external|zipped)' leave them alone.
  if [[ "${BUILD_CONFIG[BUILD_VARIANT]}" == "${BUILD_VARIANT_OPENJ9}" ]] ; then
    # .diz files may be present on any platform
    # Note that on AIX, find does not support the '-delete' option.
    find "${jdkTargetPath}" "${jreTargetPath}" -type f -name "*.diz" | xargs rm -f || true
    case "${BUILD_CONFIG[OS_KERNEL_NAME]}" in
    *cygwin*)
      # on Windows, we want to remove .map and .pdb files
      find "${jdkTargetPath}" "${jreTargetPath}" -type f -name "*.map" -delete || true
      find "${jdkTargetPath}" "${jreTargetPath}" -type f -name "*.pdb" -delete || true
      ;;
    darwin)
      # on MacOSX, we want to remove .dSYM folders
      find "${jdkTargetPath}" "${jreTargetPath}" -type d -name "*.dSYM" | xargs -I "{}" rm -rf "{}"
      ;;
    *)
      # on other platforms, we want to remove .debuginfo files
      find "${jdkTargetPath}" "${jreTargetPath}" -type f -name "*.debuginfo" | xargs rm -f || true
      ;;
    esac
  fi
  echo "Finished removing unnecessary files from ${jdkTargetPath}"
}
# Copy the bundled freetype dylib to the name libfontmanager expects
# (libfreetype.6.dylib), codesigning it first when an identity is configured.
# $1 - the image's lib directory; returns early (with a message) if absent.
moveFreetypeLib() {
  local LIB_DIRECTORY="${1}"
  if [ ! -d "${LIB_DIRECTORY}" ]; then
    echo "Could not find dir: ${LIB_DIRECTORY}"
    return
  fi
  echo " Performing copying of the free font library to ${LIB_DIRECTORY}, applicable for this version of the JDK. "
  # Prefer the .dylib.6 name; fall back to plain libfreetype.dylib.
  local SOURCE_LIB_NAME="${LIB_DIRECTORY}/libfreetype.dylib.6"
  if [ ! -f "${SOURCE_LIB_NAME}" ]; then
    SOURCE_LIB_NAME="${LIB_DIRECTORY}/libfreetype.dylib"
  fi
  if [ ! -f "${SOURCE_LIB_NAME}" ]; then
    echo "[Error] ${SOURCE_LIB_NAME} does not exist in the ${LIB_DIRECTORY} folder, please check if this is the right folder to refer to, aborting copy process..."
    return
  fi
  local TARGET_LIB_NAME="${LIB_DIRECTORY}/libfreetype.6.dylib"
  local INVOKED_BY_FONT_MANAGER="${LIB_DIRECTORY}/libfontmanager.dylib"
  echo "Currently at '${PWD}'"
  echo "Copying ${SOURCE_LIB_NAME} to ${TARGET_LIB_NAME}"
  echo " *** Workaround to fix the MacOSX issue where invocation to ${INVOKED_BY_FONT_MANAGER} fails to find ${TARGET_LIB_NAME} ***"
  # codesign freetype before it is bundled
  if [ ! -z "${BUILD_CONFIG[MACOSX_CODESIGN_IDENTITY]}" ]; then
    # NOTE(review): WORKSPACE appears to be provided by the CI environment --
    # confirm it is always set when codesigning is enabled.
    ENTITLEMENTS="$WORKSPACE/entitlements.plist"
    codesign --entitlements "$ENTITLEMENTS" --options runtime --timestamp --sign "${BUILD_CONFIG[MACOSX_CODESIGN_IDENTITY]}" "${SOURCE_LIB_NAME}"
  fi
  cp "${SOURCE_LIB_NAME}" "${TARGET_LIB_NAME}"
  # Print load commands for diagnostics.
  if [ -f "${INVOKED_BY_FONT_MANAGER}" ]; then
    otool -L "${INVOKED_BY_FONT_MANAGER}"
  else
    # shellcheck disable=SC2154
    echo "[Warning] ${INVOKED_BY_FONT_MANAGER} does not exist in the ${LIB_DIRECTORY} folder, please check if this is the right folder to refer to, this may cause runtime issues, please beware..."
  fi
  otool -L "${TARGET_LIB_NAME}"
  echo "Finished copying ${SOURCE_LIB_NAME} to ${TARGET_LIB_NAME}"
}
# If on a Mac, make a copy of the font lib as required.
# $1 - image directory; $2 - "false" to skip (depends on the JDK version).
makeACopyOfLibFreeFontForMacOSX() {
  local DIRECTORY="${1}"
  local PERFORM_COPYING=$2
  echo "PERFORM_COPYING=${PERFORM_COPYING}"
  if [ "${PERFORM_COPYING}" == "false" ]; then
    echo " Skipping copying of the free font library to ${DIRECTORY}, does not apply for this version of the JDK. "
    return
  fi
  if [[ "${BUILD_CONFIG[OS_KERNEL_NAME]}" == "darwin" ]]; then
    # Cover both the modern (lib) and legacy (jre/lib) layouts.
    moveFreetypeLib "${DIRECTORY}/Contents/Home/lib"
    moveFreetypeLib "${DIRECTORY}/Contents/Home/jre/lib"
  fi
}
# Get the tags from the git repo and choose the latest tag when there is more than one for the same SHA.
# Excluding "openj9" tag names as they have other ones for milestones etc. that get in the way
# Prints the chosen tag on stdout, or a warning on stderr when none is found.
getFirstTagFromOpenJDKGitRepo()
{
  # If openj9 and the closed/openjdk-tag.gmk file exists which specifies what level the openj9 jdk code is based upon...
  # Read OPENJDK_TAG value from that file..
  local openj9_openjdk_tag_file="${BUILD_CONFIG[WORKSPACE_DIR]}/${BUILD_CONFIG[WORKING_DIR]}/${BUILD_CONFIG[OPENJDK_SOURCE_DIR]}/closed/openjdk-tag.gmk"
  if [[ "${BUILD_CONFIG[BUILD_VARIANT]}" == "${BUILD_VARIANT_OPENJ9}" ]] && [[ -f "${openj9_openjdk_tag_file}" ]]; then
    # Extract the value after "OPENJDK_TAG :=".
    firstMatchingNameFromRepo=$(grep OPENJDK_TAG ${openj9_openjdk_tag_file} | awk 'BEGIN {FS = "[ :=]+"} {print $2}')
  else
    git fetch --tags "${BUILD_CONFIG[WORKSPACE_DIR]}/${BUILD_CONFIG[WORKING_DIR]}/${BUILD_CONFIG[OPENJDK_SOURCE_DIR]}"
    # NOTE(review): GIT_TAGS_TO_SEARCH is defined elsewhere -- confirm it is
    # always set before this function runs.
    revList=$(git rev-list --tags --topo-order --max-count=$GIT_TAGS_TO_SEARCH)
    if [[ "${BUILD_CONFIG[OPENJDK_CORE_VERSION]}" == "${JDKHEAD_VERSION}" ]]; then
      # For the development tree jdk/jdk, there might be two major versions in development
      # in parallel. One in stabilization mode, and the currently active developement line
      # Thus, add an explicit grep on the specified FEATURE_VERSION so as to appropriately
      # set the correct build number later on.
      firstMatchingNameFromRepo=$(git describe --tags $revList | grep "jdk-${BUILD_CONFIG[OPENJDK_FEATURE_NUMBER]}" | grep -v openj9 | grep -v _adopt | grep -v "\-ga" | head -1)
    else
      firstMatchingNameFromRepo=$(git describe --tags $revList | grep jdk | grep -v openj9 | grep -v _adopt | grep -v "\-ga" | head -1)
    fi
    # this may not find the correct tag if there are multiples on the commit so find commit
    # that contains this tag and then use `git tag` to find the real tag
    revList=$(git rev-list -n 1 $firstMatchingNameFromRepo --)
    firstMatchingNameFromRepo=$(git tag --points-at $revList | grep -v "\-ga" | tail -1)
  fi
  if [ -z "$firstMatchingNameFromRepo" ]; then
    echo "WARNING: Failed to identify latest tag in the repository" 1>&2
  else
    echo "$firstMatchingNameFromRepo"
  fi
}
# Archive one image directory and move it into the build's target directory.
# $1 - directory to archive; $2 - final artifact file name.
createArchive() {
  repoLocation=$1
  targetName=$2
  archiveExtension=$(getArchiveExtension)
  # createOpenJDKArchive emits "OpenJDK<ext>" in the current directory.
  createOpenJDKArchive "${repoLocation}" "OpenJDK"
  archive="${PWD}/OpenJDK${archiveExtension}"
  echo "Your final archive was created at ${archive}"
  echo "Moving the artifact to ${BUILD_CONFIG[WORKSPACE_DIR]}/${BUILD_CONFIG[TARGET_DIR]}"
  mv "${archive}" "${BUILD_CONFIG[WORKSPACE_DIR]}/${BUILD_CONFIG[TARGET_DIR]}/${targetName}"
}
# Create a Tar ball
# Archives the JDK image plus, when present, the JRE, test-image and
# debug-image directories, deriving each artifact name from TARGET_FILE_NAME.
createOpenJDKTarArchive()
{
  local jdkTargetPath=$(getJdkArchivePath)
  local jreTargetPath=$(getJreArchivePath)
  local testImageTargetPath=$(getTestImageArchivePath)
  local debugImageTargetPath=$(getDebugImageArchivePath)
  echo "OpenJDK JDK path will be ${jdkTargetPath}. JRE path will be ${jreTargetPath}"
  if [ -d "${jreTargetPath}" ]; then
    # JRE artifact name: swap the -jdk marker for -jre.
    local jreName=$(echo "${BUILD_CONFIG[TARGET_FILE_NAME]}" | sed 's/-jdk/-jre/')
    createArchive "${jreTargetPath}" "${jreName}"
  fi
  if [ -d "${testImageTargetPath}" ]; then
    echo "OpenJDK test image path will be ${testImageTargetPath}."
    local testImageName=$(echo "${BUILD_CONFIG[TARGET_FILE_NAME]//-jdk/-testimage}")
    createArchive "${testImageTargetPath}" "${testImageName}"
  fi
  if [ -d "${debugImageTargetPath}" ]; then
    echo "OpenJDK debug image path will be ${debugImageTargetPath}."
    local debugImageName=$(echo "${BUILD_CONFIG[TARGET_FILE_NAME]//-jdk/-debugimage}")
    createArchive "${debugImageTargetPath}" "${debugImageName}"
  fi
  # The JDK itself keeps the configured target file name unchanged.
  createArchive "${jdkTargetPath}" "${BUILD_CONFIG[TARGET_FILE_NAME]}"
}
# Echo success: final status line once every pipeline stage has run.
showCompletionMessage() {
  echo "All done!"
}
# Apply the macOS freetype workaround to both the JDK and JRE images,
# gated by the per-image BUILD_CONFIG copy flags.
copyFreeFontForMacOS() {
  local jdkTargetPath=$(getJdkArchivePath)
  local jreTargetPath=$(getJreArchivePath)
  makeACopyOfLibFreeFontForMacOSX "${jdkTargetPath}" "${BUILD_CONFIG[COPY_MACOSX_FREE_FONT_LIB_FOR_JDK_FLAG]}"
  makeACopyOfLibFreeFontForMacOSX "${jreTargetPath}" "${BUILD_CONFIG[COPY_MACOSX_FREE_FONT_LIB_FOR_JRE_FLAG]}"
}
# Remove any previous build's target directory; failure (e.g. when it does
# not exist) is ignored. The :? expansion aborts when WORKSPACE_DIR is
# unset/empty, so this can never expand to a dangerous "rm -r /...".
wipeOutOldTargetDir() {
  rm -r "${BUILD_CONFIG[WORKSPACE_DIR]:?}/${BUILD_CONFIG[TARGET_DIR]}" || true
}
# Create a fresh target directory for this build's artifacts; exits on
# failure. (Removal of the previous contents is done by wipeOutOldTargetDir.)
createTargetDir() {
  mkdir -p "${BUILD_CONFIG[WORKSPACE_DIR]}/${BUILD_CONFIG[TARGET_DIR]}" || exit
}
fixJavaHomeUnderDocker() {
  # If we are inside docker we cannot trust the JDK_BOOT_DIR that was detected on the host system
  if [[ "${BUILD_CONFIG[USE_DOCKER]}" == "true" ]];
  then
    # clear BUILD_CONFIG[JDK_BOOT_DIR] and re set it
    BUILD_CONFIG[JDK_BOOT_DIR]=""
    setBootJdk
  fi
}
################################################################################
# Main flow: each step below is a function defined earlier in this script (or
# sourced from its libraries); the order matters -- config and workspace setup
# first, then the build, then post-processing and archiving.
loadConfigFromFile
fixJavaHomeUnderDocker
cd "${BUILD_CONFIG[WORKSPACE_DIR]}"
parseArguments "$@"
buildSharedLibs
wipeOutOldTargetDir
createTargetDir
configureWorkspace
getOpenJDKUpdateAndBuildVersion
configureCommandParameters
buildTemplatedFile
executeTemplatedFile
printJavaVersionString
removingUnnecessaryFiles
copyFreeFontForMacOS
createOpenJDKTarArchive
showCompletionMessage
# ccache is not detected properly TODO
# change grep to something like $GREP -e '^1.*' -e '^2.*' -e '^3\.0.*' -e '^3\.1\.[0123]$'`]
# See https://github.com/AdoptOpenJDK/openjdk-jdk8u/blob/dev/common/autoconf/build-performance.m4
|
def calculate_total_neighbors(input_grid_size, output_grid_size, resolution):
    """Estimate the total stencil neighbor count for a regridding operation.

    The base neighbor count is 1 rather than the usual default of 8, which
    would misbehave when the input and output grids are similar in size and
    resolution. Grid sizes are (rows, cols)-style pairs; returns a float.
    """
    default_neighbors = 1
    grid_cells = (input_grid_size[0] * input_grid_size[1]
                  * output_grid_size[0] * output_grid_size[1])
    return default_neighbors * grid_cells / resolution
def calculate_position(s0: float, v0: float, a: float, t: float) -> float:
return s0 + v0 * t + 0.5 * a * t**2
position = calculate_position(10.0, 5.0, 2.0, 3.0)
print(position) # Output: 31.0 |
<reponame>GolemXlV/poolmate
from argparse import ArgumentParser
from options import write_options
import pandas as pd
import numpy as np
import math
import tqdm
import pickle
import re
import sys
import scipy
import string
import io
class Algorithm(object):
    """Base class for pool-based teaching-set search strategies.

    Tracks the best (model, loss, set) triple seen so far and supplies the
    initial teaching set: a caller-provided seed (served exactly once) or a
    uniform random draw of indices from the pool.
    """

    def __init__(self,
                 random,
                 pool_size,
                 teaching_set_size,
                 initial_teaching_set):
        # `random` is the shared random state used for all sampling
        # (callers pass np.random.RandomState -- see Runner.run_experiment).
        self.random = random
        self.pool_size = pool_size
        self.teaching_set_size = teaching_set_size
        # Best result found so far, across all fits.
        self.best_loss = np.inf
        self.best_model = None
        self.best_set = None
        self.result = None
        # Optional caller-supplied seed set, served only on the first call.
        self._initial_teaching_set = initial_teaching_set
        self.calls_to_initial_teaching_set = 0

    def validate(self, search_budget):
        # Hook: subclasses may sanity-check their parameters against the budget.
        pass

    def next_fit_request(self):
        # Hook: return the next candidate teaching set (list of pool indices).
        pass

    def next_fit_result(self, model, loss, set):
        # Hook: receive the fit outcome for the most recently requested set.
        pass

    def accept_best(self, model, loss, set):
        # Record the triple if it strictly beats the best loss seen so far.
        if loss < self.best_loss:
            self.best_model = model
            self.best_loss = loss
            self.best_set = set

    def initial_teaching_set(self):
        # First call returns the seed set when one was given; every other
        # call draws teaching_set_size indices uniformly from the pool.
        if self.calls_to_initial_teaching_set == 0 and self._initial_teaching_set:
            self.calls_to_initial_teaching_set += 1
            return self._initial_teaching_set
        else:
            self.calls_to_initial_teaching_set += 1
            # NOTE(review): randint bounds follow numpy semantics (pool_size
            # exclusive); with stdlib random.Random this would be inclusive.
            return [self.random.randint(0, self.pool_size) for _ in range(self.teaching_set_size)]
class RandomIndexGreedySwap(Algorithm):
    """Greedy swap search: each round picks a random position in the current
    teaching set, tries replacing it with candidate pool items, and keeps the
    replacement with the lowest loss.
    """

    def __init__(self,
                 random,
                 pool_size,
                 teaching_set_size,
                 initial_teaching_set,
                 search_budget,
                 proposals):
        super(RandomIndexGreedySwap, self).__init__(random,
                                                    pool_size,
                                                    teaching_set_size,
                                                    initial_teaching_set)
        self.search_budget = search_budget
        # Replacement candidates tried per round; a falsy value means "whole pool".
        self.proposals = proposals or pool_size
        # Currently accepted teaching set and its fit.
        self.current_set = None
        self.current_model = None
        self.current_loss = None
        # Candidate sets queued for fitting, and results received this round.
        self.models_to_fetch = []
        self.models_fetched = []
        self.step = 0

    def fill_models_to_fetch(self, base_set):
        # Pick one random index of the teaching set to vary this round.
        idx = self.random.randint(0, self.teaching_set_size)
        if self.step + self.proposals > self.search_budget:
            # Near the budget's end: queue only as many candidates as remain.
            rng = self.random.choice(self.pool_size,
                                     size=self.search_budget - self.step,
                                     replace=False).tolist()
        elif self.proposals < self.pool_size:
            # Sample a subset of the pool as replacement candidates.
            rng = self.random.choice(self.pool_size,
                                     size=self.proposals,
                                     replace=False).tolist()
        else:
            # Try every pool item.
            rng = range(self.pool_size)
        for n in reversed(rng):
            # Candidate: base_set with position idx replaced by pool item n.
            ns = base_set[0:idx] + [n] + base_set[idx + 1:]
            self.models_to_fetch.append(ns)

    def next_fit_request(self):
        if not self.current_set:
            # First call: propose swaps against the initial teaching set.
            self.fill_models_to_fetch(self.initial_teaching_set())
        elif not self.models_to_fetch:
            # Round exhausted: start a new round from the current set.
            self.models_fetched = []
            self.fill_models_to_fetch(self.current_set)
        return self.models_to_fetch.pop()

    def next_fit_result(self, model, loss, set):
        self.step += 1
        # NOTE(review): this truthiness test also fires when current_loss is
        # exactly 0.0, re-seeding the current state -- confirm intended.
        if not self.current_loss:
            self.current_set = set
            self.current_model = model
            self.current_loss = loss
        self.models_fetched.append((model, loss, set))
        if not self.models_to_fetch:
            # Round complete: greedily accept the best swap tried this round.
            for m, l, s in self.models_fetched:
                if l < self.current_loss:
                    self.current_model = m
                    self.current_loss = l
                    self.current_set = s
            self.accept_best(self.current_model,
                             self.current_loss,
                             self.current_set)
class UniformSampling(Algorithm):
    """Baseline strategy: every candidate set is an independent uniform draw."""

    def next_fit_request(self):
        # Each request is a fresh random teaching set (after the optional seed).
        return self.initial_teaching_set()

    def next_fit_result(self, model, loss, set):
        # Keep whichever sampled set achieved the lowest loss so far.
        self.accept_best(model, loss, set)
class GreedyAdd(Algorithm):
    """Greedy forward selection: grow a teaching set one item at a time,
    fitting `proposals` candidate extensions per step and keeping the best.
    """

    def __init__(self,
                 random,
                 pool_size,
                 teaching_set_size,
                 initial_teaching_set,
                 proposals,
                 search_budget):
        super(GreedyAdd, self).__init__(random,
                                        pool_size,
                                        teaching_set_size,
                                        initial_teaching_set)
        # Candidates tried per add-step; when unset, spread the budget evenly
        # across the teaching-set positions (capped at the pool size).
        # NOTE(review): on Python 3 this "/" yields a float, which later feeds
        # random.choice(size=...) -- confirm which Python version runs this.
        self.proposals = proposals or min(pool_size,
                                          search_budget / teaching_set_size)
        # TODO could also add a second flag for # of teaching sets you want to
        # produce which then sets proposals appropriately
        self.current_set = []
        self.models_to_fetch = []
        self.models_fetched = []

    def validate(self, search_budget):
        # The budget must allow at least one complete teaching set to be built.
        if self.proposals * self.teaching_set_size > search_budget:
            msg = 'Parameters will not produce a teaching set: ' + \
                  'proposals * teaching budget > search budget'
            raise Exception(msg)

    def next_fit_request(self):
        if self.models_to_fetch:
            return self.models_to_fetch.pop()
        else:
            # Start a new add-step (restarting once a full set was produced).
            self.models_fetched = []
            if len(self.current_set) == self.teaching_set_size:
                self.current_set = []
            xs = self.random.choice(self.pool_size,
                                    size=self.proposals,
                                    replace=False).tolist()
            for x in xs:
                # Candidate: current partial set extended with pool item x.
                self.models_to_fetch.append(self.current_set + [x])
            return self.models_to_fetch.pop()

    def next_fit_result(self, model, loss, set):
        # notice that the [] is not considered: bug!
        self.models_fetched.append((model, loss, set))
        # option: stay at the current size if no better set is found
        if not self.models_to_fetch:
            # Add-step complete: adopt the best extension tried this step.
            best_loss = np.inf
            for m, l, s in self.models_fetched:
                if l < best_loss:
                    self.current_model = m
                    self.current_loss = l
                    self.current_set = s
                    best_loss = l
            # to return any sized set remove following condition
            if len(self.current_set) == self.teaching_set_size:
                self.accept_best(self.current_model,
                                 self.current_loss,
                                 self.current_set)
class Result(object):
    """Mutable record collecting the outcome of a single experiment run."""

    def __init__(self):
        # Winning model, its teaching set, and its evaluation loss.
        self.best_model = None
        self.best_set = None
        self.best_evaluation_loss = None
        # Loss on the held-out test data, filled in after the search.
        self.test_loss = None
        # Per-step histories recorded during the search.
        self.current_sets = []
        self.best_sets = []
        self.fits = []
class Runner(object):
    """Drives a search algorithm against a learner over a candidate pool."""

    def construct_algorithm(self, options):
        # Instantiate the strategy named by options.algorithm.
        if options.algorithm == 'greedy-add':
            algorithm = GreedyAdd(options.rs,
                                  options.num_train,
                                  options.teaching_set_size,
                                  options.initial_teaching_set,
                                  options.proposals,
                                  options.search_budget)
        elif options.algorithm == 'random-index-greedy-swap':
            algorithm = RandomIndexGreedySwap(options.rs,
                                              options.num_train,
                                              options.teaching_set_size,
                                              options.initial_teaching_set,
                                              options.search_budget,
                                              options.proposals)
        elif options.algorithm == 'uniform':
            algorithm = UniformSampling(options.rs,
                                        options.num_train,
                                        options.teaching_set_size,
                                        options.initial_teaching_set)
        else:
            msg = 'Algorithm %s not recognized' % options.algorithm
            raise Exception(msg)
        return algorithm

    def run_experiment(self, instance, learner, options):
        """Run the configured search for options.search_budget fits.

        `instance` is the candidate pool; `learner` provides fit(items) and
        loss(model). Returns (best_loss, best_set).
        """
        options.num_train = len(instance)
        # Seeded RandomState so experiments are reproducible.
        options.rs = np.random.RandomState(seed=options.seed)
        algorithm = self.construct_algorithm(options)
        algorithm.validate(options.search_budget)
        # options.log may be a path (opened here) or an open file-like object.
        if options.log and type(options.log) == str:
            log = open(options.log, 'w')
        else:
            log = options.log
        if log:
            write_options(options, log)
        # Progress bar unless explicitly disabled.
        rng = range(options.search_budget) if options.no_progress else tqdm.trange(options.search_budget)
        for i in rng:
            # One step: request a candidate set, fit it, report the loss back.
            s = algorithm.next_fit_request()
            m = learner.fit([instance[x] for x in s])
            l = learner.loss(m)
            if log:
                log.write("%d, %f, %s\n" % (i, l, ' '.join(map(str, s))))
            algorithm.next_fit_result(m, l, s)
        # Close the log unless it is an in-memory StringIO the caller still
        # wants to read from.
        if log and not isinstance(log, io.StringIO):
            log.close()
        return algorithm.best_loss, algorithm.best_set
|
<filename>open-sphere-base/core/src/main/java/io/opensphere/core/util/swing/ColorIcon.java
package io.opensphere.core.util.swing;
import java.awt.Color;
import java.awt.Component;
import java.awt.Graphics;
import java.awt.Graphics2D;
import javax.swing.Icon;
/**
* Simple icon implementation that just draws a square in a constant color.
*/
public class ColorIcon implements Icon
{
/** Width of the icon. */
private int myIconWidth = 24;
/** Height of the icon. */
private int myIconHeight = 24;
/** Current Color of the icon. */
private Color myColor = Color.WHITE;
/**
* Instantiates a new color icon.
*
*/
public ColorIcon()
{
}
/**
* Instantiates a new color icon.
*
* @param c the {@link Color}
*/
public ColorIcon(Color c)
{
myColor = c;
}
/**
* Instantiates a new color icon.
*
* @param c the {@link Color}
* @param width the width
* @param height the height
*/
public ColorIcon(Color c, int width, int height)
{
myColor = c;
myIconHeight = height;
myIconWidth = width;
}
/**
* Get the color.
*
* @return the color
*/
public Color getColor()
{
return myColor;
}
@Override
public int getIconHeight()
{
return myIconHeight;
}
@Override
public int getIconWidth()
{
return myIconWidth;
}
@Override
public void paintIcon(Component c, Graphics g, int x, int y)
{
Graphics2D g2d = (Graphics2D)g.create();
g2d.setColor(myColor);
g2d.fillRect(x, y, myIconWidth, myIconHeight);
g2d.dispose();
}
/**
* Set the color.
*
* @param color the color to set
*/
public void setColor(Color color)
{
myColor = color;
}
/**
* Set the height of the icon.
*
* @param height iconHeight to set
*/
public void setIconHeight(int height)
{
myIconHeight = height;
}
/**
* Set the width of the icon.
*
* @param width iconWidth to set
*/
public void setIconWidth(int width)
{
myIconWidth = width;
}
}
|
<reponame>pombreda/nssecurity
#ifndef __LOG_H
#define __LOG_H

/*
 * Logging macros: each forwards its printf-style arguments to the matching
 * l_*_ function, prepending the calling function's name (__FUNCTION__) for
 * context. The do { ... } while (false) wrapper makes each macro usable as
 * a single statement.
 *
 * NOTE(review): the `format...` named-variadic spelling, `## format`, and a
 * bare `false` in C are GCC/C++ extensions -- confirm the build toolchain.
 */

/* Debug logging compiles away entirely in NDEBUG (release) builds. */
#ifdef NDEBUG
# define l_debug(format...)
#else
# define l_debug(format...) do { \
l_debug_(__FUNCTION__, ## format); \
} while (false)
#endif

#define l_message(format...) do { \
l_message_(__FUNCTION__, ## format); \
} while (false)

#define l_warning(format...) do { \
l_warning_(__FUNCTION__, ## format); \
} while (false)

#define l_error(format...) do { \
l_error_(__FUNCTION__, ## format); \
} while (false)

/* Backing implementations; call the macros above rather than these directly. */
void l_message_(const char *function, const char *format, ...);
void l_debug_(const char *function, const char *format, ...);
void l_warning_(const char *function, const char *format, ...);
void l_error_(const char *function, const char *format, ...);

#endif
|
def iterative_fibonacci(n):
    """Return the n-th Fibonacci number (F(0)=0, F(1)=1) iteratively."""
    prev, curr = 0, 1
    for _ in range(n):
        prev, curr = curr, prev + curr
    return prev
import React from 'react';
import {Table, Checkbox} from "semantic-ui-react";
/**
 * Matrix of checkboxes mapping teams (rows) to notification statuses
 * (columns). `map[teamIndex][statusIndex]` holds each checked state;
 * toggles are reported to the parent via onChange(teamIndex, statusIndex,
 * checked). Purely presentational -- no local state.
 */
export const NotificationMap = ({map, teams, statuses, onChange}) => {
    return (
        <Table definition>
            <Table.Header>
                <Table.Row textAlign='center'>
                    <Table.HeaderCell />
                    {
                        statuses.map((status, i) => (
                            <Table.HeaderCell key={i} content={status}/>
                        ))
                    }
                </Table.Row>
            </Table.Header>
            <Table.Body>
                {
                    teams.map((team, teamIndex) => (
                        <Table.Row textAlign='center' key={teamIndex}>
                            <Table.Cell content={team}/>
                            {
                                map[teamIndex].map((checked, statusIndex) => (
                                    <Table.Cell key={statusIndex}>
                                        <Checkbox
                                            checked={checked}
                                            onChange={(_, {checked}) => onChange(teamIndex, statusIndex, checked)}
                                        />
                                    </Table.Cell>
                                ))
                            }
                        </Table.Row>
                    ))
                }
            </Table.Body>
        </Table>
    );
};
def remove_duplicates(arr):
    """Return a new list with duplicates removed, preserving first-occurrence order.

    Uses an O(n) hash-based scan when the elements are hashable; if any
    element is unhashable (e.g. a list), falls back to the original O(n^2)
    membership scan so behavior is identical for every input the original
    accepted. The input list is never modified.
    """
    try:
        seen = set()
        unique = []
        for elem in arr:
            if elem not in seen:
                seen.add(elem)
                unique.append(elem)
        return unique
    except TypeError:
        # Unhashable elements: safe to restart, nothing above had side effects.
        unique = []
        for elem in arr:
            if elem not in unique:
                unique.append(elem)
        return unique
<reponame>gsavchenko/torontowastelookup<gh_stars>1-10
import React from "react";
import WasteWizardEntries from "./WasteWizardEntries";
import Favorites from "./Favorites";
import LocalStorage from "../utils/localStorage";
/**
 * Displays waste-wizard search results alongside the user's favorites.
 * Favorites are persisted in LocalStorage under the "favorites" key; the
 * list is re-read whenever a child entry reports a change.
 */
class SearchResults extends React.Component {
  constructor() {
    super();
    this.state = {
      search: "",
      // Seed favorites from storage, creating the entry on first run.
      favorites: LocalStorage.obtain("favorites", { createIfMissing: true })
    };
  }

  // Re-read favorites from LocalStorage (called after an entry is toggled).
  update = () => {
    const loadFavorites = LocalStorage.obtain("favorites");
    this.setState({ favorites: loadFavorites });
  };

  // Delegate a refresh of the search results to the parent.
  updateSearchResults = () => {
    this.props.update();
  };

  render() {
    return (
      <div>
        <WasteWizardEntries
          entries={this.props.searchResults}
          update={this.update}
        />
        <Favorites
          entries={this.state.favorites}
          update={this.updateSearchResults}
        />
      </div>
    );
  }
}
|
<gh_stars>0
const fs = require('fs');
const inquirer = require('inquirer');
const generateMarkdown = require('./utils/generateMarkdown.js');
// Prompt definitions consumed by inquirer.prompt(). Each required prompt's
// `validate` rejects empty input with a console hint; the screenshot URL
// prompt is shown only when addScreenshots was confirmed.
const questions = [
    //Title
    {
        type: 'input',
        name: 'title',
        message: 'What is the title of your README? (Required)',
        validate: titleInput => {
            if (titleInput) {
                return true;
            } else {
                console.log('A title is required!');
                return false;
            }
        }
    },
    //Description
    {
        type: 'input',
        name: 'description',
        message: 'Provide a description of the README (Required)',
        validate: descriptionInput => {
            if (descriptionInput) {
                return true;
            } else {
                console.log('You need to enter a README description!');
                return false;
            }
        }
    },
    //Links (added as extra)
    {
        type: 'input',
        name: 'link',
        message: 'Enter the link to your project. (Required)',
        validate: linkInput => {
            if (linkInput) {
                return true;
            } else {
                console.log('You need to enter a link to your project!');
                return false;
            }
        }
    },
    //Screenshot (added as extra) -- the confirm gates the URL prompt below.
    {
        type: 'confirm',
        name: 'addScreenshots',
        message: 'Would you like to add a screenshot?',
    },
    {
        type: 'input',
        name: 'url',
        message: 'Enter the URL',
        when: (urlInput) => urlInput.addScreenshots === true
    },
    //Installation
    {
        type: 'input',
        name: 'installation',
        message: 'How do you install your project? (Required)',
        validate: installationInput => {
            if (installationInput) {
                return true;
            } else {
                console.log('You need to enter installation information to your project!');
                return false;
            }
        }
    },
    //Usage
    {
        type: 'input',
        name: 'usage',
        message: 'How do you use this project? (Required)',
        validate: usageInput => {
            if (usageInput) {
                return true;
            } else {
                console.log('You need to provide information on how to use this project!');
                return false;
            }
        }
    },
    //License -- single choice from a fixed list.
    {
        type: 'list',
        name: 'licenses',
        message: 'What license would you like to use for this project?',
        choices: ["Apache", "GNU", "MIT", "ISC", "None"]
    },
    //Contribution
    {
        type: 'input',
        name: 'contribute',
        message: 'How can others contribute to this project? (Required)',
        validate: contributeInput => {
            if (contributeInput) {
                return true;
            } else {
                console.log('You must inform on how to contribute to this project!');
                return false;
            }
        }
    },
    //Testing
    {
        type: 'input',
        name: 'test',
        message: 'How do you test this project? (Required)',
        validate: testInput => {
            if (testInput) {
                return true;
            } else {
                console.log('You must describe how to test this project!');
                return false;
            }
        }
    },
    //Username
    {
        type: 'input',
        name: 'github',
        message: 'Enter your GitHub Username (Required)',
        validate: githubInput => {
            if (githubInput) {
                return true;
            } else {
                console.log('Please enter your GitHub username!');
                return false;
            }
        }
    },
    //Email
    {
        type: 'input',
        name: 'email',
        message: 'Please enter your email address (Required)',
        validate: emailInput => {
            if (emailInput) {
                return true;
            } else {
                console.log('You must enter your email address!');
                return false;
            }
        }
    }
];
// Persist the generated markdown to disk; logs on success, throws on failure.
function writeToFile(fileName, data) {
    fs.writeFile(fileName, data, (err) => {
        if (err) {
            throw err;
        }
        console.log('README complete!');
    });
}
//Function to initialize app: prompt for all answers, then render the markdown
//and write README.md. (Rejections from the prompt promise are unhandled here.)
function init() {
    inquirer.prompt(questions)
        .then(function (userInput) {
            console.log(userInput)
            writeToFile("README.md", generateMarkdown(userInput));
        });
}

// Function call to initialize app
init();
|
<gh_stars>0
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.ic_emoji_objects_twotone = void 0;
var ic_emoji_objects_twotone = {
"viewBox": "0 0 24 24",
"children": [{
"name": "g",
"attribs": {},
"children": [{
"name": "rect",
"attribs": {
"fill": "none",
"height": "24",
"width": "24"
},
"children": [{
"name": "rect",
"attribs": {
"fill": "none",
"height": "24",
"width": "24"
},
"children": []
}]
}]
}, {
"name": "g",
"attribs": {},
"children": [{
"name": "g",
"attribs": {},
"children": [{
"name": "g",
"attribs": {},
"children": []
}]
}, {
"name": "g",
"attribs": {},
"children": [{
"name": "g",
"attribs": {},
"children": [{
"name": "rect",
"attribs": {
"height": "1",
"opacity": ".3",
"width": "4",
"x": "10",
"y": "18"
},
"children": [{
"name": "rect",
"attribs": {
"height": "1",
"opacity": ".3",
"width": "4",
"x": "10",
"y": "18"
},
"children": []
}]
}, {
"name": "rect",
"attribs": {
"height": "1",
"opacity": ".3",
"width": "4",
"x": "10",
"y": "16"
},
"children": [{
"name": "rect",
"attribs": {
"height": "1",
"opacity": ".3",
"width": "4",
"x": "10",
"y": "16"
},
"children": []
}]
}, {
"name": "path",
"attribs": {
"d": "M12,3c-0.46,0-0.93,0.04-1.4,0.14C7.84,3.67,5.64,5.9,5.12,8.66c-0.48,2.61,0.48,5.01,2.22,6.56 C7.77,15.6,8,16.13,8,16.69V19c0,1.1,0.9,2,2,2h0.28c0.35,0.6,0.98,1,1.72,1s1.38-0.4,1.72-1H14c1.1,0,2-0.9,2-2v-2.31 c0-0.55,0.22-1.09,0.64-1.46C18.09,13.95,19,12.08,19,10C19,6.13,15.87,3,12,3z M14,19h-4v-1h4V19z M14,17h-4v-1h4V17z M15.31,13.74c-0.09,0.08-0.16,0.18-0.24,0.26H8.92c-0.08-0.09-0.15-0.19-0.24-0.27c-1.32-1.18-1.91-2.94-1.59-4.7 c0.36-1.94,1.96-3.55,3.89-3.93C11.32,5.03,11.66,5,12,5c2.76,0,5,2.24,5,5C17,11.43,16.39,12.79,15.31,13.74z"
},
"children": [{
"name": "path",
"attribs": {
"d": "M12,3c-0.46,0-0.93,0.04-1.4,0.14C7.84,3.67,5.64,5.9,5.12,8.66c-0.48,2.61,0.48,5.01,2.22,6.56 C7.77,15.6,8,16.13,8,16.69V19c0,1.1,0.9,2,2,2h0.28c0.35,0.6,0.98,1,1.72,1s1.38-0.4,1.72-1H14c1.1,0,2-0.9,2-2v-2.31 c0-0.55,0.22-1.09,0.64-1.46C18.09,13.95,19,12.08,19,10C19,6.13,15.87,3,12,3z M14,19h-4v-1h4V19z M14,17h-4v-1h4V17z M15.31,13.74c-0.09,0.08-0.16,0.18-0.24,0.26H8.92c-0.08-0.09-0.15-0.19-0.24-0.27c-1.32-1.18-1.91-2.94-1.59-4.7 c0.36-1.94,1.96-3.55,3.89-3.93C11.32,5.03,11.66,5,12,5c2.76,0,5,2.24,5,5C17,11.43,16.39,12.79,15.31,13.74z"
},
"children": []
}]
}]
}]
}, {
"name": "g",
"attribs": {},
"children": [{
"name": "g",
"attribs": {},
"children": [{
"name": "rect",
"attribs": {
"height": "3",
"width": "1",
"x": "11.5",
"y": "11"
},
"children": [{
"name": "rect",
"attribs": {
"height": "3",
"width": "1",
"x": "11.5",
"y": "11"
},
"children": []
}]
}, {
"name": "rect",
"attribs": {
"height": "3",
"transform": "matrix(0.7071 -0.7071 0.7071 0.7071 -4.0312 10.8536)",
"width": "1",
"x": "10.59",
"y": "8.79"
},
"children": [{
"name": "rect",
"attribs": {
"height": "3",
"transform": "matrix(0.7071 -0.7071 0.7071 0.7071 -4.0312 10.8536)",
"width": "1",
"x": "10.59",
"y": "8.79"
},
"children": []
}]
}, {
"name": "rect",
"attribs": {
"height": "3",
"transform": "matrix(-0.7071 -0.7071 0.7071 -0.7071 14.7678 26.7028)",
"width": "1",
"x": "12.41",
"y": "8.79"
},
"children": [{
"name": "rect",
"attribs": {
"height": "3",
"transform": "matrix(-0.7071 -0.7071 0.7071 -0.7071 14.7678 26.7028)",
"width": "1",
"x": "12.41",
"y": "8.79"
},
"children": []
}]
}]
}]
}]
}]
};
exports.ic_emoji_objects_twotone = ic_emoji_objects_twotone; |
<reponame>partynetwork/example-cra-antd
import useSWR from 'swr';
import axios from 'axios';
import { UserInterface } from '../types/user';

// Fetch the currently authenticated user. On HTTP failure the axios
// *response* object (not the Error) is rethrown so callers can inspect
// status and payload directly.
const userFetcher = async (): Promise<UserInterface> => {
  try {
    const { data } = await axios.get('/api/users/me');
    return data;
  } catch (e) {
    throw e.response;
  }
};

/**
 * SWR-backed hook exposing the current user session.
 * Returns { loading, error, user, isAuthentication, mutate }, where mutate
 * revalidates/overwrites the cached user.
 */
export default function useUser() {
  const { data, mutate, error } = useSWR('api_user', userFetcher);
  // Loading only while neither data nor an error has arrived yet.
  const loading = !data && !error;
  return {
    loading,
    error: error || undefined,
    user: data,
    // A successfully fetched user is treated as an authenticated session.
    isAuthentication: !!data,
    mutate,
  };
}
|
# (C) Datadog, Inc. 2018
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
from datadog_checks.kyototycoon import KyotoTycoonCheck
from .common import (
URL
)
def test_check(aggregator, kyototycoon):
    """
    Testing Kyototycoon check: run the check once and assert that every
    gauge, per-db gauge and rate metric (plus the service check) is reported
    exactly once with the configured tags.
    """
    kt = KyotoTycoonCheck('kyototycoon', {}, {})

    TAGS = ['optional:tag1']
    instance = {
        'report_url': '{0}/rpc/report'.format(URL),
        'tags': TAGS
    }

    # run the check
    kt.check(instance)

    # Materialize the dict views as lists so they can be concatenated and
    # iterated on both Python 2 and Python 3 (on Python 3, dict.values()
    # returns a view object that does not support "+").
    GAUGES = list(KyotoTycoonCheck.GAUGES.values())
    DB_GAUGES = list(KyotoTycoonCheck.DB_GAUGES.values())
    TOTALS = list(KyotoTycoonCheck.TOTALS.values())
    RATES = list(KyotoTycoonCheck.RATES.values())

    # all the RATE type metrics
    ALL_RATES = TOTALS + RATES

    # prefix every metric with check name (kyototycoon.)
    # no replications, so ignore kyototycoon.replication.delay
    for mname in GAUGES:
        if mname != 'replication.delay':
            aggregator.assert_metric('kyototycoon.{0}'.format(mname), tags=TAGS, count=1)
    for mname in DB_GAUGES:
        aggregator.assert_metric('kyototycoon.{0}'.format(mname), tags=TAGS + ['db:0'], count=1)
    for mname in ALL_RATES:
        aggregator.assert_metric('kyototycoon.{0}_per_s'.format(mname), tags=TAGS, count=1)

    # service check
    aggregator.assert_service_check(
        KyotoTycoonCheck.SERVICE_CHECK_NAME, status=KyotoTycoonCheck.OK, tags=TAGS, count=1)

    aggregator.assert_all_metrics_covered()
|
<filename>persistence-jdbc/scaladsl/src/test/scala/com/lightbend/lagom/scaladsl/persistence/jdbc/JdbcReadSideSpec.scala
/*
* Copyright (C) 2016 Lightbend Inc. <http://www.lightbend.com>
*/
package com.lightbend.lagom.scaladsl.persistence.jdbc
import akka.NotUsed
import akka.persistence.jdbc.query.scaladsl.JdbcReadJournal
import akka.persistence.query.PersistenceQuery
import akka.stream.scaladsl.Source
import com.lightbend.lagom.internal.scaladsl.persistence.PersistentEntityActor
import com.lightbend.lagom.scaladsl.persistence.TestEntity.Evt
import com.lightbend.lagom.scaladsl.persistence._
import scala.concurrent.Future
/**
 * Read-side spec backed by the JDBC read journal. Adapts Lagom offsets to the
 * journal's Long sequence numbers and exposes the test entity's read side.
 */
class JdbcReadSideSpec extends JdbcPersistenceSpec with AbstractReadSideSpec {
  lazy val readSide = new JdbcTestEntityReadSide(session)
  lazy val queries = PersistenceQuery(system).readJournalFor[JdbcReadJournal](JdbcReadJournal.Identifier)

  override def eventStream[Event <: AggregateEvent[Event]](aggregateTag: AggregateEventTag[Event], fromOffset: Offset): Source[EventStreamElement[Event], NotUsed] = {
    val tag = aggregateTag.tag
    // Translate the Lagom Offset into the Long sequence number the JDBC read
    // journal expects; resume one past the last seen sequence number.
    // (Also replaces the deprecated lowercase `0l` literal with `0L`.)
    val offset = fromOffset match {
      case NoOffset        => 0L
      case Sequence(value) => value + 1
      case other           => throw new IllegalArgumentException(s"JDBC does not support ${other.getClass.getSimpleName} offsets")
    }
    queries.eventsByTag(tag, offset)
      .map { env =>
        new EventStreamElement[Event](
          PersistentEntityActor.extractEntityId(env.persistenceId),
          env.event.asInstanceOf[Event],
          Sequence(env.offset): Offset
        )
      }
  }

  override def processorFactory(): ReadSideProcessor[Evt] = {
    new JdbcTestEntityReadSide.TestEntityReadSideProcessor(jdbcReadSide)
  }

  override def getAppendCount(id: String): Future[Long] = {
    readSide.getAppendCount(id)
  }
}
|
# Show available GPUs before starting training.
nvidia-smi

# Train ("augment") the MDENAS architecture on CIFAR-10 using the DARTS
# augment script. The quoted Genotype literal encodes the discovered cells:
# `normal` / `reduce` list the two input ops per intermediate node, and the
# `*_concat` ranges select which node outputs are concatenated.
python /userhome/project/pt.darts/augment.py --name MDENAS --dataset cifar10 --genotype "Genotype(
normal=[
[('sep_conv_5x5', 1), ('sep_conv_3x3', 0)],
[('skip_connect', 0), ('sep_conv_5x5', 1)],
[('sep_conv_5x5', 3), ('sep_conv_3x3', 1)],
[('dil_conv_5x5', 3), ('max_pool_3x3', 4)],
],
normal_concat=range(2, 6),
reduce=[
[('max_pool_3x3', 0), ('sep_conv_5x5', 1)],
[('skip_connect', 0), ('skip_connect', 1)],
[('sep_conv_3x3', 3), ('skip_connect', 2)],
[('dil_conv_3x3', 3), ('sep_conv_5x5', 0)],
],
reduce_concat=range(2, 6))"
<reponame>ddmoyu/gus<filename>src/utils/table.ts
import chalk = require('chalk')
import { table, TableUserConfig } from 'table'
// A git user record: arbitrary string fields (typically name/email).
interface User { [key: string]: string }

// Shared table layout: red "Git users list" header plus per-column
// alignment for the ID / Name / Email / Status columns.
const config: TableUserConfig = {
  header: {
    alignment: 'center',
    content: chalk.red('Git users list')
  },
  columns: [
    { alignment: 'center' },
    { alignment: 'right' },
    { alignment: 'right' },
    { alignment: 'center' }
  ]
}
/**
 * Build the table rows for the git users list: a header row, then the local
 * and global users (when set), then every remaining stored user that is not
 * already shown as the local or global entry.
 */
function list(arr: User[], local: User, global: User): (string | number)[][] {
  const tb: (string | number)[][] = [[chalk.greenBright('ID'), chalk.greenBright('Name'), chalk.greenBright('Email'), chalk.greenBright('Status')]]
  let idx = 0
  const trHex = chalk.hex('#cec889')
  if (local.name) {
    tb.push([trHex(idx), trHex(local.name), trHex(local.email), chalk.yellow('Local')])
    idx++
  }
  if (global.name) {
    tb.push([trHex(idx), trHex(global.name), trHex(global.email), chalk.cyan('Global')])
    idx++
  }
  // CLEANUP: the original wrapped this loop in `if (arr.length > 0)` with a
  // duplicated `return tb` inside and after — iterating an empty array is
  // already a no-op, so a single loop and a single return suffice.
  for (const user of arr) {
    // Skip users already shown as the local or global entry.
    if (user.name === local.name || user.name === global.name) {
      continue
    }
    tb.push([trHex(idx++), trHex(user.name), trHex(user.email), ''])
  }
  return tb
}
/**
 * Render the users list as a formatted, gray-colored table string.
 */
function listTable(arr: User[], local: User, global: User): string {
  return chalk.gray(table(list(arr, local, global), config))
}

export {
  list,
  listTable
}
|
<reponame>anticipasean/girakkafunc
package cyclops.pure.instances.jdk;
import static cyclops.pure.kinds.CompletableFutureKind.narrowK;
import cyclops.function.higherkinded.DataWitness.completableFuture;
import cyclops.function.higherkinded.DataWitness.future;
import cyclops.function.higherkinded.Higher;
import cyclops.pure.arrow.MonoidK;
import cyclops.pure.arrow.MonoidKs;
import cyclops.container.control.Either;
import cyclops.async.Future;
import cyclops.container.control.Option;
import cyclops.function.combiner.Monoid;
import cyclops.pure.instances.control.FutureInstances;
import cyclops.pure.kinds.CompletableFutureKind;
import cyclops.pure.typeclasses.InstanceDefinitions;
import cyclops.pure.typeclasses.Pure;
import cyclops.pure.typeclasses.comonad.Comonad;
import cyclops.pure.typeclasses.foldable.Foldable;
import cyclops.pure.typeclasses.foldable.Unfoldable;
import cyclops.pure.typeclasses.functor.Functor;
import cyclops.pure.typeclasses.monad.Applicative;
import cyclops.pure.typeclasses.monad.Monad;
import cyclops.pure.typeclasses.monad.MonadPlus;
import cyclops.pure.typeclasses.monad.MonadRec;
import cyclops.pure.typeclasses.monad.MonadZero;
import cyclops.pure.typeclasses.monad.Traverse;
import cyclops.pure.typeclasses.monad.TraverseByTraverse;
import java.util.concurrent.CompletableFuture;
import java.util.function.Function;
import lombok.AllArgsConstructor;
import lombok.experimental.UtilityClass;
/**
 * Companion class for creating Type Class instances for working with CompletableFutures
 *
 * @author johnmcclean
 */
@UtilityClass
public class CompletableFutureInstances {

    // Shared instance backing every accessor below.
    // (@UtilityClass makes this field static and the class final.)
    private final CompletableFutureTypeclasses INSTANCE = new CompletableFutureTypeclasses();

    /**
     * Bundles every supported type-class instance for the completableFuture
     * witness into one InstanceDefinitions object. Comonad and Unfoldable are
     * not supported and return Option.none().
     */
    public static InstanceDefinitions<completableFuture> definitions() {
        return new InstanceDefinitions<completableFuture>() {
            @Override
            public <T, R> Functor<completableFuture> functor() {
                return CompletableFutureInstances.functor();
            }

            @Override
            public <T> Pure<completableFuture> unit() {
                return CompletableFutureInstances.unit();
            }

            @Override
            public <T, R> Applicative<completableFuture> applicative() {
                return CompletableFutureInstances.applicative();
            }

            @Override
            public <T, R> Monad<completableFuture> monad() {
                return CompletableFutureInstances.monad();
            }

            @Override
            public <T, R> Option<MonadZero<completableFuture>> monadZero() {
                return Option.some(CompletableFutureInstances.monadZero());
            }

            @Override
            public <T> Option<MonadPlus<completableFuture>> monadPlus() {
                return Option.some(CompletableFutureInstances.monadPlus());
            }

            @Override
            public <T> MonadRec<completableFuture> monadRec() {
                return CompletableFutureInstances.monadRec();
            }

            @Override
            public <T> Option<MonadPlus<completableFuture>> monadPlus(MonoidK<completableFuture> m) {
                return Option.some(CompletableFutureInstances.monadPlus(m));
            }

            @Override
            public <C2, T> Traverse<completableFuture> traverse() {
                return CompletableFutureInstances.traverse();
            }

            @Override
            public <T> Foldable<completableFuture> foldable() {
                return CompletableFutureInstances.foldable();
            }

            // Not provided for CompletableFuture.
            @Override
            public <T> Option<Comonad<completableFuture>> comonad() {
                return Option.none();
            }

            // Not provided for CompletableFuture.
            @Override
            public <T> Option<Unfoldable<completableFuture>> unfoldable() {
                return Option.none();
            }
        };
    }

    // All accessors below return the single shared INSTANCE, typed as the
    // requested type class.

    public static <T, R> Functor<completableFuture> functor() {
        return INSTANCE;
    }

    public static <T> Pure<completableFuture> unit() {
        return INSTANCE;
    }

    public static <T, R> Applicative<completableFuture> applicative() {
        return INSTANCE;
    }

    public static <T, R> Monad<completableFuture> monad() {
        return INSTANCE;
    }

    public static <T, R> MonadZero<completableFuture> monadZero() {
        return INSTANCE;
    }

    public static <T, R> MonadRec<completableFuture> monadRec() {
        return INSTANCE;
    }

    public static <T> MonadPlus<completableFuture> monadPlus() {
        return INSTANCE;
    }

    // NOTE(review): the supplied MonoidK `m` is ignored and the shared
    // INSTANCE (built with the default monoid) is returned — confirm whether
    // this should be `INSTANCE.withMonoidK(m)` via the generated @With method.
    public static <T> MonadPlus<completableFuture> monadPlus(MonoidK<completableFuture> m) {
        return INSTANCE;
    }

    public static <L> Traverse<completableFuture> traverse() {
        return INSTANCE;
    }

    public static <L> Foldable<completableFuture> foldable() {
        return INSTANCE;
    }

    /**
     * Single implementation of all supported type classes for the
     * completableFuture higher-kinded witness.
     */
    @AllArgsConstructor
    @lombok.With
    public static class CompletableFutureTypeclasses implements MonadPlus<completableFuture>, MonadRec<completableFuture>,
        TraverseByTraverse<completableFuture>,
        Foldable<completableFuture> {

        // Strategy for combining two futures in the MonadPlus instance.
        private final MonoidK<completableFuture> monoidK;

        public CompletableFutureTypeclasses() {
            // Default: the first future to complete wins.
            monoidK = MonoidKs.firstCompleteCompletableFuture();
        }

        @Override
        public <T> T foldRight(Monoid<T> monoid,
                               Higher<completableFuture, T> ds) {
            // Delegates folding to cyclops' Future wrapper.
            return Future.of(narrowK(ds))
                         .fold(monoid);
        }

        @Override
        public <T> T foldLeft(Monoid<T> monoid,
                              Higher<completableFuture, T> ds) {
            // A future holds at most one value, so left and right folds agree.
            return Future.of(narrowK(ds))
                         .fold(monoid);
        }

        @Override
        public <T, R> Higher<completableFuture, R> flatMap(Function<? super T, ? extends Higher<completableFuture, R>> fn,
                                                           Higher<completableFuture, T> ds) {
            return CompletableFutureKind.widen(CompletableFutureKind.narrow(ds)
                                                                    .thenCompose(fn.andThen(CompletableFutureKind::narrowK)));
        }

        @Override
        public <C2, T, R> Higher<C2, Higher<completableFuture, R>> traverseA(Applicative<C2> applicative,
                                                                             Function<? super T, ? extends Higher<C2, R>> fn,
                                                                             Higher<completableFuture, T> ds) {
            CompletableFuture<T> future = narrowK(ds);
            // NOTE: join() blocks the calling thread until the future completes.
            return applicative.map(CompletableFutureKind::completedFuture,
                                   fn.apply(future.join()));
        }

        @Override
        public <T, R> R foldMap(Monoid<R> mb,
                                Function<? super T, ? extends R> fn,
                                Higher<completableFuture, T> ds) {
            // Map first, then fold the (at most one) resulting value.
            CompletableFuture<R> opt = narrowK(ds).thenApply(fn);
            return Future.of(opt)
                         .fold(mb);
        }

        @Override
        public <T, R> Higher<completableFuture, R> ap(Higher<completableFuture, ? extends Function<T, R>> fn,
                                                      Higher<completableFuture, T> apply) {
            // Applicative apply: combine the function future with the value future.
            return CompletableFutureKind.widen(narrowK(fn).thenCombine(narrowK(apply),
                                                                       (a, b) -> a.apply(b)));
        }

        @Override
        public <T> Higher<completableFuture, T> unit(T value) {
            // Lift a plain value into an already-completed future.
            return CompletableFutureKind.widen(CompletableFuture.completedFuture(value));
        }

        @Override
        public <T, R> Higher<completableFuture, R> map(Function<? super T, ? extends R> fn,
                                                       Higher<completableFuture, T> ds) {
            return CompletableFutureKind.widen(narrowK(ds).thenApply(fn));
        }

        @Override
        public <T, R> Higher<completableFuture, R> tailRec(T initial,
                                                           Function<? super T, ? extends Higher<completableFuture, ? extends Either<T, R>>> fn) {
            // Stack-safe recursion is delegated to the Future MonadRec instance.
            Higher<future, R> x = FutureInstances.monadRec()
                                                .tailRec(initial,
                                                         fn.andThen(CompletableFutureKind::narrowK)
                                                           .andThen(Future::of));
            return CompletableFutureKind.narrowFuture(x);
        }

        @Override
        public <T> MonoidK<completableFuture> monoid() {
            return monoidK;
        }
    }
}
|
<reponame>giosil/wrapp<gh_stars>0
package org.dew.wrapp.impl;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.net.URL;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.dew.wrapp.App;
import org.dew.wrapp.MenuItem;
import org.dew.wrapp.Page;
import org.dew.wrapp.json.JSON;
import org.dew.wrapp.mgr.IAppManager;
import org.dew.wrapp.util.WUtil;
/**
 * Default {@link IAppManager} implementation that reads page and menu
 * definitions from JSON configuration files. Each file is looked up first in
 * the user's configuration folder ({@code ~/<CONFIG_FOLDER_NAME>/<file>}) and
 * then, as a fallback, on the classpath.
 */
public
class DefaultAppManager implements IAppManager
{
  /**
   * Loads the page registry from {@code wrapp_pages.json}.
   *
   * @return map of page id to {@link Page}; empty when the file is missing,
   *         empty, or not a JSON object
   * @throws Exception if the file cannot be read or parsed
   */
  @Override
  public
  Map<String, Page> loadPages()
    throws Exception
  {
    String json = loadFile("wrapp_pages.json");
    // "{}" (2 chars) is the smallest payload worth parsing, hence < 3.
    if(json == null || json.length() < 3) {
      return new HashMap<String, Page>();
    }
    Map<String, Object> data = JSON.parseObj(json);
    if(data == null || data.isEmpty()) {
      return new HashMap<String, Page>();
    }
    Map<String, Page> mapResult = new HashMap<String, Page>(data.size());
    Iterator<Map.Entry<String, Object>> iterator = data.entrySet().iterator();
    while(iterator.hasNext()) {
      Map.Entry<String, Object> entry = iterator.next();
      Object value = entry.getValue();
      if(value instanceof Map) {
        Page page = WUtil.populateBean(Page.class, WUtil.toMapObject(value));
        if(page == null) continue;
        // The page id is the JSON key, not a field of the page object.
        page.setId(entry.getKey());
        mapResult.put(entry.getKey(), page);
      }
    }
    return mapResult;
  }

  /**
   * Loads the menu registry from {@code wrapp_menus.json}.
   *
   * @return map of menu id to its list of {@link MenuItem}s; empty when the
   *         file is missing, empty, or not a JSON object
   * @throws Exception if the file cannot be read or parsed
   */
  @Override
  public
  Map<String, List<MenuItem>> loadMenus()
    throws Exception
  {
    String json = loadFile("wrapp_menus.json");
    if(json == null || json.length() < 3) {
      return new HashMap<String, List<MenuItem>>();
    }
    Map<String, Object> data = JSON.parseObj(json);
    if(data == null || data.isEmpty()) {
      return new HashMap<String, List<MenuItem>>();
    }
    Map<String, List<MenuItem>> mapResult = new HashMap<String, List<MenuItem>>(data.size());
    Iterator<Map.Entry<String, Object>> iterator = data.entrySet().iterator();
    while(iterator.hasNext()) {
      Map.Entry<String, Object> entry = iterator.next();
      Object value = entry.getValue();
      if(value instanceof List) {
        List<MenuItem> listOfMenuItem = WUtil.toListOfBean(value, MenuItem.class);
        if(listOfMenuItem == null || listOfMenuItem.size() == 0) {
          continue;
        }
        // Normalize menu items: derive a missing parent id from the item's
        // own dotted id (e.g. "a.b.c" -> parent "a.b") for linked items.
        for(MenuItem menuItem : listOfMenuItem) {
          String link = menuItem.getLink();
          if(link != null && link.length() > 0) {
            String parent = menuItem.getParent();
            if(parent == null || parent.length() == 0) {
              String menuItemId = menuItem.getId();
              if(menuItemId != null) {
                int iLastSep = menuItemId.lastIndexOf('.');
                if(iLastSep > 0) {
                  menuItem.setParent(menuItemId.substring(0, iLastSep));
                }
              }
            }
          }
        }
        mapResult.put(entry.getKey(), listOfMenuItem);
      }
    }
    return mapResult;
  }

  /**
   * Loads a configuration file as a string, preferring the user's
   * configuration folder over the classpath fallback.
   *
   * @param sFile bare file name
   * @return the file content, or {@code null} when missing or trivially small
   * @throws Exception if reading fails
   */
  protected static
  String loadFile(String sFile)
    throws Exception
  {
    String sUserHome = System.getProperty("user.home");
    String sFilePath = sUserHome + File.separator + App.CONFIG_FOLDER_NAME + File.separator + sFile;
    byte[] content = null;
    File file = new File(sFilePath);
    if(file.exists()) {
      content = readFile(sFilePath);
    }
    else {
      content = readFile(sFile);
    }
    if(content == null || content.length < 3) {
      return null;
    }
    // NOTE(review): uses the platform default charset — confirm UTF-8 is not
    // required for these JSON files.
    return new String(content);
  }

  /**
   * Reads the whole content of a file, from the filesystem when the name
   * contains a path separator, otherwise from the classpath.
   *
   * @param sFile file name or path
   * @return the file content as a byte array
   * @throws Exception on I/O errors, or FileNotFoundException when a
   *         classpath resource cannot be located
   */
  protected static
  byte[] readFile(String sFile)
    throws Exception
  {
    int iFileSep = sFile.indexOf('/');
    if(iFileSep < 0) iFileSep = sFile.indexOf('\\');
    InputStream is = null;
    if(iFileSep < 0) {
      URL url = Thread.currentThread().getContextClassLoader().getResource(sFile);
      // BUG FIX: getResource returns null when the resource does not exist;
      // dereferencing it threw a NullPointerException instead of a clear error.
      if(url == null) {
        throw new java.io.FileNotFoundException("Classpath resource not found: " + sFile);
      }
      is = url.openStream();
    }
    else {
      is = new FileInputStream(sFile);
    }
    try {
      int n;
      ByteArrayOutputStream baos = new ByteArrayOutputStream();
      byte[] buff = new byte[1024];
      while((n = is.read(buff)) > 0) baos.write(buff, 0, n);
      return baos.toByteArray();
    }
    finally {
      // Best-effort close; any read error has already propagated.
      if(is != null) try{ is.close(); } catch(Exception ex) {}
    }
  }
}
|
#!/bin/bash
# Usage: build-passenger-orig-tarball.sh <OUTPUT> <NGINX_MODULE_TARBALL>
# Builds the Passenger RPM source tarball from a Passenger source directory.
#
# Required environment variables:
#
#   PASSENGER_VERSION
#   PASSENGER_TARBALL_NAME
#   PASSENGER_TARBALL
#   NGINX_TARBALL
#   NGINX_TARBALL_NAME
set -e

ROOTDIR=`dirname "$0"`
ROOTDIR=`cd "$ROOTDIR/../.." && pwd`
source "$ROOTDIR/internal/lib/library.sh"

require_args_exact 2 "$@"
require_envvar PASSENGER_VERSION "$PASSENGER_VERSION"
require_envvar PASSENGER_TARBALL_NAME "$PASSENGER_TARBALL_NAME"
require_envvar PASSENGER_TARBALL "$PASSENGER_TARBALL"
require_envvar NGINX_TARBALL "$NGINX_TARBALL"
# BUG FIX: NGINX_TARBALL_NAME is used below when extracting the module tarball
# but was never validated, producing a confusing tar error instead of a clear
# missing-variable message.
require_envvar NGINX_TARBALL_NAME "$NGINX_TARBALL_NAME"

header "Creating Passenger official tarball"
# /passenger is mounted read-only, but 'rake' may have to create files, e.g.
# to generate documentation files. So we copy it to a temporary directory
# which is writable.
run rm -rf /tmp/passenger
if [[ -e /passenger/.git ]]; then
	run mkdir /tmp/passenger
	echo "+ cd /passenger (expecting local git repo to copy from)"
	cd /passenger
	echo "+ Copying all git committed files to /tmp/passenger"
	(
		set -o pipefail
		git archive --format=tar HEAD | tar -C /tmp/passenger -x
		submodules=`git submodule status | awk '{ print $2 }'`
		for submodule in $submodules; do
			echo "+ Copying all git committed files from submodule $submodule"
			pushd $submodule >/dev/null
			mkdir -p /tmp/passenger/$submodule
			git archive --format=tar HEAD | tar -C /tmp/passenger/$submodule -x
			popd >/dev/null
		done
	)
	if [[ $? != 0 ]]; then
		exit 1
	fi
else
	run cp -dpR /passenger /tmp/passenger
fi
echo "+ cd /tmp/passenger"
cd /tmp/passenger
run mkdir ~/pkg
# the Passenger dev gems are used here, so check the top level Gemfile
run rake package:set_official package:tarball CACHING=false PKG_DIR=~/pkg

header "Extracting Passenger tarball"
echo "+ cd ~/pkg"
cd ~/pkg
run tar xzf $PASSENGER_TARBALL
run rm -f $PASSENGER_TARBALL

header "Extracting Nginx into Passenger directory"
echo "+ cd $PASSENGER_TARBALL_NAME-$PASSENGER_VERSION"
cd $PASSENGER_TARBALL_NAME-$PASSENGER_VERSION
run tar xzf ~/rpmbuild/SOURCES/$NGINX_TARBALL

header "Extracting Nginx into Passenger directory for Module"
run tar xzf ~/rpmbuild/SOURCES/${NGINX_TARBALL_NAME}-${2}.tar.gz

header "Packaging up"
cd ..
# Identical timestamps make the orig tarball reproducible across builds.
echo "+ Normalizing timestamps"
find $PASSENGER_TARBALL_NAME-$PASSENGER_VERSION -print0 | xargs -0 touch -d '2013-10-27 00:00:00 UTC'
echo "+ Creating final orig tarball"
tar -c $PASSENGER_TARBALL_NAME-$PASSENGER_VERSION | gzip --no-name --best > "$1"
run rm -rf ~/pkg
|
NODE_LAMBDA=./node_modules/node-lambda/bin/node-lambda

# Run the lambda handler locally once per sample SNS event payload,
# in the same order as before.
EVENTS=(
	sns-codepipeline-event-pipeline-started
	sns-codepipeline-event-stage-started
	sns-codepipeline-event-stage-succeeded
	sns-codepipeline-event-stage-failed
	sns-cloudwatch-event
	sns-cloudwatch-event-metricmath
	sns-event
	sns-elastic-beanstalk-event
	sns-codedeploy-event
	sns-codedeploy-configuration
	sns-elasticache-event
	sns-autoscaling-event
)

for event in "${EVENTS[@]}"; do
	$NODE_LAMBDA run -x test/context.json -j "test/${event}.json"
done
|
// No-op callback stub: accepts a single argument `d` and does nothing.
// NOTE(review): a bare anonymous function expression is not valid in
// statement position — presumably this fragment was extracted from a larger
// expression (e.g. a callback argument); confirm its original context.
function(d){}
|
<filename>lib/devise_authorizable.rb
# Dependencies: Devise provides authentication, CanCan provides authorization.
require 'devise'
require 'cancan'
require 'devise_authorizable/devise'

# Top-level namespace wiring Devise together with CanCan-based authorization.
# Components are lazily loaded via autoload.
module DeviseAuthorizable
  autoload :VERSION, 'devise_authorizable/version'
  autoload :Controller, 'devise_authorizable/controller'

  # Model-level mixins (role handling and CanCan abilities).
  module Model
    autoload :Role, 'devise_authorizable/model/role'
    autoload :Ability, 'devise_authorizable/model/ability'
  end
end

# Load the Rails engine integration.
require 'devise_authorizable/engine'
|
//
//  LSUTimeType.h
//  LSBluetooth-Library
//
//  Created by lshenrong on 17/2/13.
//  Copyright © 2017年 Lifesense. All rights reserved.
//
//  NOTE(review): the header banner says "LSUTimeType" but the class declared
//  below is LSDTimeType — confirm which name is intended.

#import "LSDBaseModel.h"
#import "LSConst.h"

// Model object carrying a single TimeType value (TimeType is declared in
// LSConst.h).
@interface LSDTimeType : LSDBaseModel

// The time type represented by this model.
@property (nonatomic, assign) TimeType timeType;

@end
|
// Servlet endpoint that emails a birthday greeting to a user.
// NOTE(review): this is illustrative pseudocode — the `...` placeholders are
// not valid Java and must be replaced with real database lookups (and the
// missing semicolon after `name` fixed) before this can compile.
@WebServlet("/sendBirthdayEmail")
public class BirthdayEmailServlet extends HttpServlet {

    protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        // Fetch the user's email address, name, etc. from the database
        String emailAddress = ...;
        String name = ...
        // Construct the email using the fetched data
        String recipient = emailAddress;
        String subject = "Happy Birthday!";
        String body = "Happy Birthday, " + name + "! Wishing you a day filled with joy and a year full of happiness!";
        // Send the email
        EmailService.sendEmail(recipient, subject, body);
    }
}
#!/usr/bin/env bash
set -e

# Run golangci-lint over the whole package tree, forwarding any leading
# command-line options. Option collection stops at the first '-' or '--';
# that separator and everything after it are discarded.
cmd=(golangci-lint run)

export GO111MODULE=off

OPTIONS=()
while (( $# > 0 )); do
	case "$1" in
		-|--) break ;;
		*) OPTIONS+=("$1"); shift ;;
	esac
done

"${cmd[@]}" "${OPTIONS[@]}" ./...
|
<reponame>dangdangdotcom/incubator-skywalking
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.apm.collector.ui.query;
import java.text.ParseException;
import org.apache.skywalking.apm.collector.core.module.ModuleManager;
import org.apache.skywalking.apm.collector.core.util.ObjectUtils;
import org.apache.skywalking.apm.collector.storage.ui.alarm.Alarm;
import org.apache.skywalking.apm.collector.storage.ui.alarm.AlarmType;
import org.apache.skywalking.apm.collector.storage.ui.common.Duration;
import org.apache.skywalking.apm.collector.storage.ui.common.Pagination;
import org.apache.skywalking.apm.collector.ui.graphql.Query;
import org.apache.skywalking.apm.collector.ui.service.AlarmService;
import org.apache.skywalking.apm.collector.ui.utils.DurationUtils;
import org.apache.skywalking.apm.collector.ui.utils.PaginationUtils;
/**
 * GraphQL query handler that loads alarm lists filtered by keyword, alarm
 * type, time range, and pagination.
 *
 * @author peng-yongsheng
 */
public class AlarmQuery implements Query {

    private final ModuleManager moduleManager;
    // Lazily created service facade; see getAlarmService().
    private AlarmService alarmService;

    public AlarmQuery(ModuleManager moduleManager) {
        this.moduleManager = moduleManager;
    }

    // Lazily instantiates the AlarmService on first use.
    // NOTE(review): not synchronized — confirm queries run on a single thread
    // or that creating a duplicate AlarmService is harmless.
    private AlarmService getAlarmService() {
        if (ObjectUtils.isEmpty(alarmService)) {
            this.alarmService = new AlarmService(moduleManager);
        }
        return alarmService;
    }

    /**
     * Loads alarms of the requested type within the given duration.
     * The second-precision time buckets are divided by 100, dropping the
     * seconds digits to produce coarser (minute-level) buckets.
     */
    public Alarm loadAlarmList(String keyword, AlarmType alarmType, Duration duration,
        Pagination paging) throws ParseException {
        long startTimeBucket = DurationUtils.INSTANCE.durationToSecondTimeBucket(duration.getStep(), duration.getStart()) / 100;
        long endTimeBucket = DurationUtils.INSTANCE.durationToSecondTimeBucket(duration.getStep(), duration.getEnd()) / 100;
        PaginationUtils.Page page = PaginationUtils.INSTANCE.exchange(paging);
        // Dispatch to the service method matching the alarm scope; unknown
        // types yield an empty Alarm.
        switch (alarmType) {
            case APPLICATION:
                return getAlarmService().loadApplicationAlarmList(keyword, duration.getStep(), startTimeBucket, endTimeBucket, page.getLimit(), page.getFrom());
            case SERVER:
                return getAlarmService().loadInstanceAlarmList(keyword, duration.getStep(), startTimeBucket, endTimeBucket, page.getLimit(), page.getFrom());
            case SERVICE:
                return getAlarmService().loadServiceAlarmList(keyword, duration.getStep(), startTimeBucket, endTimeBucket, page.getLimit(), page.getFrom());
            default:
                return new Alarm();
        }
    }
}
|
class Book:
    """A simple record of a book's bibliographic data."""

    def __init__(self, title: str, author: str, year: int):
        """Store the book's title, author, and publication year."""
        self.title = title
        self.author = author
        self.year = year

    def get_title(self) -> str:
        """Return the book's title."""
        return self.title

    def get_author(self) -> str:
        """Return the book's author."""
        return self.author

    def get_year(self) -> int:
        """Return the book's publication year."""
        return self.year

    def __repr__(self) -> str:
        """Unambiguous representation, useful for debugging and logging."""
        return "Book(title={!r}, author={!r}, year={!r})".format(
            self.title, self.author, self.year)
/**
 * Direction in which a collection should be ordered.
 *
 * @author <NAME>
 */
export enum SortOrder {
  ASCENDING,
  DESCENDING
}
|
<reponame>ArjixWasTaken/YifyAPI
#!/usr/bin/env python3
from setuptools import setup, find_packages

# Read the long description for the PyPI page from the README.
with open('README.md', 'r') as f:
    long_description = f.read()
def get_version():
    """Derive the next release version from the latest PyPI release feed.

    Reads the most recent release title (e.g. "1.2.3") from the project's
    PyPI RSS feed and bumps it: when the last component reaches 10, the middle
    component is incremented and the last resets to 0; otherwise the last
    component is incremented.
    """
    import feedparser

    # CLEANUP: the original fetched and parsed the same feed twice (once into
    # `r`, once into `f`); one network request suffices.
    latest = feedparser.parse(
        'https://pypi.org/rss/project/yifyapi/releases.xml'
    )['entries'][0]['title']
    parts = latest.split('.')
    if int(parts[-1]) == 10:
        minor = int(parts[-2]) + 1
        patch = 0
    else:
        minor = int(parts[-2])
        patch = int(parts[-1]) + 1
    return parts[0] + '.' + str(minor) + '.' + str(patch)
# Compute the next release number from PyPI at build time.
version = get_version()

# Package metadata for the YifyAPI scraping library.
setup(
    name = 'YifyAPI',
    version = version,
    author = 'ArjixGamer',
    author_email = '<EMAIL>',
    description = 'A scraping API for Yify.',
    packages = find_packages(),
    classifiers = [
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
    python_requires = '>=3.6',
    url = 'https://github.com/ArjixGamer/YifyAPI',
    keywords = ['movies', 'yify', 'torrents', 'download', 'hd'],
    install_requires = [
        'beautifulsoup4>=4.6.0',
        'requests>=2.18.4',
    ],
    extras_require = {},
    long_description = long_description,
    long_description_content_type = 'text/markdown'
)
-- Return the five highest salaries.
-- NOTE(review): TOP is T-SQL (SQL Server/Access) syntax; other dialects use
-- LIMIT/FETCH. "table" looks like a placeholder (and is a reserved word) —
-- confirm the real table name.
SELECT TOP 5 salary
FROM table
ORDER BY salary DESC;
from typing import List, Tuple, Union


def generate_rules(board: List[List[Union[str, int]]]) -> List[Tuple[int, List[Tuple[int, int]]]]:
    """Generate minesweeper-style rules from a board.

    Each rule is a ``(count, cells)`` pair, where ``cells`` is a list of
    ``(row, col)`` coordinates. Numbered cells and mines each yield a rule
    over their own cell; "covered" cells yield one single-cell rule each;
    cells adjacent to a 0 cell (and not covered) yield a rule counting the
    mines around them.
    """
    rules = []
    uncovered_cells = []
    adjacent_to_empty = set()

    # Helper function to get adjacent cell coordinates (8-neighbourhood,
    # clipped to the board).
    def get_adjacent_cells(row, col):
        adjacent = []
        for i in range(max(0, row - 1), min(row + 2, len(board))):
            for j in range(max(0, col - 1), min(col + 2, len(board[0]))):
                if (i, j) != (row, col):
                    adjacent.append((i, j))
        return adjacent

    # Iterate through the board to generate per-cell rules.
    for i in range(len(board)):
        for j in range(len(board[0])):
            cell = board[i][j]
            if cell == "covered":
                uncovered_cells.append((i, j))
            elif isinstance(cell, int):
                rules.append((cell, [(i, j)]))
            elif cell == "mine":
                rules.append((1, [(i, j)]))
            # Remember neighbours of empty (0) cells for the extra rules below.
            if isinstance(cell, int) and cell == 0:
                adjacent_to_empty.update(get_adjacent_cells(i, j))

    # One rule per covered cell.
    # BUG FIX: the cell coordinate must be wrapped in a list to match the
    # declared return type List[Tuple[int, List[Tuple[int, int]]]] (the
    # original emitted a bare tuple).
    rules.extend((1, [cell]) for cell in uncovered_cells)

    # One rule per non-covered cell adjacent to an empty cell, counting the
    # mines around it. Same type fix as above.
    for cell in adjacent_to_empty:
        if cell not in uncovered_cells:
            adjacent_mines = sum(1 for adj_cell in get_adjacent_cells(cell[0], cell[1]) if board[adj_cell[0]][adj_cell[1]] == "mine")
            rules.append((adjacent_mines, [cell]))

    return rules
#include "doctest.h"
#include "util/audio_frame.hh"
// 24-bit samples carried in int32_t channel slots.
using TestAudioFrame = AudioFrame<int32_t, 24>;

// scaleInput should map raw 24-bit sample codes onto [-1.0, 1.0]:
// positive codes 0x000000 / 0x400000 / 0x7FFFFF map to 0.0 / 0.5 / ~1.0,
// and codes with bit 23 set (0x800000 / 0xC00000 / 0xFFFFFF) are treated
// as negative: -1.0 / -0.5 / ~0.0.
TEST_CASE("audio_frame_tests: input_scales") {
	TestAudioFrame a;
	a.chan[0] = 0x00000000;
	CHECK(((float)TestAudioFrame::scaleInput(a.chan[0])) == doctest::Approx(0.0));
	a.chan[0] = 0x00400000;
	CHECK(((float)TestAudioFrame::scaleInput(a.chan[0])) == doctest::Approx(0.5));
	a.chan[0] = 0x007FFFFF;
	CHECK(((float)TestAudioFrame::scaleInput(a.chan[0])) == doctest::Approx(1.0));
	a.chan[1] = 0x800000;
	CHECK(((float)TestAudioFrame::scaleInput(a.chan[1])) == doctest::Approx(-1.0));
	a.chan[1] = 0xC00000;
	CHECK(((float)TestAudioFrame::scaleInput(a.chan[1])) == doctest::Approx(-0.5));
	a.chan[1] = 0xFFFFFF;
	CHECK(((float)TestAudioFrame::scaleInput(a.chan[1])) == doctest::Approx(0.0));
}
// scaleOutput should map floats in [-1.0, 1.0] onto signed 24-bit codes
// [-8388608, 8388607]. smallest_val is one LSB (2^-23). Note both
// 1.0f - smallest_val and 1.0f clamp to the maximum positive code 0x7FFFFF.
TEST_CASE("audio_frame_tests: output_scales") {
	const float smallest_val = 1.f / 8388608.f;
	CHECK(TestAudioFrame::scaleOutput(-1.f) == -8388608);
	CHECK(TestAudioFrame::scaleOutput(-1.f + smallest_val) == -8388607);
	CHECK(TestAudioFrame::scaleOutput(-1.f + 2 * smallest_val) == -8388606);
	CHECK(TestAudioFrame::scaleOutput(-0.5f) == -4194304);
	CHECK(TestAudioFrame::scaleOutput(-2 * smallest_val) == -2);
	CHECK(TestAudioFrame::scaleOutput(-smallest_val) == -1);
	CHECK(TestAudioFrame::scaleOutput(0.f) == 0);
	CHECK(TestAudioFrame::scaleOutput(smallest_val) == 1);
	CHECK(TestAudioFrame::scaleOutput(2 * smallest_val) == 2);
	CHECK(TestAudioFrame::scaleOutput(0.125f) == 0x100000);
	CHECK(TestAudioFrame::scaleOutput(0.25f) == 0x200000);
	CHECK(TestAudioFrame::scaleOutput(0.5f) == 0x400000);
	CHECK(TestAudioFrame::scaleOutput(1.0f - 2 * smallest_val) == 0x7FFFFE);
	CHECK(TestAudioFrame::scaleOutput(1.0f - smallest_val) == 0x7FFFFF);
	CHECK(TestAudioFrame::scaleOutput(1.0f) == 0x7FFFFF);
}
|
<gh_stars>0
package com.alipay.api.response;
import com.alipay.api.internal.mapping.ApiField;
import com.alipay.api.AlipayResponse;
/**
 * ALIPAY API: ant.merchant.expand.indirect.online.modify response.
 *
 * @author <NAME>
 * @since 1.0, 2021-06-17 10:43:17
 */
public class AntMerchantExpandIndirectOnlineModifyResponse extends AlipayResponse {

    private static final long serialVersionUID = 5528269943885935991L;

    /**
     * Globally unique merchant number generated by Alipay once the merchant
     * has successfully registered.
     */
    @ApiField("sub_merchant_id")
    private String subMerchantId;

    /** Returns the Alipay-assigned sub-merchant id. */
    public String getSubMerchantId() {
        return this.subMerchantId;
    }

    /** Sets the Alipay-assigned sub-merchant id. */
    public void setSubMerchantId(String subMerchantId) {
        this.subMerchantId = subMerchantId;
    }
}
|
#include <bits/stdc++.h>
using namespace std;
// Returns the index of the unvisited vertex with the smallest weight,
// or -1 when every vertex has already been visited.
int getMinVertex(bool * visited, int * weight, int n)
{
	int best = -1;
	for (int v = 0; v < n; ++v) {
		if (visited[v])
			continue;
		if (best == -1 || weight[v] < weight[best])
			best = v;
	}
	return best;
}
// Computes a minimum spanning tree of the graph given as an n x n adjacency
// matrix (0 = no edge) using Prim's algorithm, and prints every MST edge as
// "u v weight" with the smaller endpoint first.
void prims(int ** edges,int n)
{
	// parent[v] = tree neighbour through which v joined the MST;
	// weight[v] = cheapest known edge connecting v to the growing tree.
	int * parent = new int[n];
	int * weight = new int[n];
	bool *visited = new bool[n]();
	for(int i=0;i<n;i++)
		weight[i] = INT_MAX;
	// Start the tree from vertex 0.
	parent[0] = -1;
	weight[0] = 0;
	// n-1 iterations: each adds one vertex to the tree.
	for(int i=0;i<n-1;i++)
	{
		int minVertex = getMinVertex(visited,weight,n);
		visited[minVertex] = true;
		// Relax edges from the newly added vertex to all unvisited neighbours.
		for(int j=0;j<n;j++)
		{
			if(edges[minVertex][j] != 0 && !visited[j])
			{
				if(weight[j] > edges[minVertex][j])
				{
					weight[j] = edges[minVertex][j];
					parent[j] = minVertex;
				}
			}
		}
	}
	// Print each MST edge (smaller endpoint first) with its weight.
	for(int i=1;i<n;i++)
	{
		if(parent[i] < i)
			cout << parent[i] << " " << i << " " << weight[i] << endl;
		else
			cout << i << " " << parent[i] << " " << weight[i] << endl;
	}
	delete [] parent;
	delete [] weight;
	delete [] visited;
}
// Reads a weighted undirected graph from stdin (vertex count, edge count,
// then "u v w" per edge), builds an adjacency matrix, and prints its MST.
int main()
{
	int V, E, tempX, tempY;
	cin >> V >> E;
	/*
	Write Your Code Here
	Complete the Rest of the Program
	You have to Print the output yourself
	*/
	// Zero-initialized V x V adjacency matrix (0 = no edge).
	int ** edges = new int*[V];
	for(int i=0;i<V;i++)
	{
		edges[i] = new int[V];
		for(int j=0;j<V;j++)
			edges[i][j] = 0;
	}
	// Read each edge; the graph is undirected, so mirror the weight.
	for(int i=0;i<E;i++)
	{
		cin >> tempX >> tempY;
		cin >> edges[tempX][tempY];
		edges[tempY][tempX] = edges[tempX][tempY];
	}
	prims(edges,V);
	// NOTE(review): the edges matrix is never freed — harmless at program
	// exit, but worth a loop of delete[] for cleanliness.
	return 0;
}
|
#!/bin/bash
# To git clone into non-empty directory:
# https://stackoverflow.com/a/33695754/209647

# Move out of scripts dir
cd ..

# Initialize a git repo inside an existing (possibly non-empty) directory and
# pull the given branch from the given remote, then return to the root.
clone_into() {
	local dir="$1" url="$2" branch="$3"
	cd "$dir"
	git init .
	git remote add origin "$url"
	git pull origin "$branch"
	cd ../..
}

clone_into main/mainloop       https://github.com/bensherlock/micropython-hudson-sensor-mainloop.git master
clone_into main/pybd_expansion https://github.com/bensherlock/micropython-pybd-expansion.git          master
clone_into main/uac_modem      https://github.com/bensherlock/micropython-unm3-pybd.git               master
clone_into main/uac_network    https://github.com/bensherlock/micropython-hudson-network.git          main
|
import random

# Grid for a 5x5 crossword puzzle; 0 marks an empty cell.
GRID_SIZE = 5
crossword_puzzle = [[0 for x in range(GRID_SIZE)] for y in range(GRID_SIZE)]

# Words to be placed in the crossword puzzle.
words = ["Apple", "Banana", "Carrot"]

# Letter positions already occupied by placed words.
letter_positions = []

for word in words:
    # BUG FIX: words longer than the grid made randint(0, negative) raise a
    # ValueError ("Banana" and "Carrot" are 6 letters on a 5x5 grid); skip them.
    if len(word) > GRID_SIZE:
        continue

    # Pick a random direction - horizontal or vertical.
    direction = random.choice(["HORIZONTAL", "VERTICAL"])

    # Pick a random row/column position where the whole word fits.
    if direction == "HORIZONTAL":
        row = random.randint(0, GRID_SIZE - 1)
        column = random.randint(0, GRID_SIZE - len(word))
    else:
        row = random.randint(0, GRID_SIZE - len(word))
        column = random.randint(0, GRID_SIZE - 1)

    # Compute the grid position of each letter of the word.
    new_letter_positions = []
    for letter_pos in range(len(word)):
        row_offset = 0
        column_offset = 0
        if direction == "HORIZONTAL":
            column_offset = letter_pos
        else:
            row_offset = letter_pos
        new_letter_positions.append([row + row_offset, column + column_offset])

    # Reject the placement if it overlaps a previously placed word.
    intersection = any(pos in letter_positions for pos in new_letter_positions)

    if not intersection:
        letter_positions += new_letter_positions
        # BUG FIX: write each letter at its computed position. The original
        # indexed crossword_puzzle[row + i][column + i], walking the grid
        # diagonally regardless of direction and indexing out of range for
        # words placed away from the top-left corner.
        for letter_pos, (r, c) in enumerate(new_letter_positions):
            crossword_puzzle[r][c] = word[letter_pos]
    # On intersection, simply skip the word. (The original 'else' rebuilt
    # `words` with the always-false filter `word != word`, a broken no-op.)

# Print the solution to the crossword puzzle.
for row in crossword_puzzle:
    print(str(row))
import express from 'express';
import path from 'path';
import slash from 'slash';
import Video from '../models/video.model.js';
import {
search,
getTotals,
getRandomVideo,
fields,
getSimilarVideos,
limitVideoList,
} from '../utilities/video.utility.js';
const router = express.Router();

// GET /search/:page — returns one page of search results; page 0 additionally
// includes result totals and a random matching video.
router.get('/search/:page', async function (req, res) {
    const page = parseInt(req.params.page) || 0;

    // NOTE(review): `parsedEnv` is referenced in the catch blocks below but is
    // never imported in this file — hitting an error path would raise a
    // ReferenceError. Confirm the intended import (e.g. a parsed env module).
    // CLEANUP: removed the unused `pattern`/`options` locals and the empty
    // `if (req.query.search) {}` block.
    let videos;
    try {
        videos = await search(req.query, page);
    } catch (err) {
        if (parsedEnv.VERBOSE) console.error(err);
        return res.sendStatus(500);
    }

    // Totals and the random pick are only computed for the first page.
    let totals;
    let randomVideo;
    if (page === 0) {
        try {
            totals = await getTotals(req.query);
        } catch (err) {
            if (parsedEnv.VERBOSE) console.error(err);
            return res.sendStatus(500);
        }
        try {
            randomVideo = await getRandomVideo(req.query, totals.count);
        } catch (err) {
            if (parsedEnv.VERBOSE) console.error(err);
            return res.sendStatus(500);
        }
    }

    res.json({
        videos,
        totals,
        randomVideo,
    });
});
// GET /:extractor/:id — returns one video plus related lists (same uploader,
// same playlist, same download job, and similar videos), each trimmed around
// the current video by limitVideoList, with the offset of the trimmed window.
router.get('/:extractor/:id', async (req, res) => {
    let video;
    let uploaderVideos;
    let playlistVideos;
    let jobVideos;
    let uploaderVideosOffset;
    let playlistVideosOffset;
    let jobVideosOffset;
    try {
        // Look the video up by its (extractor, id) pair and pull in the
        // uploader and job documents. Returns 404 when not found.
        video = (await Video.findOne({
            extractor: req.params.extractor,
            id: req.params.id
        }, 'id extractor viewCount uploadDate videoFile directory resolution'
            + ' uploaderDocument fps webpageUrl dateDownloaded width height'
            + ' likeCount dislikeCount subtitleFiles jobDocument mediumResizedThumbnailFile'
            + ' license ageLimit seasonNumber episodeNumber trackNumber discNumber'
            + ' releaseYear format tbr asr vbr vcodec acodec ext ' + fields
        )
            .populate('uploaderDocument jobDocument')
            .exec()
        )?.toJSON();
        if (!video) return res.sendStatus(404);
        // Other uploads by the same uploader, newest first.
        if (video.uploader) uploaderVideos = await Video.find(
            { uploader: video.uploader },
            '-_id extractor id title uploader duration directory smallResizedThumbnailFile viewCount width height')
            .sort({ uploadDate: -1 })
            .lean()
            .exec();
        // Other entries of the same playlist, in playlist order.
        if (video.playlist) playlistVideos = await Video.find(
            { playlist: video.playlist },
            '-_id extractor id title uploader duration directory smallResizedThumbnailFile viewCount width height')
            .sort({ playlistIndex: 1 })
            .lean()
            .exec();
        // Videos downloaded by the same job, most recent first.
        jobVideos = await Video.find(
            { jobDocument: video.jobDocument },
            '-_id extractor id title uploader duration directory smallResizedThumbnailFile viewCount width height')
            .sort({ dateDownloaded: -1 })
            .lean()
            .exec();
        // Trim each list to a window around the current video.
        if (uploaderVideos) [uploaderVideos, uploaderVideosOffset] = limitVideoList(uploaderVideos, video);
        if (playlistVideos) [playlistVideos, playlistVideosOffset] = limitVideoList(playlistVideos, video);
        if (jobVideos) [jobVideos, jobVideosOffset] = limitVideoList(jobVideos, video);
    } catch (err) {
        console.error(err)
        return res.sendStatus(500);
    }
    let similarVideos;
    try {
        similarVideos = await getSimilarVideos(video);
    } catch (err) {
        // NOTE(review): unlike the block above, this error is swallowed
        // without logging — consider console.error(err) here too.
        return res.sendStatus(500);
    }
    res.json({
        video,
        uploaderVideos,
        playlistVideos,
        jobVideos,
        uploaderVideosOffset,
        playlistVideosOffset,
        jobVideosOffset,
        similarVideos,
        // NOTE(review): `parsedEnv` is used here but never imported in this
        // file — confirm the intended import.
        localVideoPath: slash(path.join(parsedEnv.OUTPUT_DIRECTORY, 'videos', video.directory, video.videoFile.name)),
    });
});

export default router;
|
<reponame>kkyu12/0325
package Mar25th;
public class Report3 {

	/**
	 * Prints the multiplication tables for 2 through 9 (each multiplied by
	 * 2..9), one product per line in the form {@code "i x j = k"}.
	 */
	public static void main(String[] args) {
		// Nested loops replace the original 64 hard-coded print statements;
		// the output is byte-for-byte identical ("\n" kept, not println,
		// to avoid the platform line separator).
		for (int i = 2; i <= 9; i++) {
			for (int j = 2; j <= 9; j++) {
				System.out.print(i + " x " + j + " = " + (i * j) + "\n");
			}
		}
	}
}
|
#! /bin/bash
# Display diffs between filename and filename.new; prompt user for
# verification; and either apply or discard the changes.
#
# Usage:
#
#   bash verify_changes.sh filename

# Require the filename argument so we never diff/mv/rm empty paths.
if [ -z "${1:-}" ]; then
    echo "Usage: bash verify_changes.sh filename" >&2
    exit 1
fi

# Quote all expansions so filenames with spaces work correctly.
diff "${1}" "${1}.new"
echo '---'
read -p 'Is this change OK? (Y/N) ' choice
case "${choice}" in
    y|Y ) echo 'Applying changes.'; mv "${1}.new" "${1}";;
    * ) echo 'Reverting.'; rm "${1}.new";;
esac
|
/**
 * Returns the sum of the decimal digits of an integer.
 * Negative inputs are treated by absolute value (findSumOfDigits(-123) === 6)
 * and fractional inputs are truncated toward zero first; the original
 * implementation returned a negative sum for negative n.
 * @param {number} n - The number whose digits are summed.
 * @returns {number} Sum of the digits of |trunc(n)|; 0 for n === 0.
 */
function findSumOfDigits(n) {
    let remaining = Math.abs(Math.trunc(n));
    let sum = 0;
    while (remaining > 0) {
        sum += remaining % 10;
        remaining = Math.trunc(remaining / 10);
    }
    return sum;
}
"use strict";
/**
* Since only a single constructor is being exported as module.exports this comment isn't documented.
* The class and module are the same thing, the contructor comment takes precedence.
* @module FieldOfSquaresDrawnItemFactory
*/
var paper = require('paper/dist/paper-core.js');
/**
* Factory which creates a field of squares with some randomness. Ie not patterned. Intended for asteroids or debris fields.
* Produces 4 squares per hex. Default orientation is point up and point down. Squares are skewed per the perspective
* @constructor
* @param {external:cartesian-hexagonal} hexDefinition - The DTO defining the hex <--> cartesian relation
* @param {integer} minSize - The minimum size of the squares
* @param {integer} maxSize - The maximum size of the squares
* @param {colors} colors - An array of color strings the squares can be
*/
module.exports = function FieldOfSquaresDrawnItemFactory(hexDefinition, minSize, maxSize, colors) {
this.hexDefinition = hexDefinition;
this.minSize = minSize;
this.maxSize = maxSize;
this.colors = colors;
};
/**
 * Return a group of items representing the field: one randomly placed
 * square per quarter of the hex (top-left, top-right, bottom-right,
 * bottom-left, in that order).
 * @override
 * @param {Object} item - The DTO to produce a paper.js drawn item for
 * @param {onClick=} item.onClick - The callback to use when this item is clicked
 * @returns {external:Item} The paper.js Group for the given parameters
 * @implements {DrawnItemFactory#getDrawnItem}
 * @todo Make the random numbers seeded, so the same field is produced each time
 */
module.exports.prototype.getDrawnItem = function(item) {
    var halfWidth = this.hexDefinition.hexagon_edge_to_edge_width / 2;
    var halfHeight = this.hexDefinition.hexagon_half_wide_width;
    var fieldGroup = new paper.Group();
    fieldGroup.pivot = new paper.Point(0, 0);
    fieldGroup.data.item = item;
    // Bounds [minX, maxX, minY, maxY] for each quarter of the hex.
    var quarters = [
        [-1 * halfWidth, 0, -1 * halfHeight, 0],
        [0, halfWidth, -1 * halfHeight, 0],
        [0, halfWidth, 0, halfHeight],
        [-1 * halfWidth, 0, 0, halfHeight]
    ];
    for (var i = 0; i < quarters.length; i++) {
        var bounds = quarters[i];
        fieldGroup.addChild(this.createSquare(bounds[0], bounds[1], bounds[2], bounds[3]));
    }
    //TODO Rasterize the group?
    return fieldGroup;
};
/**
 * Produces a square (drawn as a diamond) centred at a random point within
 * the given quarter of the hex.
 * @param {integer} minX - The minimum X coordinate to randomise the square's center
 * @param {integer} maxX - The maximum X coordinate to randomise the square's center
 * @param {integer} minY - The minimum Y coordinate to randomise the square's center
 * @param {integer} maxY - The maximum Y coordinate to randomise the square's center
 * @returns {external:Item} The square to include in the group
 */
module.exports.prototype.createSquare = function (minX, maxX, minY, maxY) {
    var x, y, hexCoords;
    // Re-roll until the random centre lands inside this hex (u === 0, v === 0).
    //TODO There are faster ways to do this.
    do {
        x = this.random(minX, maxX);
        y = this.random(minY, maxY);
        hexCoords = this.hexDefinition.getReferencePoint(x, y);
    } while (hexCoords.u !== 0 || hexCoords.v !== 0);
    // Random shade and random size within the configured limits.
    var shade = this.colors[this.random(0, this.colors.length - 1)];
    var size = this.random(this.minSize, this.maxSize);
    var square = new paper.Path.RegularPolygon({
        center: [x, y],
        sides: 4,
        radius: size,
        fillColor: shade,
        strokeColor: 'black'
    });
    // Rotate 45° so the squares render as diamonds. A random rotation was
    // tried previously but diamonds looked better.
    square.rotate(45);
    // Skew vertically per the perspective.
    square.scale(1, this.hexDefinition.vScale);
    return square;
};
/**
 * Generates a uniformly distributed random integer in [min, max], inclusive.
 * The previous Math.round-based formula gave the two endpoint values only
 * half the probability of interior values; flooring over a width of
 * (max - min + 1) is uniform.
 * @param {integer} min - The minimum number to generate
 * @param {integer} max - The maximum number to generate
 * @returns {integer} A random integer between min and max inclusive
 */
module.exports.prototype.random = function (min, max) {
    return Math.floor(Math.random() * (max - min + 1)) + min;
};
import logging
import sys
import traceback
class Logger(object):
    """Structured audit logger wrapping the standard :mod:`logging` module.

    Exposes the plain level methods (``info``/``debug``/``error``/
    ``critical``) plus helpers that render authentication, authorization,
    action and exception events through the class-level message templates.
    All output goes to stdout.
    """

    # Default log-line layout and verbosity.
    FMT = '[%(asctime)s - %(name)s] %(message)s'
    DEFAULT_LEVEL = logging.INFO

    # Message templates used by the structured helper methods below.
    AUTHN_FMT = "{username} (from group:{group}) authenticated {source} at {location}: {success}"
    AUTHZ_FMT = "{principal} (using role {role}) performed {action} on {object} at {location}: {success}"
    ACTION_FMT = "{principal} performed {action} (details: {details}) at {location}"
    EXC_FMT = "exception {exc} (details: {details}) at {location}"

    def __init__(self, name, level=DEFAULT_LEVEL, fmt=FMT):
        """Create a logger writing to stdout.

        Args:
            name: Logger name; appears in every emitted line.
            level: Minimum level applied to both logger and handler.
            fmt: ``logging.Formatter`` format string.
        """
        self.name = name
        self.logger = logging.getLogger(name)
        # Drop handlers left over from a previous Logger with the same name
        # so repeated construction does not duplicate output lines.
        if self.logger.hasHandlers():
            self.logger.handlers.clear()
        self.logger.setLevel(level)
        ch = logging.StreamHandler(sys.stdout)
        ch.setLevel(level)
        ch.setFormatter(logging.Formatter(fmt))
        self.logger.addHandler(ch)

    def info(self, msg):
        """Log *msg* at INFO level."""
        self.logger.info(msg)

    def debug(self, msg):
        """Log *msg* at DEBUG level."""
        self.logger.debug(msg)

    def error(self, msg):
        """Log *msg* at ERROR level."""
        self.logger.error(msg)

    def critical(self, msg):
        """Log *msg* at CRITICAL level."""
        self.logger.critical(msg)

    def authenticate_user(self, location, username, success=False, source='', group=None):
        """Record a user authentication attempt at INFO level."""
        return self.info(self.AUTHN_FMT.format(
            username=username,
            location=location,
            success=success,
            source=source,
            group=group,
        ))

    def authenticate_token(self, location, token_name, success=False, source='', group=None):
        """Record a token authentication attempt (the token name fills the
        username slot of AUTHN_FMT)."""
        return self.info(self.AUTHN_FMT.format(
            username=token_name,
            location=location,
            success=success,
            source=source,
            group=group,
        ))

    def authorize_user(self, location, principal, action, object_, success=False, role=None):
        """Record an authorization decision for *principal* acting on *object_*."""
        return self.info(self.AUTHZ_FMT.format(
            principal=principal,
            action=action,
            success=success,
            object=object_,
            location=location,
            role=role,
        ))

    def action(self, location, principal, action, details=None):
        """Record a generic action performed by *principal*."""
        return self.info(self.ACTION_FMT.format(
            principal=principal,
            action=action,
            details=details,
            location=location,
        ))

    def exception(self, location, details):
        """Record the currently handled exception, including its traceback,
        at ERROR level. Call from inside an ``except`` block."""
        return self.error(self.EXC_FMT.format(
            location=location,
            details=details,
            exc=traceback.format_exc(),
        ))
|
<gh_stars>0
// GB - try to make this compile, need a class X, headers, ...
#include <utility> // std::move
#include <memory> // std::shared_ptr
#include <type_traits> // std::remove_reference
#include <iostream>
using namespace std;
// Minimal class template used by the std::move example near the end of the
// file; it only exists to give the demo a type with state.
template <class T>
class X {
    T t; // stored value
public:
    X() {}             // default-construct with a default-initialized t
    X(T tt) : t(tt) {} // construct from a value (deliberately taken by copy)
};
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// http://thbecker.net/articles/rvalue_references/section_07.html
// Perfect Forwarding: The Problem
// The other problem besides move semantics that rvalue references were designed to solve is the perfect forwarding problem. Consider the following simple factory function:
// Naive factory: Arg is taken BY VALUE, so forwarding it to T's constructor
// introduces an extra copy. This is the motivating "imperfect forwarding"
// example from the article — intentionally flawed; do not "fix".
template<typename T, typename Arg>
shared_ptr<T> factory(Arg arg)
{
    return shared_ptr<T>(new T(arg));
}
// Obviously, the intent here is to forward the argument arg from the factory function to T's constructor. Ideally, as far as arg is concerned, everything should behave just as if the factory function weren't there and the constructor were called directly in the client code: perfect forwarding. The code above fails miserably at that: it introduces an extra call by value, which is particularly bad if the constructor takes its argument by reference.
// The most common solution, chosen e.g. by boost::bind, is to let the outer function take the argument by reference:
// Second attempt: Arg taken by non-const lvalue reference. Avoids the copy,
// but (as the surrounding commentary explains) cannot be called on rvalues.
template<typename T, typename Arg>
shared_ptr<T> factory2(Arg& arg)
{
    return shared_ptr<T>(new T(arg));
}
// That's better, but not perfect. The problem is that now, the factory function cannot be called on rvalues:
// factory2<X>(hoo()); // error if hoo returns by value
// Returns an lvalue reference so factory2 can be fed a "function call" argument.
// NOTE(review): the int is heap-allocated and never freed — acceptable in this
// demo, but a memory leak in real code.
int& hoo() { int *p = new int; *p =1; return *p; } // GB
// template <>
// factory2<int>(hoo()); // error if hoo returns by value // GB
// factory2<X>(41); // error
// This can be fixed by providing an overload which takes its argument by const reference:
// Third attempt: Arg by const lvalue reference. Accepts rvalues too, but
// scales poorly with arity and blocks move semantics (see commentary below).
template<typename T, typename Arg>
shared_ptr<T> factory3(Arg const & arg)
{
    return shared_ptr<T>(new T(arg));
}
#if 0
// There are two problems with this approach. Firstly, if factory had not one, but several arguments, you would have to provide overloads for all combinations of non-const and const reference for the various arguments. Thus, the solution scales extremely poorly to functions with several arguments.
// Secondly, this kind of forwarding is less than perfect because it blocks out move semantics: the argument of the constructor of T in the body of factory is an lvalue. Therefore, move semantics can never happen even if it would without the wrapping function.
// It turns out that rvalue references can be used to solve both these problems. They make it possible to achieve truly perfect forwarding without the use of overloads. In order to understand how, we need to look at two more rules for rvalue references.
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// http://thbecker.net/articles/rvalue_references/section_08.html
// Perfect Forwarding: The Solution
// The first of the remaining two rules for rvalue references affects old-style lvalue references as well. Recall that in pre-11 C++, it was not allowed to take a reference to a reference: something like A& & would cause a compile error. C++11, by contrast, introduces the following reference collapsing rules1:
//
// A& & becomes A&
// A& && becomes A&
// A&& & becomes A&
// A&& && becomes A&&
// Secondly, there is a special template argument deduction rule for function templates that take an argument by rvalue reference to a template argument:
template<typename T>
void foo(T&&);
// Here, the following apply:
// When foo is called on an lvalue of type A, then T resolves to A& and hence, by the reference collapsing rules above, the argument type effectively becomes A&.
// When foo is called on an rvalue of type A, then T resolves to A, and hence the argument type becomes A&&.
// Given these rules, we can now use rvalue references to solve the perfect forwarding problem as set forth in the previous section. Here's what the solution looks like:
//
template<typename T, typename Arg>
shared_ptr<T> factory(Arg&& arg)
{
return shared_ptr<T>(new T(std::forward<Arg>(arg)));
}
// where std::forward is defined as follows:
template<class S>
S&& forward(typename remove_reference<S>::type& a) noexcept
{
return static_cast<S&&>(a);
}
// (Don't pay attention to the noexcept keyword for now. It lets the compiler know, for certain optimization purposes, that this function will never throw an exception. We'll come back to it in Section 9.) To see how the code above achieves perfect forwarding, we will discuss separately what happens when our factory function gets called on lvalues and rvalues. Let A and X be types. Suppose first that factory<A> is called on an lvalue of type X:
X x;
factory<A>(x);
// Then, by the special template deduction rule stated above, factory's template argument Arg resolves to X&. Therefore, the compiler will create the following instantiations of factory and std::forward:
shared_ptr<A> factory(X& && arg)
{
return shared_ptr<A>(new A(std::forward<X&>(arg)));
}
X& && forward(remove_reference<X&>::type& a) noexcept
{
return static_cast<X& &&>(a);
}
// After evaluating the remove_reference and applying the reference collapsing rules, this becomes:
shared_ptr<A> factory(X& arg)
{
return shared_ptr<A>(new A(std::forward<X&>(arg)));
}
X& std::forward(X& a)
{
return static_cast<X&>(a);
}
// This is certainly perfect forwarding for lvalues: the argument arg of the factory function gets passed on to A's constructor through two levels of indirection, both by old-fashioned lvalue reference.
// Next, suppose that factory<A> is called on an rvalue of type X:
X foo();
factory<A>(foo());
// Then, again by the special template deduction rule stated above, factory's template argument Arg resolves to X. Therefore, the compiler will now create the following function template instantiations:
shared_ptr<A> factory(X&& arg)
{
return shared_ptr<A>(new A(std::forward<X>(arg)));
}
X&& forward(X& a) noexcept
{
return static_cast<X&&>(a);
}
// This is indeed perfect forwarding for rvalues: the argument of the factory function gets passed on to A's constructor through two levels of indirection, both by reference. Moreover, A's constructor sees as its argument an expression that is declared as an rvalue reference and does not have a name. By the no-name rule, such a thing is an rvalue. Therefore, A's constructor gets called on an rvalue. This means that the forwarding preserves any move semantics that would have taken place if the factory wrapper were not present.
// It is perhaps worth noting that the preservation of move semantics is in fact the only purpose of std::forward in this context. Without the use of std::forward, everything would work quite nicely, except that A's constructor would always see as its argument something that has a name, and such a thing is an lvalue. Another way of putting this is to say that std::forward's purpose is to forward the information whether at the call site, the wrapper saw an lvalue or an rvalue.
// If you want to dig a little deeper for extra credit, ask yourself this question: why is the remove_reference in the definition of std::forward needed? The answer is, it is not really needed at all. If you use just S& instead of remove_reference<S>::type& in the defintion of std::forward, you can repeat the case distinction above to convince yourself that perfect forwarding still works just fine. However, it works fine only as long as we explicitly specify Arg as the template argument of std::forward. The purpose of the remove_reference in the definition of std::forward is to force us to do so.
// Rejoice. We're almost done. It only remains to look at the implementation of std::move. Remember, the purpose of std::move is to pass its argument right through by reference and make it bind like an rvalue. Here's the implementation:
template<class T>
typename remove_reference<T>::type&&
std::move(T&& a) noexcept
{
typedef typename remove_reference<T>::type&& RvalRef;
return static_cast<RvalRef>(a);
}
// Suppose that we call std::move on an lvalue of type X:
X<int> x;
X<int> xx = std::move(x);
// By the new special template deduction rule, the template argument T will resolve to X&. Therefore, what the compiler ends up instantiating is
typename remove_reference<X&>::type&&
std::move(X& && a) noexcept
{
typedef typename remove_reference<X&>::type&& RvalRef;
return static_cast<RvalRef>(a);
}
// After evaluating the remove_reference and applying the new reference collapsing rules, this becomes
X&& std::move(X& a) noexcept
{
return static_cast<X&&>(a);
}
// That does the job: our lvalue x will bind to the lvalue reference that is the argument type, and the function passes it right through, turning it into an unnamed rvalue reference.
// I leave it to you to convince yourself that std::move actually works fine when called on an rvalue. But then you may want to skip that: why would anybody want to call std::move on an rvalue, when its only purpose is to turn things into rvalues? Also, you have probably noticed by now that instead of
std::move(x);
// you could just as well write
static_cast<X&&>(x);
// However, std::move is strongly preferred because it is more expressive.
#endif
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// http://en.cppreference.com/w/cpp/types/remove_reference
// Prints whether T1 and T2 are exactly the same type; with std::boolalpha
// set (as in main) the output is "true" or "false".
template<class T1, class T2>
void print_is_same() {
    std::cout << std::is_same<T1, T2>::value << '\n';
}
// Demonstrates std::remove_reference: stripping & / && yields the bare type.
int main() {
    std::cout << std::boolalpha; // print bools as "true"/"false"
    print_is_same<int, int>(); // true
    print_is_same<int, int &>(); // false
    print_is_same<int, int &&>(); // false
    print_is_same<int, std::remove_reference<int>::type>(); // true
    print_is_same<int, std::remove_reference<int &>::type>(); // true
    print_is_same<int, std::remove_reference<int &&>::type>(); // true
}
|
// Copyright 2019 Copyright (c) 2019 SAP SE or an SAP affiliate company. All rights reserved. This file is licensed under the Apache Software License, v. 2 except as noted otherwise in the LICENSE file.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package testdefinition
import (
"encoding/base64"
"errors"
"fmt"
"path"
"strings"
"k8s.io/apimachinery/pkg/util/intstr"
"k8s.io/apimachinery/pkg/util/validation/field"
"github.com/gardener/test-infra/pkg/apis/testmachinery/v1beta1/validation"
"github.com/gardener/test-infra/pkg/common"
"github.com/gardener/test-infra/pkg/testmachinery/config"
"github.com/gardener/test-infra/pkg/util"
argov1alpha1 "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1"
apiv1 "k8s.io/api/core/v1"
corev1 "k8s.io/api/core/v1"
tmv1beta1 "github.com/gardener/test-infra/pkg/apis/testmachinery/v1beta1"
"github.com/gardener/test-infra/pkg/testmachinery"
)
var (
	// DefaultActiveDeadlineSeconds is the fallback pod deadline (600s)
	// applied when a TestDefinition does not set its own.
	DefaultActiveDeadlineSeconds = intstr.FromInt(600)
	// archiveLogs enables argo log archiving; declared as a variable so its
	// address can be taken in the template's ArtifactLocation.
	archiveLogs = true
)
// New takes a CRD TestDefinition and its locations, and creates a TestDefinition object.
// It validates the spec, applies defaults (base image, active deadline), and
// pre-builds the argo Template with the standard testmachinery environment
// variables, a "phase" input parameter, and an optional export artifact.
func New(def *tmv1beta1.TestDefinition, loc Location, fileName string) (*TestDefinition, error) {
	// Reject invalid specs early; the field path records where the definition came from.
	if err := validation.ValidateTestDefinition(field.NewPath(fmt.Sprintf("Location: \"%s\"; File: \"%s\"", loc.Name(), fileName)), def); len(err) != 0 {
		return nil, err.ToAggregate()
	}
	// Default the container image to the configured base image.
	if def.Spec.Image == "" {
		def.Spec.Image = testmachinery.BaseImage()
	}
	// Default the pod deadline when the spec does not set one.
	if def.Spec.ActiveDeadlineSeconds == nil {
		def.Spec.ActiveDeadlineSeconds = &DefaultActiveDeadlineSeconds
	}
	// The template name is left empty here; it is assigned later via
	// SetName / Copy.
	template := &argov1alpha1.Template{
		Name: "",
		Metadata: argov1alpha1.Metadata{
			Annotations: map[string]string{
				common.AnnotationTestDefName: def.Name,
			},
		},
		ArchiveLocation: &argov1alpha1.ArtifactLocation{
			ArchiveLogs: &archiveLogs,
		},
		ActiveDeadlineSeconds: def.Spec.ActiveDeadlineSeconds,
		Container: &apiv1.Container{
			Image:      def.Spec.Image,
			Command:    def.Spec.Command,
			Args:       def.Spec.Args,
			Resources:  def.Spec.Resources,
			WorkingDir: testmachinery.TM_REPO_PATH,
			// Standard testmachinery environment: repo/kubeconfig/shared/
			// export paths, the workflow phase, and the git origin of the
			// test definition.
			Env: []apiv1.EnvVar{
				{
					Name:  testmachinery.TM_REPO_PATH_NAME,
					Value: testmachinery.TM_REPO_PATH,
				},
				{
					Name:  testmachinery.TM_KUBECONFIG_PATH_NAME,
					Value: testmachinery.TM_KUBECONFIG_PATH,
				},
				{
					Name:  testmachinery.TM_SHARED_PATH_NAME,
					Value: testmachinery.TM_SHARED_PATH,
				},
				{
					Name:  testmachinery.TM_EXPORT_PATH_NAME,
					Value: testmachinery.TM_EXPORT_PATH,
				},
				{
					// Resolved by argo from the template's input parameters.
					Name:  testmachinery.TM_PHASE_NAME,
					Value: "{{inputs.parameters.phase}}",
				},
				{
					Name:  testmachinery.TM_GIT_SHA_NAME,
					Value: loc.GitInfo().SHA,
				},
				{
					Name:  testmachinery.TM_GIT_REF_NAME,
					Value: loc.GitInfo().Ref,
				},
			},
		},
		Inputs: argov1alpha1.Inputs{
			Parameters: []argov1alpha1.Parameter{
				{Name: "phase"},
			},
			Artifacts: make([]argov1alpha1.Artifact, 0),
		},
		Outputs: argov1alpha1.Outputs{
			Artifacts: make([]argov1alpha1.Artifact, 0),
		},
	}
	// The export artifact is optional: a test that writes nothing to the
	// export path must not fail the step.
	outputArtifacts := []argov1alpha1.Artifact{
		{
			Name:     testmachinery.ExportArtifact,
			Path:     testmachinery.TM_EXPORT_PATH,
			Optional: true,
		},
	}
	td := &TestDefinition{
		Info:            def,
		Location:        loc,
		FileName:        fileName,
		Template:        template,
		inputArtifacts:  make(ArtifactSet),
		outputArtifacts: make(ArtifactSet),
		config:          config.NewSet(config.New(def.Spec.Config, config.LevelTestDefinition)...),
	}
	// Registered through the dedup-aware helper so later additions do not
	// duplicate the export artifact.
	td.AddOutputArtifacts(outputArtifacts...)
	return td, nil
}
// Copy returns a deep copy of the TestDefinition.
// The copied template receives a fresh unique name (original name plus a
// random suffix) so it can coexist with the original in one workflow.
// Info, Location and Volumes are shared; template, artifact sets and config
// are copied.
func (td *TestDefinition) Copy() *TestDefinition {
	template := td.Template.DeepCopy()
	template.Name = fmt.Sprintf("%s-%s", td.Info.GetName(), util.RandomString(5))
	return &TestDefinition{
		Info:            td.Info,
		Location:        td.Location,
		FileName:        td.FileName,
		Template:        template,
		Volumes:         td.Volumes,
		inputArtifacts:  td.inputArtifacts.Copy(),
		outputArtifacts: td.outputArtifacts.Copy(),
		config:          td.config.Copy(),
	}
}
// SetName renames the argo template and records the name in the
// testdefinition-ID annotation.
func (td *TestDefinition) SetName(name string) {
	td.AddAnnotation(common.AnnotationTestDefID, name)
	td.Template.Name = name
}
// GetName returns the current argo template name.
func (td *TestDefinition) GetName() string {
	return td.Template.Name
}
// SetSuspend marks the template as an argo suspend step.
func (td *TestDefinition) SetSuspend() {
	td.Template.Suspend = &argov1alpha1.SuspendTemplate{}
}
// GetTemplate applies every collected config element to the template (env
// vars and mounted files) and returns it.
// NOTE(review): each call appends the config to the template again; calling
// this more than once on the same TestDefinition would duplicate env vars
// and artifacts — confirm callers invoke it only once per (copied) definition.
func (td *TestDefinition) GetTemplate() (*argov1alpha1.Template, error) {
	for _, cfg := range td.config {
		switch cfg.Info.Type {
		case tmv1beta1.ConfigTypeEnv:
			// Environment values cannot fail to apply.
			td.addConfigAsEnv(cfg)
		case tmv1beta1.ConfigTypeFile:
			// File configs can fail (e.g. invalid base64 content).
			if err := td.addConfigAsFile(cfg); err != nil {
				return nil, err
			}
		}
	}
	return td.Template, nil
}
// HasBehavior checks if the testrun has defined a specific behavior like serial or disruptive.
func (td *TestDefinition) HasBehavior(behavior string) bool {
	for _, b := range td.Info.Spec.Behavior {
		if b == behavior {
			return true
		}
	}
	return false
}
// HasLabel checks if the TestDefinition satisfies a comma-separated label
// query. A plain entry ("group") requires the label to be present; an entry
// prefixed with "!" ("!group") requires it to be absent.
//
// Bug fix: previously a negated entry could never be satisfied — when the
// excluded label was absent, the inner loop left hasLabel false and the
// method returned false, so any query containing "!x" always failed.
func (td *TestDefinition) HasLabel(label string) bool {
	wantedLabels := strings.Split(label, ",")
	for _, wantedLabel := range wantedLabels {
		if strings.HasPrefix(wantedLabel, "!") {
			// Negated entry: the label must not be present. If it is
			// absent, the entry is satisfied and we move on.
			excluded := strings.TrimPrefix(wantedLabel, "!")
			for _, haveLabel := range td.Info.Spec.Labels {
				if haveLabel == excluded {
					return false
				}
			}
			continue
		}
		// Positive entry: the label must be present.
		hasLabel := false
		for _, haveLabel := range td.Info.Spec.Labels {
			if haveLabel == wantedLabel {
				hasLabel = true
				break
			}
		}
		if !hasLabel {
			return false
		}
	}
	return true
}
// AddEnvVars adds environment variables to the container of the TestDefinition's template.
// No de-duplication is performed; callers are responsible for unique names.
func (td *TestDefinition) AddEnvVars(envs ...apiv1.EnvVar) {
	td.Template.Container.Env = append(td.Template.Container.Env, envs...)
}
// AddInputArtifacts adds argo artifacts to the input of the TestDefinitions's template.
// Artifacts whose name is already registered are skipped, so the template
// never carries duplicate inputs.
func (td *TestDefinition) AddInputArtifacts(artifacts ...argov1alpha1.Artifact) {
	// Lazily initialize the dedup set.
	if td.inputArtifacts == nil {
		td.inputArtifacts = make(ArtifactSet)
	}
	for _, artifact := range artifacts {
		if td.inputArtifacts.Has(artifact.Name) {
			continue // keep the first registration, drop the duplicate
		}
		td.inputArtifacts.Add(artifact.Name)
		td.Template.Inputs.Artifacts = append(td.Template.Inputs.Artifacts, artifact)
	}
}
// AddOutputArtifacts adds argo artifacts to the output of the TestDefinitions's template.
// Artifacts whose name is already registered are skipped, so the template
// never carries duplicate outputs.
func (td *TestDefinition) AddOutputArtifacts(artifacts ...argov1alpha1.Artifact) {
	// Lazily initialize the dedup set.
	if td.outputArtifacts == nil {
		td.outputArtifacts = make(ArtifactSet)
	}
	for _, artifact := range artifacts {
		if td.outputArtifacts.Has(artifact.Name) {
			continue // keep the first registration, drop the duplicate
		}
		td.outputArtifacts.Add(artifact.Name)
		td.Template.Outputs.Artifacts = append(td.Template.Outputs.Artifacts, artifact)
	}
}
// AddInputParameter adds a parameter to the input of the TestDefinitions's template.
// Unlike the artifact helpers, no de-duplication is performed.
func (td *TestDefinition) AddInputParameter(name, value string) {
	td.Template.Inputs.Parameters = append(td.Template.Inputs.Parameters, argov1alpha1.Parameter{
		Name:  name,
		Value: argov1alpha1.AnyStringPtr(value)},
	)
}
// AddVolumeMount adds a mount to the container of the TestDefinitions's template.
// The volume itself must be registered separately (see AddVolume /
// AddVolumeFromConfig) under the same name.
func (td *TestDefinition) AddVolumeMount(name, path, subpath string, readOnly bool) {
	td.Template.Container.VolumeMounts = append(td.Template.Container.VolumeMounts, apiv1.VolumeMount{
		Name:      name,
		MountPath: path,
		SubPath:   subpath,
		ReadOnly:  readOnly,
	})
}
// AddVolume adds a volume to a TestDefinitions's template.
func (td *TestDefinition) AddVolume(volume apiv1.Volume) {
	td.Template.Volumes = append(td.Template.Volumes, volume)
}
// AddStdOutput adds the standard output artifacts (from
// GetStdOutputArtifacts) to the TestDefinition's template; the global flag
// is forwarded unchanged. (Previous comment said "Kubeconfig output", which
// did not match the call.)
func (td *TestDefinition) AddStdOutput(global bool) {
	td.AddOutputArtifacts(GetStdOutputArtifacts(global)...)
}
// GetConfig returns the config set collected for this test definition.
func (td *TestDefinition) GetConfig() config.Set {
	return td.config
}
// AddConfig adds the config elements of different types (environment variable) to the TestDefinitions's template.
// The elements are merged into the definition's config set; they are applied
// to the template when GetTemplate is called.
func (td *TestDefinition) AddConfig(configs []*config.Element) {
	for _, e := range configs {
		td.config.Add(e)
	}
}
// addConfigAsEnv applies a ConfigTypeEnv element to the template: the value
// (a literal, or sourced from a secret/configmap) becomes a container env
// var, and an input parameter is added purely so the value is visible in
// the argo UI.
func (td *TestDefinition) addConfigAsEnv(element *config.Element) {
	if element.Info.Value != "" {
		// add as input parameter to see parameters in argo ui
		td.AddInputParameter(element.Name(), fmt.Sprintf("%s: %s", element.Info.Name, element.Info.Value))
		td.AddEnvVars(apiv1.EnvVar{Name: element.Info.Name, Value: element.Info.Value})
	} else {
		// add as input parameter to see parameters in argo ui
		td.AddInputParameter(element.Name(), fmt.Sprintf("%s: %s", element.Info.Name, "from secret or configmap"))
		td.AddEnvVars(apiv1.EnvVar{
			Name: element.Info.Name,
			ValueFrom: &corev1.EnvVarSource{
				ConfigMapKeyRef: element.Info.ValueFrom.ConfigMapKeyRef,
				SecretKeyRef:    element.Info.ValueFrom.SecretKeyRef,
			},
		})
	}
}
// addConfigAsFile applies a ConfigTypeFile element to the template.
// A literal value is expected to be base64 and is mounted as a raw input
// artifact; a ValueFrom source is mounted via a volume. In both cases an
// env var with the config name points the test at the file path, and an
// input parameter is added for visibility in the argo UI.
func (td *TestDefinition) addConfigAsFile(element *config.Element) error {
	if element.Info.Value != "" {
		data, err := base64.StdEncoding.DecodeString(element.Info.Value)
		if err != nil {
			return fmt.Errorf("cannot decode value of %s: %s", element.Info.Name, err.Error())
		}
		// add as input parameter to see parameters in argo ui
		td.AddInputParameter(element.Name(), fmt.Sprintf("%s: %s", element.Info.Name, element.Info.Path))
		// Add the file path as env var with the config name to the pod
		td.AddEnvVars(apiv1.EnvVar{Name: element.Info.Name, Value: element.Info.Path})
		// Decoded content travels inline as a raw artifact.
		td.AddInputArtifacts(argov1alpha1.Artifact{
			Name: element.Name(),
			Path: element.Info.Path,
			ArtifactLocation: argov1alpha1.ArtifactLocation{
				Raw: &argov1alpha1.RawArtifact{
					Data: string(data),
				},
			},
		})
		return nil
	}
	if element.Info.ValueFrom != nil {
		// add as input parameter to see parameters in argo ui
		td.AddInputParameter(element.Name(), fmt.Sprintf("%s: %s", element.Info.Name, element.Info.Path))
		// Add the file path as env var with the config name to the pod
		td.AddEnvVars(apiv1.EnvVar{Name: element.Info.Name, Value: element.Info.Path})
		// Mount read-only; the subpath is the file's basename.
		td.AddVolumeMount(element.Name(), element.Info.Path, path.Base(element.Info.Path), true)
		return td.AddVolumeFromConfig(element)
	}
	// this should never happen as it is already validated
	return errors.New("either value or value from has to be defined")
}
// AddVolumeFromConfig builds the volume described by cfg and appends it to
// the definition's volumes.
func (td *TestDefinition) AddVolumeFromConfig(cfg *config.Element) error {
	vol, err := cfg.Volume()
	if err != nil {
		return err
	}
	td.Volumes = append(td.Volumes, *vol)
	return nil
}
// AddAnnotation sets a template metadata annotation for the testdefinition,
// lazily initializing the annotation map. (Previous comment described a
// non-existent "GetAnnotations".)
func (td *TestDefinition) AddAnnotation(key, value string) {
	if td.Template.Metadata.Annotations == nil {
		td.Template.Metadata.Annotations = make(map[string]string)
	}
	td.Template.Metadata.Annotations[key] = value
}
|
// Greet with the canonical Yeoman salutation.
console.log("'Allo 'Allo!");
|
#!/usr/bin/env bash
set -o nounset
set -o errexit
set -o pipefail
# For disconnected or otherwise unreachable environments, we want to
# have steps use an HTTP(S) proxy to reach the API server. This proxy
# configuration file should export HTTP_PROXY, HTTPS_PROXY, and NO_PROXY
# environment variables, as well as their lowercase equivalents (note
# that libcurl doesn't recognize the uppercase variables).
if test -f "${SHARED_DIR}/proxy-conf.sh"
then
# shellcheck disable=SC1090
source "${SHARED_DIR}/proxy-conf.sh"
fi
# Wait for the sriov-network-operator Deployment to report one ready replica
# (polled once a minute, up to 15 minutes), then wait for every container of
# the operator's pods to become ready (polled every 30s, up to 4 minutes).
# Exits the script with status 1 if either deadline passes.
# NOTE(review): "SNO" here appears to abbreviate sriov-network-operator, not
# Single Node OpenShift — confirm with the authors.
function wait_for_sriov_pods() {
    # Wait up to 15 minutes for SNO to be installed
    for _ in $(seq 1 15); do
        # `|| true` keeps errexit from killing the loop while the resource
        # does not exist yet.
        SNO_REPLICAS=$(oc get Deployment/sriov-network-operator -n openshift-sriov-network-operator -o jsonpath='{.status.readyReplicas}' || true)
        if [ "${SNO_REPLICAS}" == "1" ]; then
            FOUND_SNO=1
            break
        fi
        echo "Waiting for sriov-network-operator to be installed"
        sleep 60
    done
    if [ -n "${FOUND_SNO:-}" ] ; then
        # Wait for the pods to be started from the operator
        for _ in $(seq 1 8); do
            # Count containers whose ready flag is "false" across all pods.
            NOT_RUNNING_PODS=$(oc get pods --no-headers -n openshift-sriov-network-operator -o jsonpath='{.items[*].status.containerStatuses[*].ready}' | grep false | wc -l || true)
            if [ "${NOT_RUNNING_PODS}" == "0" ]; then
                OPERATOR_READY=true
                break
            fi
            echo "Waiting for sriov-network-operator pods to be started and running"
            sleep 30
        done
        if [ -n "${OPERATOR_READY:-}" ] ; then
            echo "sriov-network-operator pods were installed successfully"
        else
            echo "sriov-network-operator pods were not installed after 4 minutes"
            oc get pods -n openshift-sriov-network-operator
            exit 1
        fi
    else
        echo "sriov-network-operator was not installed after 15 minutes"
        exit 1
    fi
}
# Extract the cluster's minor version (e.g. "4.11") from `oc version` output.
oc_version=$(oc version | cut -d ' ' -f 3 | cut -d '.' -f1,2 | sed -n '2p')

case "${oc_version}" in
    # Remove 4.11 once it's GA
    4.11)
        echo "OpenShift 4.11 was detected"
        is_dev_version=1 ;;
    *) ;;
esac

if [ -n "${is_dev_version:-}" ]; then
    # Pre-GA release: deploy the operator from source at the matching
    # release branch instead of from the marketplace catalog.
    echo "The SR-IOV will be installed from Github using release-${oc_version} branch."
    git clone --branch release-${oc_version} https://github.com/openshift/sriov-network-operator /tmp/sriov-network-operator
    pushd /tmp/sriov-network-operator
    # Until https://github.com/openshift/sriov-network-operator/pull/613 merges
    cp manifests/stable/supported-nic-ids_v1_configmap.yaml deploy/configmap.yaml
    # We need to skip the bits where it tries to install Skopeo
    export SKIP_VAR_SET=1
    # We export the links of the images, since Skopeo can't be used in the CI container
    export SRIOV_CNI_IMAGE=quay.io/openshift/origin-sriov-cni:${oc_version}
    export SRIOV_INFINIBAND_CNI_IMAGE=quay.io/openshift/origin-sriov-infiniband-cni:${oc_version}
    export SRIOV_DEVICE_PLUGIN_IMAGE=quay.io/openshift/origin-sriov-network-device-plugin:${oc_version}
    export NETWORK_RESOURCES_INJECTOR_IMAGE=quay.io/openshift/origin-sriov-dp-admission-controller:${oc_version}
    export SRIOV_NETWORK_CONFIG_DAEMON_IMAGE=quay.io/openshift/origin-sriov-network-config-daemon:${oc_version}
    export SRIOV_NETWORK_WEBHOOK_IMAGE=quay.io/openshift/origin-sriov-network-webhook:${oc_version}
    export SRIOV_NETWORK_OPERATOR_IMAGE=quay.io/openshift/origin-sriov-network-operator:${oc_version}
    unset NAMESPACE
    # CLUSTER_TYPE is used by both openshift/release and the operator, so we need to unset it
    # to let the operator figure out which cluster type it is.
    unset CLUSTER_TYPE
    make deploy-setup
    popd
    wait_for_sriov_pods
else
    # GA release: install through OLM — Namespace, OperatorGroup, then a
    # Subscription against the redhat-operators catalog.
    SNO_NAMESPACE=$(
        oc create -f - -o jsonpath='{.metadata.name}' <<EOF
apiVersion: v1
kind: Namespace
metadata:
  name: openshift-sriov-network-operator
  annotations:
    workload.openshift.io/allowed: management
EOF
    )
    echo "Created \"$SNO_NAMESPACE\" Namespace"
    SNO_OPERATORGROUP=$(
        oc create -f - -o jsonpath='{.metadata.name}' <<EOF
apiVersion: operators.coreos.com/v1
kind: OperatorGroup
metadata:
  name: sriov-network-operators
  namespace: openshift-sriov-network-operator
spec:
  targetNamespaces:
  - openshift-sriov-network-operator
EOF
    )
    echo "Created \"$SNO_OPERATORGROUP\" OperatorGroup"
    # Subscription channel follows the cluster's x.y version.
    channel=$(oc version -o yaml | grep openshiftVersion | grep -o '[0-9]*[.][0-9]*' | head -1)
    SNO_SUBSCRIPTION=$(
        oc create -f - -o jsonpath='{.metadata.name}' <<EOF
apiVersion: operators.coreos.com/v1alpha1
kind: Subscription
metadata:
  name: sriov-network-operator-subscription
  namespace: openshift-sriov-network-operator
spec:
  channel: "${channel}"
  name: sriov-network-operator
  source: redhat-operators
  sourceNamespace: openshift-marketplace
EOF
    )
    echo "Created \"$SNO_SUBSCRIPTION\" Subscription"
    # Wait up to 15 minutes for SNO to be installed
    for _ in $(seq 1 90); do
        SNO_CSV=$(oc -n "${SNO_NAMESPACE}" get subscription "${SNO_SUBSCRIPTION}" -o jsonpath='{.status.installedCSV}' || true)
        if [ -n "$SNO_CSV" ]; then
            if [[ "$(oc -n "${SNO_NAMESPACE}" get csv "${SNO_CSV}" -o jsonpath='{.status.phase}')" == "Succeeded" ]]; then
                FOUND_SNO=1
                break
            fi
        fi
        echo "Waiting for sriov-network-operator to be installed"
        sleep 10
    done
    if [ -n "${FOUND_SNO:-}" ] ; then
        wait_for_sriov_pods
        echo "sriov-network-operator was installed successfully"
    else
        echo "sriov-network-operator was not installed after 15 minutes"
        exit 1
    fi
fi
|
#!/bin/sh
# First-login hook: run the Patchbox setup wizard exactly once, using a
# marker file under ~/.config to remember that it already ran.
if [ ! -e ~/.config/patchbox-wizard-run ]; then
    touch ~/.config/patchbox-wizard-run
    # Disable the "useful resources" MOTD snippet for subsequent logins.
    sudo chmod -x /etc/update-motd.d/21-patchbox-useful-resources
    sudo patchbox-config wizard
    # Redraw the screen and show the regular dynamic MOTD after the wizard.
    clear && cat /run/motd.dynamic
fi
|
<reponame>lgarciaaco/cos-fleetshard
package org.bf2.cos.fleetshard.operator.debezium;
import java.nio.charset.StandardCharsets;
import java.util.Base64;
import java.util.Map;
import java.util.function.Consumer;
import java.util.stream.Stream;
import org.bf2.cos.fleetshard.api.ConnectorStatusSpec;
import org.bf2.cos.fleetshard.api.DeploymentSpecBuilder;
import org.bf2.cos.fleetshard.api.KafkaSpecBuilder;
import org.bf2.cos.fleetshard.api.ManagedConnector;
import org.bf2.cos.fleetshard.api.ManagedConnectorBuilder;
import org.bf2.cos.fleetshard.api.ManagedConnectorSpecBuilder;
import org.bf2.cos.fleetshard.api.ServiceAccountSpecBuilder;
import org.bf2.cos.fleetshard.operator.connector.ConnectorConfiguration;
import org.bf2.cos.fleetshard.operator.debezium.model.AbstractApicurioConverter;
import org.bf2.cos.fleetshard.operator.debezium.model.ApicurioAvroConverter;
import org.bf2.cos.fleetshard.operator.debezium.model.ApicurioJsonConverter;
import org.bf2.cos.fleetshard.operator.debezium.model.DebeziumDataShape;
import org.bf2.cos.fleetshard.operator.debezium.model.KafkaConnectJsonConverter;
import org.bf2.cos.fleetshard.operator.debezium.model.KafkaConnectorStatus;
import org.bf2.cos.fleetshard.operator.debezium.model.KeyAndValueConverters;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import org.mockito.Mockito;
import com.fasterxml.jackson.databind.node.ObjectNode;
import io.fabric8.kubernetes.api.model.ObjectMetaBuilder;
import io.fabric8.kubernetes.client.KubernetesClient;
import io.fabric8.kubernetes.client.utils.Serialization;
import io.strimzi.api.kafka.model.Constants;
import io.strimzi.api.kafka.model.KafkaConnect;
import io.strimzi.api.kafka.model.KafkaConnector;
import io.strimzi.api.kafka.model.KafkaConnectorBuilder;
import io.strimzi.api.kafka.model.status.ConditionBuilder;
import io.strimzi.api.kafka.model.status.KafkaConnectorStatusBuilder;
import static org.assertj.core.api.Assertions.assertThat;
import static org.bf2.cos.fleetshard.api.ManagedConnector.DESIRED_STATE_READY;
import static org.bf2.cos.fleetshard.operator.debezium.DebeziumConstants.EXTERNAL_CONFIG_FILE;
import static org.junit.jupiter.params.provider.Arguments.arguments;
/**
 * Tests for {@code DebeziumOperandController}: the resource types it
 * declares, the Kubernetes resources produced by {@code doReify} for the
 * supported data shapes (Avro, JSON with schema, schemaless JSON), and the
 * mapping of Kafka connector conditions to managed-connector phases.
 */
public class DebeziumOperandControllerTest {
    // Fixture identifiers and connector coordinates shared by the tests.
    private static final String DEFAULT_MANAGED_CONNECTOR_ID = "mid";
    private static final Long DEFAULT_CONNECTOR_REVISION = 1L;
    private static final String DEFAULT_CONNECTOR_TYPE_ID = "ctid";
    private static final String DEFAULT_CONNECTOR_IMAGE = "quay.io/cos/pg:1";
    private static final String DEFAULT_DEPLOYMENT_ID = "1";
    private static final Long DEFAULT_DEPLOYMENT_REVISION = 1L;
    private static final String CLIENT_ID = "kcid";
    // The service-account secret is supplied base64-encoded.
    private static final String CLIENT_SECRET = Base64.getEncoder().encodeToString("kcs".getBytes(StandardCharsets.UTF_8));
    private static final String DEFAULT_KAFKA_SERVER = "kafka.acme.com:2181";
    private static final String PG_CLASS = "io.debezium.connector.postgresql.PostgresConnector";
    private static final String SCHEMA_REGISTRY_URL = "https://bu98.serviceregistry.rhcloud.com/t/51eba005-daft-punk-afe1-b2178bcb523d/apis/registry/v2";
    private static final String SCHEMA_REGISTRY_ID = "9bsv0s0k8lng031se9q0";

    // Minimal operand configuration: both sections resolve to empty maps.
    private static final DebeziumOperandConfiguration CONFIGURATION = new DebeziumOperandConfiguration() {
        @Override
        public KafkaConnect kafkaConnect() {
            return Map::of;
        }

        @Override
        public KafkaConnector kafkaConnector() {
            return Map::of;
        }
    };

    /**
     * Argument source for the {@link #computeStatus} parameterized test:
     * (connector state, condition type, condition reason, expected phase).
     */
    public static Stream<Arguments> computeStatus() {
        return Stream.of(
            arguments(
                KafkaConnectorStatus.STATE_RUNNING,
                "Ready",
                "reason",
                ManagedConnector.STATE_READY),
            arguments(
                KafkaConnectorStatus.STATE_RUNNING,
                "NotReady",
                "reason",
                ManagedConnector.STATE_PROVISIONING),
            arguments(
                KafkaConnectorStatus.STATE_RUNNING,
                "NotReady",
                "ConnectRestException",
                ManagedConnector.STATE_FAILED),
            arguments(
                KafkaConnectorStatus.STATE_FAILED,
                "Foo",
                "Bar",
                ManagedConnector.STATE_FAILED),
            arguments(
                KafkaConnectorStatus.STATE_PAUSED,
                "Foo",
                "Bar",
                ManagedConnector.STATE_STOPPED),
            arguments(
                KafkaConnectorStatus.STATE_UNASSIGNED,
                "Foo",
                "Bar",
                ManagedConnector.STATE_PROVISIONING));
    }

    /** The controller watches exactly KafkaConnect and KafkaConnector. */
    @Test
    void declaresExpectedResourceTypes() {
        KubernetesClient kubernetesClient = Mockito.mock(KubernetesClient.class);
        DebeziumOperandController controller = new DebeziumOperandController(kubernetesClient, CONFIGURATION);

        assertThat(controller.getResourceTypes())
            .hasSize(2)
            .anyMatch(ctx -> Constants.RESOURCE_GROUP_NAME.equals(ctx.getGroup())
                && KafkaConnect.CONSUMED_VERSION.equals(ctx.getVersion())
                && KafkaConnect.RESOURCE_KIND.equals(ctx.getKind()))
            .anyMatch(ctx -> Constants.RESOURCE_GROUP_NAME.equals(ctx.getGroup())
                && KafkaConnector.CONSUMED_VERSION.equals(ctx.getVersion())
                && KafkaConnector.RESOURCE_KIND.equals(ctx.getKind()));
    }

    /** Baseline Debezium Postgres connector configuration used by the tests. */
    private ObjectNode getSpec() {
        var spec = Serialization.jsonMapper().createObjectNode()
            .put("database.hostname", "orderdb")
            .put("database.port", "5432")
            .put("database.user", "orderuser")
            .put("database.dbname", "orderdb")
            .put("database.server.name", "dbserver1")
            .put("schema.include.list", "purchaseorder")
            .put("table.include.list", "purchaseorder.outboxevent")
            .put("tombstones.on.delete", "false")
            .put("transforms", "saga")
            .put("transforms.saga.type", "io.debezium.transforms.outbox.EventRouter")
            .put("transforms.saga.route.topic.replacement", "${routedByValue}.request")
            .put("poll.interval.ms", "100")
            .put("consumer.interceptor.classes", "io.opentracing.contrib.kafka.TracingConsumerInterceptor")
            .put("producer.interceptor.classes", "io.opentracing.contrib.kafka.TracingProducerInterceptor");
        // Password is supplied as a base64-wrapped value object.
        var pwdB64 = Base64.getEncoder().encodeToString("orderpw".getBytes(StandardCharsets.UTF_8));
        spec.with("database.password").put("kind", "base64").put("value", pwdB64);
        return spec;
    }

    // Helpers that stamp the requested data shape onto the base config.
    private ObjectNode addAvroToConnectorConfig(ObjectNode baseConfig) {
        baseConfig.with("data_shape").put("key", "AVRO").put("value", "AVRO");
        return baseConfig;
    }

    private ObjectNode addJsonWithSchemaToConnectorConfig(ObjectNode baseConfig) {
        baseConfig.with("data_shape").put("key", "JSON").put("value", "JSON");
        return baseConfig;
    }

    private ObjectNode addSchemalessJsonToConnectorConfig(ObjectNode baseConfig) {
        baseConfig.with("data_shape").put("key", "JSON without schema").put("value", "JSON_WITHOUT_SCHEMA");
        return baseConfig;
    }

    /**
     * Runs {@code doReify} against a fully-populated ManagedConnector, checks
     * the invariants common to every data shape (a KafkaConnect, a
     * KafkaConnector and a Secret are produced; image, pull secret and
     * password indirection are set), then delegates shape-specific checks to
     * {@code kafkaConnectChecks}.
     */
    void reify(ObjectNode connectorConfig, Consumer<KafkaConnect> kafkaConnectChecks) {
        KubernetesClient kubernetesClient = Mockito.mock(KubernetesClient.class);
        DebeziumOperandController controller = new DebeziumOperandController(kubernetesClient, CONFIGURATION);
        var resources = controller.doReify(
            new ManagedConnectorBuilder()
                .withMetadata(new ObjectMetaBuilder()
                    .withName(DEFAULT_MANAGED_CONNECTOR_ID)
                    .build())
                .withSpec(new ManagedConnectorSpecBuilder()
                    .withConnectorId(DEFAULT_MANAGED_CONNECTOR_ID)
                    .withDeploymentId(DEFAULT_DEPLOYMENT_ID)
                    .withDeployment(new DeploymentSpecBuilder()
                        .withConnectorTypeId(DEFAULT_CONNECTOR_TYPE_ID)
                        .withSecret("secret")
                        .withKafka(new KafkaSpecBuilder().withUrl(DEFAULT_KAFKA_SERVER).build())
                        .withNewSchemaRegistry(SCHEMA_REGISTRY_ID, SCHEMA_REGISTRY_URL)
                        .withConnectorResourceVersion(DEFAULT_CONNECTOR_REVISION)
                        .withDeploymentResourceVersion(DEFAULT_DEPLOYMENT_REVISION)
                        .withDesiredState(DESIRED_STATE_READY)
                        .build())
                    .build())
                .build(),
            new org.bf2.cos.fleetshard.operator.debezium.DebeziumShardMetadataBuilder()
                .withContainerImage(DEFAULT_CONNECTOR_IMAGE)
                .withConnectorClass(PG_CLASS)
                .build(),
            new ConnectorConfiguration<>(connectorConfig, ObjectNode.class,
                DebeziumDataShape.class),
            new ServiceAccountSpecBuilder()
                .withClientId(CLIENT_ID)
                .withClientSecret(CLIENT_SECRET)
                .build());

        assertThat(resources)
            .anyMatch(DebeziumOperandSupport::isKafkaConnect)
            .anyMatch(DebeziumOperandSupport::isKafkaConnector)
            .anyMatch(DebeziumOperandSupport::isSecret);

        assertThat(resources)
            .filteredOn(DebeziumOperandSupport::isKafkaConnect)
            .hasSize(1)
            .first()
            .isInstanceOfSatisfying(KafkaConnect.class, kc -> {
                assertThat(kc.getSpec().getImage()).isEqualTo(DEFAULT_CONNECTOR_IMAGE);
            });

        assertThat(resources)
            .filteredOn(DebeziumOperandSupport::isKafkaConnect)
            .hasSize(1)
            .first()
            .isInstanceOfSatisfying(KafkaConnect.class, kc -> {
                assertThat(kc.getSpec().getTemplate().getPod().getImagePullSecrets())
                    .contains(DebeziumConstants.IMAGE_PULL_SECRET);
            });

        // The password must be referenced via Kafka's file config provider,
        // never inlined into the connector config.
        assertThat(resources)
            .filteredOn(DebeziumOperandSupport::isKafkaConnector)
            .hasSize(1)
            .first()
            .isInstanceOfSatisfying(KafkaConnector.class, kc -> assertThat(kc.getSpec().getConfig()).containsEntry(
                "database.password",
                "${file:/opt/kafka/external-configuration/"
                    + DebeziumConstants.EXTERNAL_CONFIG_DIRECTORY
                    + "/"
                    + EXTERNAL_CONFIG_FILE
                    + ":database.password}"));

        assertThat(resources)
            .filteredOn(DebeziumOperandSupport::isKafkaConnect)
            .hasSize(1)
            .first()
            .isInstanceOfSatisfying(KafkaConnect.class, kafkaConnectChecks);
    }

    /** Schemaless JSON uses the plain Kafka Connect JSON converter with schemas disabled. */
    @Test
    void testReifyWithSchemalessJson() {
        this.reify(addSchemalessJsonToConnectorConfig(getSpec()),
            kafkaConnect -> {
                assertThat(kafkaConnect.getSpec().getConfig()).containsEntry(KeyAndValueConverters.PROPERTY_KEY_CONVERTER,
                    KafkaConnectJsonConverter.CONVERTER_CLASS);
                assertThat(kafkaConnect.getSpec().getConfig())
                    .containsEntry(KeyAndValueConverters.PROPERTY_KEY_CONVERTER + ".schemas.enable", "false");
                assertThat(kafkaConnect.getSpec().getConfig()).containsEntry(KeyAndValueConverters.PROPERTY_VALUE_CONVERTER,
                    KafkaConnectJsonConverter.CONVERTER_CLASS);
                assertThat(kafkaConnect.getSpec().getConfig())
                    .containsEntry(KeyAndValueConverters.PROPERTY_VALUE_CONVERTER + ".schemas.enable", "false");
            });
    }

    /**
     * Shared assertions for the Apicurio-based converters (Avro / JSON with
     * schema): converter class plus registry URL, auth and auto-register
     * settings for both the key and the value converter.
     */
    private Consumer<KafkaConnect> getApicurioChecks(String converterClass) {
        return kafkaConnect -> {
            assertThat(kafkaConnect.getSpec().getConfig()).containsEntry(KeyAndValueConverters.PROPERTY_KEY_CONVERTER,
                converterClass);
            assertThat(kafkaConnect.getSpec().getConfig()).containsEntry(KeyAndValueConverters.PROPERTY_VALUE_CONVERTER,
                converterClass);
            assertThat(kafkaConnect.getSpec().getConfig()).containsEntry(
                KeyAndValueConverters.PROPERTY_KEY_CONVERTER + ".apicurio.auth.service.url",
                AbstractApicurioConverter.APICURIO_AUTH_SERVICE_URL);
            assertThat(kafkaConnect.getSpec().getConfig()).containsEntry(
                KeyAndValueConverters.PROPERTY_VALUE_CONVERTER + ".apicurio.auth.service.url",
                AbstractApicurioConverter.APICURIO_AUTH_SERVICE_URL);
            assertThat(kafkaConnect.getSpec().getConfig()).containsEntry(
                KeyAndValueConverters.PROPERTY_KEY_CONVERTER + ".apicurio.auth.realm", "rhoas");
            assertThat(kafkaConnect.getSpec().getConfig()).containsEntry(
                KeyAndValueConverters.PROPERTY_VALUE_CONVERTER + ".apicurio.auth.realm", "rhoas");
            assertThat(kafkaConnect.getSpec().getConfig()).containsEntry(
                KeyAndValueConverters.PROPERTY_KEY_CONVERTER + ".apicurio.registry.url",
                SCHEMA_REGISTRY_URL);
            assertThat(kafkaConnect.getSpec().getConfig()).containsEntry(
                KeyAndValueConverters.PROPERTY_VALUE_CONVERTER + ".apicurio.registry.url",
                SCHEMA_REGISTRY_URL);
            assertThat(kafkaConnect.getSpec().getConfig()).containsEntry(
                KeyAndValueConverters.PROPERTY_KEY_CONVERTER + ".apicurio.auth.client.id",
                CLIENT_ID);
            assertThat(kafkaConnect.getSpec().getConfig()).containsEntry(
                KeyAndValueConverters.PROPERTY_VALUE_CONVERTER + ".apicurio.auth.client.id",
                CLIENT_ID);
            // The client secret is read through the directory config provider
            // rather than being written into the spec.
            assertThat(kafkaConnect.getSpec().getConfig()).containsEntry(
                KeyAndValueConverters.PROPERTY_KEY_CONVERTER + ".apicurio.auth.client.secret",
                "${dir:/opt/kafka/external-configuration/connector-configuration:_kafka.client.secret}");
            assertThat(kafkaConnect.getSpec().getConfig()).containsEntry(
                KeyAndValueConverters.PROPERTY_VALUE_CONVERTER + ".apicurio.auth.client.secret",
                "${dir:/opt/kafka/external-configuration/connector-configuration:_kafka.client.secret}");
            assertThat(kafkaConnect.getSpec().getConfig()).containsEntry(
                KeyAndValueConverters.PROPERTY_KEY_CONVERTER + ".apicurio.registry.auto-register", "true");
            assertThat(kafkaConnect.getSpec().getConfig()).containsEntry(
                KeyAndValueConverters.PROPERTY_VALUE_CONVERTER + ".apicurio.registry.auto-register", "true");
            assertThat(kafkaConnect.getSpec().getConfig()).containsEntry(
                KeyAndValueConverters.PROPERTY_KEY_CONVERTER + ".apicurio.registry.find-latest", "true");
            assertThat(kafkaConnect.getSpec().getConfig()).containsEntry(
                KeyAndValueConverters.PROPERTY_VALUE_CONVERTER + ".apicurio.registry.find-latest", "true");
        };
    }

    @Test
    void testReifyWithAvro() {
        this.reify(addAvroToConnectorConfig(getSpec()), getApicurioChecks(ApicurioAvroConverter.CONVERTER_CLASS));
    }

    @Test
    void testReifyWithJsonWithSchema() {
        this.reify(addJsonWithSchemaToConnectorConfig(getSpec()), getApicurioChecks(ApicurioJsonConverter.CONVERTER_CLASS));
    }

    /**
     * Verifies that DebeziumOperandSupport.computeStatus maps the Kafka
     * connector state plus the resource condition onto the expected
     * managed-connector phase (see {@link #computeStatus()} for the cases).
     */
    @ParameterizedTest
    @MethodSource
    void computeStatus(
        String connectorState,
        String conditionType,
        String conditionReason,
        String expectedConnectorState) {
        ConnectorStatusSpec status = new ConnectorStatusSpec();

        DebeziumOperandSupport.computeStatus(
            status,
            new KafkaConnectorBuilder()
                .withStatus(new KafkaConnectorStatusBuilder()
                    .addToConditions(new ConditionBuilder()
                        .withType(conditionType)
                        .withReason(conditionReason)
                        .build())
                    .addToConnectorStatus("connector",
                        new org.bf2.cos.fleetshard.operator.debezium.model.KafkaConnectorStatusBuilder()
                            .withState(connectorState)
                            .build())
                    .build())
                .build());

        assertThat(status.getPhase()).isEqualTo(expectedConnectorState);
        assertThat(status.getConditions()).anySatisfy(condition -> {
            assertThat(condition)
                .hasFieldOrPropertyWithValue("type", conditionType)
                .hasFieldOrPropertyWithValue("reason", conditionReason);
        });
    }
}
|
// Navigate to the add-post form when the "#add-post" control is clicked,
// suppressing the element's default click behavior first.
const addPostHandler = (event) => {
  event.preventDefault();
  document.location.replace('/dashboard/add-post');
};

document.querySelector('#add-post').addEventListener('click', addPostHandler);
#!/usr/bin/env bash
# ------------------------------------------------------------------------------
#
# Program: initpost.sh
# Author: Vitor Britto
# Description: script to create an initial structure for my posts.
#
# Usage: ./initpost.sh [options] <post name>
#
# Options:
# -h, --help output instructions
# -c, --create create post
#
# Alias: alias ipost="bash ~/path/to/script/initpost.sh"
#
# Example:
# ./initpost.sh -c How to replace strings with sed
#
# Important Notes:
# - This script was created to generate new markdown files for my blog.
#
# ------------------------------------------------------------------------------
# ------------------------------------------------------------------------------
# | VARIABLES |
# ------------------------------------------------------------------------------
# CORE: Do not change these lines
# ----------------------------------------------------------------
# Post title = all script arguments after the option flag, joined by spaces.
POST_TITLE="${@:2:$(($#-1))}"
# Slug: spaces -> dashes, then lowercased.
POST_NAME="$(echo ${@:2:$(($#-1))} | sed -e 's/ /-/g' | sed "y/ABCDEFGHIJKLMNOPQRSTUVWXYZ/abcdefghijklmnopqrstuvwxyz/")"
CURRENT_DATE="$(date +'%Y-%m-%d')"
TIME=$(date +"%T")
# Jekyll-style file name: YYYY-MM-DD-slug.md
FILE_NAME="${CURRENT_DATE}-${POST_NAME}.md"

# ----------------------------------------------------------------
# SETTINGS: your configuration goes here
# ----------------------------------------------------------------

# Set your destination folder
BINPATH=$(cd `dirname $0`; pwd)
POSTPATH="${BINPATH}/_posts"
DIST_FOLDER="$POSTPATH"

# Set your blog URL
BLOG_URL="your_site"

# Set your assets URL
ASSETS_URL="assets/img/"
# ----------------------------------------------------------------
# ------------------------------------------------------------------------------
# | UTILS |
# ------------------------------------------------------------------------------
# Colored logging helpers. Each prints one formatted line per argument.
# NOTE(review): `tput setaf 38` is outside the basic 8-color range — on
# terminals without 256-color support this may be a no-op; confirm intent.

# Header logging
e_header() {
    printf "$(tput setaf 38)→ %s$(tput sgr0)\n" "$@"
}

# Success logging
e_success() {
    printf "$(tput setaf 76)✔ %s$(tput sgr0)\n" "$@"
}

# Error logging
e_error() {
    printf "$(tput setaf 1)✖ %s$(tput sgr0)\n" "$@"
}

# Warning logging
e_warning() {
    printf "$(tput setaf 3)! %s$(tput sgr0)\n" "$@"
}
# ------------------------------------------------------------------------------
# | MAIN FUNCTIONS |
# ------------------------------------------------------------------------------
# Everybody need some help
# Everybody need some help
# Print the usage/help text to stdout.
initpost_help() {
    cat <<EOT
------------------------------------------------------------------------------
INIT POST - A shortcut to create an initial structure for my posts.
------------------------------------------------------------------------------
Usage: ./initpost.sh [options] <post name>
Options:
-h, --help output instructions
-c, --create create post
Example:
./initpost.sh -c How to replace strings with sed
Important Notes:
- This script was created to generate new text files to my blog.
Copyright (c) Vitor Britto
Licensed under the MIT license.
------------------------------------------------------------------------------
EOT
}
# Initial Content
# Emit the Jekyll front matter for a new post as a single here-document
# (title, date and time come from the globals computed at startup).
initpost_content() {
    cat <<EOT
---
layout: post
title: "${POST_TITLE}"
date: ${CURRENT_DATE} ${TIME}
image: '/assets/img/'
description:
tags:
categories:
twitter_text:
---
EOT
}
# Create file
# Write the rendered front matter to the destination post file, refusing to
# overwrite an existing post.
initpost_file() {
    # Check the actual destination path; the previous check tested
    # "$FILE_NAME" relative to the CWD, so an existing post in
    # ${DIST_FOLDER} was silently overwritten.
    if [ ! -f "${DIST_FOLDER}/${FILE_NAME}" ]; then
        e_header "Creating template..."
        initpost_content > "${DIST_FOLDER}/${FILE_NAME}"
        e_success "Initial post successfully created!"
    else
        e_warning "File already exist."
        exit 1
    fi
}
# ------------------------------------------------------------------------------
# | INITIALIZE PROGRAM |
# ------------------------------------------------------------------------------

# Dispatch on the first command-line flag: help text or post creation.
main() {
    case "${1:-}" in
        -h|--help)
            initpost_help
            exit
            ;;
        -c|--create)
            initpost_file "$@"
            exit
            ;;
    esac
}

# Initialize
main $*
<reponame>DrItanium/durandal
#ifndef _rampancy_llvm_ir_compiler_h
#define _rampancy_llvm_ir_compiler_h
#include "llvm/Module.h"
#include "llvm/ADT/OwningPtr.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/ExecutionEngine/JIT.h"
#include "llvm/ExecutionEngine/ExecutionEngine.h"
#include "llvm/Support/ManagedStatic.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Support/Host.h"
#include "llvm/Support/Path.h"
#include "llvm/Support/TargetSelect.h"
#include "llvm/Support/IRReader.h"
#include "ExpertSystem/KnowledgeConstructor.h"
#include "ExpertSystem/FunctionNamer.h"
#include "ExpertSystem/KnowledgeConstructionEngine.h"
#include "ExpertSystem/CLIPSEnvironment.h"
#include "rampancy/Compiler.h"
namespace rampancy {
   // Compiler front end for LLVM IR: produces llvm::Module objects from IR
   // input (see rampancy/Compiler.h for the base-class contract).
   class LLVMIRCompiler : public Compiler {
      public:
         LLVMIRCompiler();
         // Bring the base-class overload sets into scope so the overloads
         // declared below do not hide them.
         using Compiler::compile;
         using Compiler::interpret;
         virtual llvm::Module* compile();
         virtual llvm::Module* compile(int argc, char** argv);
         // Compile the named file; parse problems are reported through err.
         llvm::Module* compile(const std::string &fileName,
               llvm::SMDiagnostic &err);
         virtual llvm::Module* interpret();
         // Interpret IR supplied directly as a string.
         virtual llvm::Module* interpret(llvm::StringRef input);
   };
}
#endif
|
#!/bin/bash
# SLURM batch job: runs discrete-action DDPG on Acrobot-v1 with hard target
# copies and action-noise exploration (seed 3, run 8). The #SBATCH lines
# below are scheduler directives — do not edit casually.
#SBATCH --account=def-dkulic
#SBATCH --mem=8000M # memory per node
#SBATCH --time=10:00:00 # time (DD-HH:MM)
#SBATCH --output=/project/6001934/lingheng/Double_DDPG_Job_output/discrete_Acrobot-v1_ddpg_hardcopy_action_noise_seed3_run8_%N-%j.out # %N for node name, %j for jobID

# Load the toolchain, activate the CPU TensorFlow virtualenv, then train.
module load qt/5.9.6 python/3.6.3 nixpkgs/16.09 gcc/7.3.0 boost/1.68.0 cuda cudnn
source ~/tf_cpu/bin/activate
python ./ddpg_discrete_action.py --env Acrobot-v1 --random-seed 3 --exploration-strategy action_noise --summary-dir ../Double_DDPG_Results_no_monitor/discrete/Acrobot-v1/ddpg_hardcopy_action_noise_seed3_run8 --double-ddpg-flag --target-hard-copy-flag
|
package com.acgist.snail.net.torrent.tracker;
import org.junit.jupiter.api.Test;
import com.acgist.snail.context.TorrentContext;
import com.acgist.snail.context.TrackerContext;
import com.acgist.snail.context.exception.DownloadException;
import com.acgist.snail.context.exception.NetException;
import com.acgist.snail.pojo.session.TorrentSession;
import com.acgist.snail.pojo.session.TrackerSession;
import com.acgist.snail.utils.Performance;
/**
 * Manual test that announces a local torrent to a UDP tracker (IPv6 address
 * hard-coded below). Requires network access and the torrent file on disk,
 * so it is environment-dependent rather than a pure unit test.
 */
public class UdpTrackerSessionTest extends Performance {

    @Test
    public void testAnnounce() throws NetException, DownloadException {
        // NOTE(review): Windows-specific absolute path — this test only runs
        // on a machine where the torrent file exists at this location.
        final String path = "E:/snail/902FFAA29EE632C8DC966ED9AB573409BA9A518E.torrent";
        // Alternative trackers kept for manual switching:
        // final String announceUrl = "udp://explodie.org:6969/announce";
        // final String announceUrl = "udp://tracker.moeking.me:6969/announce";
        // final String announceUrl = "udp://retracker.akado-ural.ru/announce";
        final String announceUrl = "udp://[2001:19f0:6c01:1b7d:5400:1ff:fefc:3c2a]:6969/announce";
        final TorrentSession torrentSession = TorrentContext.getInstance().newTorrentSession(path);
        // Pick the session registered for exactly this announce URL.
        final var list = TrackerContext.getInstance().sessions(announceUrl);
        final TrackerSession session = list.stream()
            .filter(value -> value.equalsAnnounceUrl(announceUrl))
            .findFirst()
            .get();
        session.started(1000, torrentSession);
        // session.scrape(1000, torrentSession);
        this.pause();
    }
}
|
#!/bin/bash
# Parse --utts <file> and --transdir <dir>, set up the Kaldi/OpenFst
# environment, then run the Arabic-to-IPA conversion script.

#argument handling
while [ "$1" != "" ]; do
    case $1 in
        --utts)
            shift
            utts=$1
            ;;
        --transdir)
            shift
            dir=$1
            ;;
        *)
            # NOTE(review): unknown arguments only warn; the loop keeps
            # consuming — confirm whether this should exit instead.
            echo "unknown argument" >&2
    esac
    shift
done

. ./path.sh

# Expose OpenFst headers/libs and the phonetisaurus tools on the relevant
# search paths (KALDI_ROOT and SBS_DATADIR come from path.sh/environment).
export CPLUS_INCLUDE_PATH=${KALDI_ROOT}/tools/openfst/include
export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:${CPLUS_INCLUDE_PATH}/fst:${KALDI_ROOT}/tools/openfst/lib
export PATH=${SBS_DATADIR}/rsloan/phonetisaurus-0.8a/bin:${SBS_DATADIR}/prefix/bin:$PATH
export PYTHONPATH=${SBS_DATADIR}/rsloan/prefix/lib/python2.7/site-packages

python local/ar_to_ipa.py $dir $utts
|
<reponame>thebrianmoore/ledger-app-bytecoin
/*******************************************************************************
* Bytecoin Wallet for Ledger Nano S
* (c) 2018 - 2019 The Bytecoin developers
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
********************************************************************************/
#ifndef BYTECOIN_DEBUG_H
#define BYTECOIN_DEBUG_H

#include "os.h"

/* Stringification helpers: STR yields the literal token text, XSTR expands
 * the macro argument first. */
#define STR(x) #x
#define XSTR(x) STR(x)

/* Print a primitive's .data member as hex, prefixed with its name.
 * NOTE(review): sizeof((primitive).data) assumes .data is an array, not a
 * pointer — confirm at call sites. */
#define PRINT_PRIMITIVE(primitive) \
  PRINTF(XSTR(primitive)": %.*h\n", sizeof((primitive).data), (primitive).data);

/* Print a buffer as hex; sizeof(buf) requires buf to be an actual array in
 * the calling scope (a pointer would yield its own size instead). */
#define PRINT_BUF(buf) \
  PRINTF(XSTR(buf)": %.*h\n", sizeof(buf), (buf));

#endif // BYTECOIN_DEBUG_H
|
<reponame>NVIDIA-Jetson/jetson-trashformers
#ifndef DETECTNETCAMERA_H_
#define DETECTNETCAMERA_H_
#include <stdint.h>
#include <string>
// Signal handler; the signal number is passed in signo.
void sig_handler(int signo);

// Program entry point for the detection camera application.
int main( int argc, char** argv );

// Run the detection network identified by modelNum; returns a status code.
int runDetectNet(std::string modelNum);

// Accessors for detection results (bounding boxes, counts, confidences).
float** getBoundingBoxArray();
int* getNumBoundingBox();
bool getStopSignal();
float* getConfCPU();

// Loop synchronization flag accessors.
bool getLoopLock();
void setLoopLock(bool lockState);

// Camera selection and properties.
void switchCamera();
uint32_t getCameraHeight();
uint32_t getCameraWidth();
bool isCameraLoaded();
void setCameraPorts(int default_source1, int source2);
bool isCurrentCamBottomCam();

#endif
|
#!/bin/bash
# Dotfiles bootstrap: exit cleanly on Ctrl-C; `refreshed` tracks whether the
# package index has been updated in this run.
trap "exit" INT;
refreshed=false;

# Check for if the program exists or not
# (returns success when "$1" is NOT on the PATH).
function does_not_exist() {
    ! which "$1" &> /dev/null
}
# Install a system package if its binary is missing. NOTE: the function name
# `install` shadows the `$install` variable (the distro install command) —
# inside the body `$install` still expands to the variable.
function install() {
    # Sometimes the name of the program is not the name of the binary
    # Handle that by checking for a second value to use for the display
    # name, but use the first one as the install name
    if [ "$#" -ne 2 ]; then
        prog=$1;
    else
        prog=$2;
    fi
    echo "🔎 Checking if $prog is installed.";
    if does_not_exist "$prog"; then
        # Refresh the package index once per run.
        if [ "$refreshed" == "false" ]; then
            $update &> /dev/null;
            refreshed=true;
        fi
        echo "❌ $prog is not installed. Installing.";
        $install "$1" &> /dev/null;
        echo "✔️ $prog is installed.";
    else
        echo "✔️ $prog is already installed. Skipping.";
    fi
}
# Install a Rust program with cargo if its binary is missing. The crate name
# is always $1; an optional second argument supplies the binary/display name
# when it differs from the crate name.
function cargo_install() {
    if [ "$#" -eq 2 ]; then
        prog=$2;
    else
        prog=$1;
    fi
    echo "🔎 Checking if $prog is installed."
    if does_not_exist "$prog"; then
        # Refresh the package index once per run before the first install.
        if [ "$refreshed" == "false" ]; then
            $update;
            refreshed=true;
        fi
        echo "❌ $prog is not installed. Installing."
        cargo install "$1" &> /dev/null;
        echo "✔️ $prog is installed.";
    else
        echo "✔️ $prog is already installed. Skipping.";
    fi
}
# Install neovim using a distro-specific path: an AUR nightly build on Arch
# (via yay built from the bundled submodule), or the unstable PPA on Ubuntu.
function install_neovim() {
    echo "🔎 Checking if neovim is installed.";
    if does_not_exist "nvim"; then
        if [ "$refreshed" == "false" ]; then
            $update;
            refreshed=true;
        fi
        echo "❌ neovim is not installed. Installing.";
        if [ "$operating_system" == "Arch Linux" ]; then
            # Build and install yay from the local checkout, then use it to
            # fetch the neovim nightly binary package.
            cd yay || exit;
            makepkg --syncdeps &> /dev/null;
            yes | makepkg --install &> /dev/null;
            yay -Syy neovim-nightly-bin &> /dev/null;
            cd - || exit;
        elif [ "$operating_system" == "Ubuntu" ]; then
            sudo add-apt-repository ppa:neovim-ppa/unstable;
            sudo apt-get update;
            sudo apt-get install neovim;
        else
            echo Unsupported OS for dotfiles;
            exit 1;
        fi
        echo "✔️ neovim is installed.";
    else
        echo "✔️ neovim is already installed. Skipping.";
    fi
}
# Symlink a file from $config_dir into the current directory if it is not
# already present.
function symlink_file() {
    echo "🔎 Checking if $1 is symlinked.";
    # Quote "$1": unquoted it word-splits and globs on names with spaces
    # or wildcard characters.
    if [ ! -f "$1" ]; then
        echo "❌ $1 is not symlinked. Symlinking.";
        ln -s "$config_dir/$1" .
        echo "✔️ $1 is symlinked.";
    else
        echo "✔️ $1 is already symlinked. Skipping.";
    fi
}
# Symlink a directory from $config_dir into the current directory if it is
# not already present; an optional second argument names an install step to
# run after the link is created.
function symlink_dir() {
    echo "🔎 Checking if $1 is symlinked.";
    # Quote "$1": unquoted it word-splits and globs on names with spaces
    # or wildcard characters.
    if [ ! -d "$1" ]; then
        echo "❌ $1 is not symlinked. Symlinking.";
        ln -s "$config_dir/$1" .
        echo "✔️ $1 is symlinked.";
        if [ "$#" -eq 2 ]; then
            echo "🏃 Running install step for $1";
            $2;
        fi
    else
        echo "✔️ $1 is already symlinked. Skipping.";
    fi
}
# Post-symlink step: build the fzf binary from its checkout.
function install_fzf() {
    cd .fzf || exit;
    ./install --bin &> /dev/null;
    cd .. || exit;
}

# Post-symlink step: clone packer.nvim so neovim plugins can be managed.
function install_neovim_deps() {
    # Due to the non blockind nature I couldn't get PackerInstall to then run
    git clone https://github.com/wbthomason/packer.nvim \
        "$HOME/.local/share/nvim/site/pack/packer/start/packer.nvim" &> /dev/null;
}
# Get the operating system so that commands that differ can be carried out.
# First line of /etc/os-release is NAME="...", e.g. NAME="Arch Linux".
operating_system=$(cat < /etc/os-release | head -n 1 | cut -d= -f 2 | cut -d'"' -f2);
if [ "$operating_system" == "Arch Linux" ]; then
install="sudo pacman --noconfirm -S";
update="sudo pacman -Syy";
# Hack way to check if base-devel is installed
ls /usr/sbin/make &> /dev/null
out=$?
if [ $out -ne 0 ]; then
install "base-devel";
fi
elif [ "$operating_system" == "Ubuntu" ]; then
install="sudo apt-get -y install";
update="sudo apt-get update";
dpkg -l build-essential &> /dev/null;
out=$?;
if [ $out -ne 0 ]; then
install "build-essential";
fi
dpkg -l libssl-dev &> /dev/null;
out=$?;
if [ $out -ne 0 ]; then
install "libssl-dev";
fi
else
echo Unsupported OS for dotfiles;
exit 1;
fi
# Try Arch package name and then Ubuntu package name if there is a failure
# If they have the same name then just use the one command
install "git";
git submodule update --recursive --init;
install "zsh";
install "tmux";
install "wget";
install "curl";
install "jq";
install "bmake";
install "cmake";
install "autoconf";
install "openssl";
install "openssh" "ssh";
install "less";
install_neovim;
# Make zsh the login shell if it is not already.
# NOTE(review): grep "$USER" can match other passwd lines that merely
# contain the username as a substring; consider grep "^$USER:".
zsh_location="/bin/zsh";
current_shell="$(cat < /etc/passwd | grep "$USER" | cut -d':' -f 7)";
if [ "$zsh_location" != "$current_shell" ]; then
sudo chsh -s "$zsh_location" "$USER";
fi
# Install Rust programs
if does_not_exist "rustup"; then
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh;
rustup install stable &> /dev/null;
rustup target add wasm32-wasi &> /dev/null;
rustup target add wasm32-unknown-unknown &> /dev/null;
rustup component add rustfmt &> /dev/null;
rustup component add rust-src &> /dev/null;
fi
cargo_install "bottom" "btm";
cargo_install "broot";
cargo_install "cargo-cache";
cargo_install "cargo-edit";
cargo_install "cargo-expand";
cargo_install "cargo-outdated";
cargo_install "cargo-udeps";
cargo_install "choose";
cargo_install "du-dust" "dust";
cargo_install "exa";
cargo_install "fd-find" "fd";
cargo_install "gping";
cargo_install "hyperfine";
cargo_install "mdbook";
cargo_install "procs";
cargo_install "ripgrep" "rg";
cargo_install "sd";
cargo_install "starship";
cargo_install "zoxide";
# rust-analyzer is built from the vendored submodule rather than cargo.
echo "🔎 Checking if rust-analyzer is installed.";
if does_not_exist "rust-analyzer"; then
echo "❌ rust-analyzer is not installed. Installing.";
cd rust-analyzer || exit;
cargo xtask install --server
cd - || exit
echo "✔️ rust-analyzer is installed.";
else
echo "✔️ rust-analyzer is already installed. Skipping.";
fi
# Symlink configuration out of the dotfiles checkout.
config_dir=$(pwd);
mkdir -p "$HOME/.config";
cd "$HOME/.config" || exit;
symlink_dir "base16-shell";
symlink_dir "nvim" install_neovim_deps;
cd "$HOME" || exit;
# Symlink ~/.zshrc.d BEFORE touching files inside it: on a first run the
# directory does not exist yet, so the touches below used to fail.
symlink_dir ".zshrc.d";
# We just want to make sure these files are here. We'll actually fill them
# with aliases and secrets over time
touch "$HOME/.zshrc.d/work.zsh";
touch "$HOME/.zshrc.d/private.zsh";
symlink_dir ".fzf" install_fzf;
symlink_file ".zshrc";
symlink_file ".zshenv";
symlink_file ".zprofile";
symlink_file ".gitconfig";
symlink_file ".tmux.conf";
symlink_file ".lesskey";
# Compile the lesskey bindings once; quote "$HOME" in case it contains spaces.
if [ ! -f "$HOME/.less" ]; then
lesskey "$HOME/.lesskey"
fi
|
#!/bin/bash
# Fetch the current weather for Cluj-Napoca (quiet, metric) and cache it
# for the text-to-speech pipeline.
# Bail out if the install directory is missing so ./weather is never run
# from the wrong working directory.
cd /home/projekt/weather-2.2 || exit 1
./weather -q -m cluj-napoca > /home/projekt/homeStruction/textSpeech/outsideData
|
import React, { Component } from 'react';
import Markdown from 'markdown-to-jsx';
import { Divider } from 'antd';
import TeX from '@matejmazur/react-katex';
import md5 from 'blueimp-md5';
import './css/md.css';
import 'katex/dist/katex.min.css';
// Module-level registries shared by MdPage renders: each maps an item key
// (citation text, link, figure URL, table name, equation source) to its
// 1-based ordinal, so repeated references reuse the same number.
let citations = {};
let related_links = {};
let figures = {};
let tables = {};
let equations = {};
class MdPage extends Component{
state = {
md_content : ''
}
componentWillMount(){
const { md_content } = this.props;
this.setState({
md_content
});
citations = {};
related_links = {};
figures = {};
tables = {};
}
addRelatedLink = (children, ...props )=>{
const href = children.href;
const title = children.children.join("")
const items = title + "|" + href;
if(items in related_links){
return [<a href={href}>{title}</a>,<sup><a href={'#' + md5(items)}>{related_links[items]}</a></sup>]
}else{
related_links = {...related_links,[items]: Object.keys(related_links).length + 1};
return [<a href={href}>{title}</a>, <sup><a href={'#' + md5(items)}>{related_links[items]}</a></sup>]
}
}
addCite = (children, ...props) =>{
const ref = children.children.join("")
if(!(ref in citations)){
citations = {
...citations,
[ref]: Object.keys(citations).length + 1
}
}
return [<a href={'#' + md5(ref)} className="done">[{citations[ref]}]</a>]
}
addTable = ({children, ...props}) =>{
return <div>
<table style={{display: 'table', textAlign: 'center'}}>
{children}
</table>
</div>
}
addTableCaption = ({children, ...props}) =>{
const {name} = props;
if(!(name in tables)){
tables = {
...tables,
[name]: Object.keys(tables).length + 1
}
}
return <div>
<p style={{textAlign:"center"}}>表:{tables[name]}{name} </p>
</div>
}
addEquations = ({children})=>{
const equ = children.join("");
if(!(equ in equations)){
equations = {
...equations,
[equ]: Object.keys(equations).length + 1
}
}
return <div style={{position: "relative"}}>
<TeX>{equ}</TeX>
<div style={{position: "absolute", top: '-2px', right: '5px'}}>({equations[equ]})</div>
</div>
}
addImage = ({children, ...props}) => {
const url = props.src;
const alt = props.alt;
if(!(url in figures)){
figures = {
...figures,
[url]: Object.keys(figures).length + 1
}
}
const width = props.width ? props.width : "80%";
const margin_left = (100 - parseInt(width, 10)) /2;
return <div>
<img src={url} width={width} style={{marginLeft: margin_left + "%"}} alt={url}/>
<p style={{textAlign:"center"}}>图{figures[url]}:{alt}</p>
</div>
}
componentDidMount(){
this.forceUpdate();
}
render() {
return <div className="markdown-body">
<Markdown
children={this.state.md_content}
options = {{
overrides: {
a: this.addRelatedLink,
img: this.addImage,
cite: this.addCite,
table: this.addTable,
tablecaption: this.addTableCaption,
equation: this.addEquations,
}
}}
/>
{Object.keys(related_links).length > 0 &&<div>
<Divider orientation="right">
Related Links
</Divider>
{Object.keys(related_links).map((item, index)=>{
const pku = item.split("|");
return <p key={index} id={"#"+md5(item)}>{index + 1} : {pku[0]} {pku[1]}</p>
})}
</div>
}
{Object.keys(citations).length > 0 &&<div>
<Divider orientation="right">
References
</Divider>
{Object.keys(citations).map((item, index)=>{
const v = citations[item]
return <p key={index} id={"#" + md5(v)}>[{index + 1}]: {item}</p>
})}
</div>
}
</div>
}
}
export default MdPage;
|
<filename>src/main/java/info/tritusk/adventure/platform/forge/impl/JsonLikeNBTCodec.java
package info.tritusk.adventure.platform.forge.impl;
import net.kyori.adventure.util.Codec;
import net.minecraft.nbt.CompoundNBT;
import net.minecraft.nbt.JsonToNBT;
import org.jetbrains.annotations.NotNull;
/**
 * A {@link Codec} that round-trips {@link CompoundNBT} through its
 * stringified, JSON-like (SNBT) text form.
 */
public class JsonLikeNBTCodec implements Codec<CompoundNBT, String, Exception, Exception> {

    /** Shared instance; the codec is stateless, so one is enough. */
    public static final JsonLikeNBTCodec INSTANCE = new JsonLikeNBTCodec();

    /** Parses the given SNBT string back into a compound tag. */
    @Override
    public @NotNull CompoundNBT decode(@NotNull final String encoded) throws Exception {
        return JsonToNBT.parseTag(encoded);
    }

    /** Serializes the compound tag to its SNBT string form. */
    @Override
    public @NotNull String encode(@NotNull final CompoundNBT decoded) {
        return decoded.toString();
    }
}
|
package com.bitsys.common.http.util.cache;
import java.util.Date;
/**
* This class represents a cache entry. It contains the cached value as well as
* the time at which the value was cached.
*
* @param <T> the type of value cached.
*/
public class CachedValue<T>
{
/** The cached value. */
private final T value;
/** The date when the result was cached. */
private final Date cacheDate;
/**
* Constructs a new {@linkplain CachedValue} using the given result. The
* cache date will be initialized to right now.
*
* @param value the cached value.
*/
public CachedValue(final T value)
{
this(value, new Date());
}
/**
* Constructs a new {@linkplain CachedValue} using the given result.
*
* @param value the cached value.
* @param cacheDate the date when the value was cached.
*/
public CachedValue(final T value, final Date cacheDate)
{
if (cacheDate == null)
{
throw new IllegalArgumentException("The cache date is null");
}
this.value = value;
this.cacheDate = cacheDate;
}
/**
* Returns the cached value.
*
* @return the cached value.
*/
public T getValue()
{
return value;
}
/**
* Returns the date when the value was cached.
*
* @return the date when the value was cached.
*/
public Date getCacheDate()
{
return cacheDate;
}
}
|
#!/bin/bash
# Convert a markdown source file to text/HTML and print its public URL.
#   $1 - path to the .markdown file (relative to the site root)
# $WEBSITE_ROOT is supplied by config.sh.
source config.sh
# Quote "$1" so file names containing spaces survive word splitting.
python3 html2txt.py "$1"
echo
echo "$WEBSITE_ROOT${1%.markdown}.html"
|
#!/bin/bash
source /etc/profile
#=============================================================================#
# Task: BLAST T-cells immunodominant regions from sars cov1 to sars cov2 #
#=============================================================================#
#=======================================================================================#
# input folder: general output folder= #
# /home/damian/Documents/L3S/projects/sars_cov2/exp_epitopes/Bcells #
# output folder: blast folder = #
# /home/damian/Documents/L3S/projects/sars_cov2/sars_cov2_data/exp_epitopes_input/blast #
#=======================================================================================#
# set up input and output folders
# NOTE(review): the header above says the Bcells folder but input_dir points
# at Tcells — confirm which is intended.
input_dir="/home/damian/Documents/L3S/projects/sars_cov2/exp_epitopes/Tcells"
output_dir="/home/damian/Documents/L3S/projects/sars_cov2/sars_cov2_data/exp_epitopes_input/blast"
echo "BLASTP immunodominant regions of P59594 (Spike) to sars cov2 structural proteins"
blastp -task blastp-short -query $input_dir"/immunodom_regions_P59594.fasta" -db $output_dir"/sars_cov2_SMN3a" -max_target_seqs 1 -max_hsps 1 -evalue 1 -outfmt '6 qseqid sseqid length qlen slen qstart qend sstart send pident evalue' -out $output_dir"/P59594_blastp_sars_cov2.tsv" -num_threads 2
# prepend a header row so the TSV is self-describing
sed -i '1 i\qseqid\tsseqid\tlength\tqlen\tslen\tqstart\tqend\tsstart\tsend\tpident\tevalue' $output_dir"/P59594_blastp_sars_cov2.tsv"
echo "BLASTP immunodominant regions of P59595 (Nucleoprotein) to sars cov2 structural proteins"
# NOTE(review): this run uses -evalue 1e-3 while the others use -evalue 1 —
# confirm the stricter cutoff is deliberate for the nucleoprotein.
blastp -task blastp-short -query $input_dir"/immunodom_regions_P59595.fasta" -db $output_dir"/sars_cov2_SMN3a" -max_target_seqs 1 -max_hsps 1 -evalue 1e-3 -outfmt '6 qseqid sseqid length qlen slen qstart qend sstart send pident evalue' -out $output_dir"/P59595_blastp_sars_cov2.tsv" -num_threads 2
sed -i '1 i\qseqid\tsseqid\tlength\tqlen\tslen\tqstart\tqend\tsstart\tsend\tpident\tevalue' $output_dir"/P59595_blastp_sars_cov2.tsv"
echo "BLASTP immunodominant regions of P59596 (Membrane) to sars cov2 structural proteins"
blastp -task blastp-short -query $input_dir"/immunodom_regions_P59596.fasta" -db $output_dir"/sars_cov2_SMN3a" -max_target_seqs 1 -max_hsps 1 -evalue 1 -outfmt '6 qseqid sseqid length qlen slen qstart qend sstart send pident evalue' -out $output_dir"/P59596_blastp_sars_cov2.tsv" -num_threads 2
sed -i '1 i\qseqid\tsseqid\tlength\tqlen\tslen\tqstart\tqend\tsstart\tsend\tpident\tevalue' $output_dir"/P59596_blastp_sars_cov2.tsv"
echo "BLASTP immunodominant regions of P59632 (ORF3a) to sars cov2 protein"
# NOTE(review): only this run passes -threshold 1 — confirm it is intended.
blastp -task blastp-short -query $input_dir"/immunodom_regions_P59632.fasta" -db $output_dir"/sars_cov2_SMN3a" -max_target_seqs 1 -max_hsps 1 -evalue 1 -outfmt '6 qseqid sseqid length qlen slen qstart qend sstart send pident evalue' -out $output_dir"/P59632_blastp_sars_cov2.tsv" -threshold 1 -num_threads 2
sed -i '1 i\qseqid\tsseqid\tlength\tqlen\tslen\tqstart\tqend\tsstart\tsend\tpident\tevalue' $output_dir"/P59632_blastp_sars_cov2.tsv"
|
<gh_stars>1-10
/* -*- Mode: C; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
*
* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is Mozilla Communicator client code, released
* March 31, 1998.
*
* The Initial Developer of the Original Code is
* Netscape Communications Corporation.
* Portions created by the Initial Developer are Copyright (C) 1998
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
*
* Alternatively, the contents of this file may be used under the terms of
* either of the GNU General Public License Version 2 or later (the "GPL"),
* or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
/**
* Try to throw a bunch of errors. Corresponds to
* the xpcTestCallJS in xpctest_calljs.cpp
*
*/
// Driver sequence. StartTest/StopTest/AddTestCase/AddComment come from the
// shared XPConnect test harness, not from this file.
StartTest( "Evaluate JavaScript Throw Expressions" );
SetupTest();
AddTestData();
AddTestComment();
StopTest();
// Create the nsIXPCTestCallJS component under test and the JSO helper
// "class" whose instances are handed to it via SetJSObject. All names
// assigned here are deliberately global so the Test* helpers can see them.
function SetupTest() {
CONTRACTID = "@mozilla.org/js/xpc/test/CallJS;1";
CLASS = Components.classes[CONTRACTID].createInstance();
IFACE = Components.interfaces.nsIXPCTestCallJS;
ERROR = Components.results;
caller = CLASS.QueryInterface(IFACE);
JSO = new Function();
JSO.prototype.Evaluate = new Function( "s",
"this.result = eval(s); return this.result" );
JSO.prototype.gotException = false;
JSO.prototype.exception = {};
JSO.prototype.EvaluateAndReturnError = new Function( "s", "r", "r = eval(s); return r;" );
// keep track of the errors we've tried to use.
ERRORS_WE_GOT = {};
for ( var p in Components.results ) {
ERRORS_WE_GOT[p] = false;
}
}
// Enumerates the actual test cases: evaluating through a JSO whose
// Evaluate itself throws, calling with wrong argument counts, and passing
// a non-nsIXPCTestCallJS value to SetJSObject.
function AddTestData() {
// jso2.Evaluate returns (not throws) an undefined global name, which makes
// the component report a javascript error with details.
var jso2 = new JSO();
jso2.Evaluate = new Function( "return NS_ERROR_XPC_JS_THREW_EXCEPTION" );
TestEvaluate(
"3",
undefined,
true,
"NS_ERROR_XPC_JAVASCRIPT_ERROR_WITH_DETAILS",
jso2 );
/*
* Throw args based on the number of args
*/
TestEvaluateArgs(
[],
undefined,
true,
"NS_ERROR_XPC_NOT_ENOUGH_ARGS" );
TestEvaluateArgs(
[3,2],
3,
false);
/*
* Set the JSOobject to something that is not an instance of nsIXPCTestCallJS
*
*/
TestSetJSObject(
true,
undefined,
true,
"NS_ERROR_XPC_BAD_CONVERT_JS");
}
// Runs caller.Evaluate(evalArg) and records four AddTestCase assertions:
// the evaluated result, whether an exception was seen, and the exception's
// name and numeric result code.
// NOTE: `var jso` below redeclares the parameter; due to var hoisting the
// fresh JSO is only created when the caller passed no jso argument.
function TestEvaluate (evalArg, eResult, eGotException, eExceptionName, jso ) {
if ( !jso )
var jso = new JSO();
caller.SetJSObject(jso);
try {
caller.Evaluate(evalArg);
} catch (e) {
jso.gotException = true;
jso.exception = e;
} finally {
AddTestCase(
"caller.Evaluate(" +evalArg+ "); jso.result ",
eResult,
jso.result);
AddTestCase(
"caller.Evaluate(" +evalArg+ "); jso.gotException ",
eGotException,
jso.gotException );
AddTestCase(
"caller.Evaluate(" +evalArg+ "); jso.exception.name",
eExceptionName,
jso.exception.name );
AddTestCase(
"caller.Evaluate(" +evalArg+ "); jso.exception.result",
Components.results[eExceptionName],
jso.exception.result );
// remember which error codes this run actually exercised
if ( jso.gotException ) {
ERRORS_WE_GOT[ jso.exception.name ] = true;
}
}
}
// Like TestEvaluate but for the EvaluateToString entry point, which also
// takes an out-style string argument.
// NOTE(review): JavaScript has no out-parameters — the outArg assertion
// below checks the caller-supplied value, not anything written by
// EvaluateToString; confirm that is the intent. This helper is not invoked
// anywhere in this file.
function TestEvaluateToString (evalArg, outArg, eResult, eGotException, eExceptionName ) {
var jso = new JSO();
caller.SetJSObject(jso);
try {
caller.EvaluateToString(evalArg, outArg);
} catch (e) {
jso.gotException = true;
jso.exception = e;
} finally {
AddTestCase(
"caller.EvaluateToString(" +evalArg+","+outArg+"); jso.result ",
eResult,
jso.result);
AddTestCase(
"caller.EvaluateToString(" +evalArg+","+outArg+"); outArg ",
eResult,
outArg);
AddTestCase(
"jso.gotException ",
eGotException,
jso.gotException );
AddTestCase(
"jso.exception.name",
eExceptionName,
jso.exception.name );
AddTestCase(
"jso.exception.result",
Components.results[eExceptionName],
jso.exception.result );
if ( jso.gotException ) {
ERRORS_WE_GOT[ jso.exception.name ] = true;
}
}
}
// Calls caller.Evaluate with a variable number of arguments (built via
// eval over argsArray) to provoke argument-count errors; zero args calls
// Evaluate() directly.
function TestEvaluateArgs( argsArray, eResult, eGotException, eExceptionName ) {
var jso = new JSO;
caller.SetJSObject(jso);
try {
if ( argsArray.length > 0 ) {
eval( "caller.Evaluate("+argsArray.toString()+")" );
} else {
caller.Evaluate();
}
} catch (e) {
jso.gotException = true;
jso.exception = e;
} finally {
AddTestCase(
"callerEvaluate(" +argsArray+ "); jso.result ",
eResult,
jso.result);
AddTestCase(
"jso.gotException ",
eGotException,
jso.gotException );
AddTestCase(
"jso.exception.result",
Components.results[eExceptionName],
jso.exception.result );
AddTestCase(
"jso.exception.name",
eExceptionName,
jso.exception.name );
if ( jso.gotException ) {
ERRORS_WE_GOT[ jso.exception.name ] = true;
}
}
}
// Passes an arbitrary value to caller.SetJSObject and asserts the returned
// value and any conversion exception (name + numeric result implicitly via
// Components.results in the caller's expectations).
function TestSetJSObject( jso, eResult, eGotException, eExceptionName ) {
var exception = {};
var gotException = false;
var result;
try {
result = caller.SetJSObject( jso );
} catch (e) {
gotException = true;
exception = e;
} finally {
AddTestCase(
"caller.SetJSObject(" + jso +")",
eResult,
result );
AddTestCase(
"gotException? ",
eGotException,
gotException);
AddTestCase(
"exception.name",
eExceptionName,
exception.name );
if ( gotException ) {
ERRORS_WE_GOT[ exception.name ] = true;
}
}
}
// Coverage summary: append to the test log every Components.results code
// that none of the cases above managed to trigger.
function AddTestComment() {
    var s = "This test exercised the exceptions defined in "+
        "Components.results. The following errors were not exercised:\n";
    var name;
    for ( name in ERRORS_WE_GOT ) {
        if ( !ERRORS_WE_GOT[name] ) {
            s += name + "\n";
        }
    }
    AddComment(s);
}
// Constructor for an object shaped like an XPConnect exception: message,
// symbolic name, the matching numeric code from Components.results, a
// location placeholder, optional data, and stub methods.
function JSException ( message, name, data ) {
    this.message = message;
    this.name = name;
    this.result = Components.results[name];
    this.location = 0;
    this.data = data || null;
    this.initialize = function () {};
    this.toString = function () {
        return this.message;
    };
}
// Evaluates the given source and dumps any resulting exception.
// NOTE(review): Enumerate is not defined in this file — it is expected to
// come from the shared harness; confirm before reusing this helper.
function TryIt(s) {
try {
eval(s);
} catch (e) {
Enumerate(e);
}
}
|
#!/bin/bash
# Build script for this container itself. (When deployed to ECS the
# container provides a reliable, low-cost deployment machine for dev env.)
# Fix: the original shebang was "#!bin bash", which is not a valid
# interpreter path.
echo "*************************************** Clean UP *******************************************"
echo "deleting dangling images from docker engine"
# Remove dangling images and exited containers; errors (e.g. nothing to
# remove) are suppressed, matching the original best-effort behavior.
docker rmi -f $(docker images --filter 'dangling=true' -q --no-trunc) 2>/dev/null
docker rm -v $(docker ps --filter status=exited -q 2>/dev/null) 2>/dev/null
echo "*************************************** Build *******************************************"
echo "building docker image"
docker build -t build-containers-for-git-branches-and-deploy:latest .
echo "docker image build successful "
package rest
import (
"context"
"crypto/hmac"
"crypto/sha256"
"encoding/base64"
"encoding/hex"
"errors"
"io/ioutil"
"net/http"
"net/url"
"strconv"
"strings"
"time"
"github.com/cryptomarket/cryptomarket-go/args"
)
// apiURL and apiVersion locate the exchange REST API; every request URL is
// apiURL + apiVersion + endpoint.
var (
	apiURL     = "https://api.exchange.cryptomkt.com"
	apiVersion = "/api/2/"
)

// httpclient handles all the http logic, leaving public only whats needed.
// accepts Get, Post, Put and Delete functions, all with parameters and return
// the response bytes
type httpclient struct {
	client    *http.Client // shared transport for all requests
	apiKey    string       // public API key, embedded in the Authorization header
	apiSecret string       // secret used as the HMAC-SHA256 signing key
}
// newHTTPClient returns an httpclient wired with the given API key and
// secret, using a default *http.Client as transport.
func newHTTPClient(apiKey, apiSecret string) httpclient {
	hclient := httpclient{client: &http.Client{}}
	hclient.apiKey = apiKey
	hclient.apiSecret = apiSecret
	return hclient
}
// doRequest builds, optionally signs, and executes one HTTP request against
// the exchange API, returning the raw response body.
//
// method is the HTTP verb, endpoint the path after the API version prefix,
// params the query (GET) or form-body (other verbs) values, and public
// controls whether the Authorization header is omitted.
func (hclient httpclient) doRequest(cxt context.Context, method, endpoint string, params map[string]interface{}, public bool) (result []byte, err error) {
	// encode params once; used as query string for GET, form body otherwise
	rawQuery := buildQuery(params)
	var req *http.Request
	if method == methodGet {
		req, err = http.NewRequestWithContext(cxt, method, apiURL+apiVersion+endpoint, nil)
	} else {
		req, err = http.NewRequestWithContext(cxt, method, apiURL+apiVersion+endpoint, strings.NewReader(rawQuery))
	}
	// Bug fix: check err before touching req — the original assigned
	// req.URL.RawQuery in the GET branch before this check, which panics
	// with a nil *http.Request when request construction fails.
	if err != nil {
		return nil, errors.New("CryptomarketSDKError: Can't build the request: " + err.Error())
	}
	if method == methodGet {
		req.URL.RawQuery = rawQuery
	}
	req.Header.Add("User-Agent", "cryptomarket/go")
	req.Header.Add("Content-type", "application/x-www-form-urlencoded")
	// add auth header if is not a public call
	if !public {
		req.Header.Add("Authorization", hclient.buildCredential(method, endpoint, rawQuery))
	}
	// make request
	resp, err := hclient.client.Do(req)
	if err != nil {
		return nil, errors.New("CryptomarketSDKError: Can't make the request: " + err.Error())
	}
	defer resp.Body.Close()
	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		return nil, errors.New("CryptomarketSDKError: Can't read the response body: " + err.Error())
	}
	return body, nil
}
// buildCredential produces the Authorization header value: the custom
// "HS256 " scheme followed by base64("apiKey:timestamp:signature"), where
// signature = hex(HMAC-SHA256(httpMethod + timestamp + versionedPath[?query]))
// keyed by the API secret. The exact concatenation order is the server-side
// contract — do not reorder.
func (hclient httpclient) buildCredential(httpMethod, method, query string) string {
	// the unix-seconds timestamp is both signed and embedded in the credential
	timestamp := strconv.FormatInt(time.Now().Unix(), 10)
	msg := httpMethod + timestamp + apiVersion + method
	if len(query) != 0 {
		// GET parameters travel in the URL, so the signed message includes
		// the "?" separator; form bodies are appended as-is.
		if httpMethod == methodGet {
			msg += "?"
		}
		msg += query
	}
	h := hmac.New(sha256.New, []byte(hclient.apiSecret))
	h.Write([]byte(msg))
	signature := hex.EncodeToString(h.Sum(nil))
	return "HS256 " + base64.StdEncoding.EncodeToString([]byte(hclient.apiKey+":"+timestamp+":"+signature))
}
// buildQuery URL-encodes the supported parameter types into a query string.
// []string values are joined with commas; the args.* enumerations are
// string-kinded named types and encode as their string value. Values of any
// other type are silently dropped — callers are expected to pass only the
// types listed here.
func buildQuery(params map[string]interface{}) string {
	query := url.Values{}
	for key, value := range params {
		switch v := value.(type) {
		case []string:
			strs := strings.Join(v, ",")
			query.Add(key, strs)
		case string:
			query.Add(key, v)
		case int:
			query.Add(key, strconv.Itoa(v))
		case args.IdentifyByType:
			query.Add(key, string(v))
		case args.MarginType:
			query.Add(key, string(v))
		case args.OrderType:
			query.Add(key, string(v))
		case args.PeriodType:
			query.Add(key, string(v))
		case args.SideType:
			query.Add(key, string(v))
		case args.SortByType:
			query.Add(key, string(v))
		case args.SortType:
			query.Add(key, string(v))
		case args.TimeInForceType:
			query.Add(key, string(v))
		}
	}
	return query.Encode()
}
|
/* global esFormsBlocksLocalization */
/**
* This is the main entry point for Block Editor blocks scripts used for the `WordPress admin editor`.
* This file registers blocks dynamically using `registerBlocks` helper method.
* File names must follow the naming convention to be able to run dynamically.
*
* `src/blocks/custom/block_name/manifest.json`.
* `src/blocks/custom/block_name/block_name.js`.
*
* Usage: `WordPress admin editor`.
*/
import { unregisterBlockType } from '@wordpress/blocks';
import { registerBlocks, outputCssVariablesGlobal } from '@eightshift/frontend-libs/scripts/editor';
import { Wrapper } from '../../wrapper/wrapper';
import WrapperManifest from '../../wrapper/manifest.json';
import globalSettings from '../../manifest.json';
// Register all editor blocks: components, custom blocks, plus their
// optional -hooks and -transforms files, discovered via webpack
// require.context globs over the manifest.json naming convention.
registerBlocks(
globalSettings,
Wrapper,
WrapperManifest,
require.context('./../../components', true, /manifest.json$/),
require.context('./../../custom', true, /manifest.json$/),
require.context('./../../custom', true, /-block.js$/),
require.context('./../../custom', true, /-hooks.js$/),
require.context('./../../custom', true, /-transforms.js$/),
);
// Output global css variables.
outputCssVariablesGlobal(globalSettings);
// Remove form-selector block from anywhere else other than form CPT.
if (esFormsBlocksLocalization?.postType !== 'eightshift-forms') {
unregisterBlockType('eightshift-forms/form-selector');
}
|
<reponame>vimalkumarvelayudhan/galaxy<gh_stars>0
from galaxy import model
def build_collection( type, dataset_instances ):
    """Create a ``DatasetCollection`` whose elements wrap the supplied
    dataset instances.

    Delegates element construction to ``set_collection_elements``, which
    raises if the instances do not form a valid collection of ``type``.
    """
    collection = model.DatasetCollection( )
    set_collection_elements( collection, type, dataset_instances )
    return collection
def set_collection_elements( dataset_collection, type, dataset_instances ):
    """Populate ``dataset_collection.elements`` from ``dataset_instances``.

    Asks ``type`` to generate the elements, assigns each its positional
    ``element_index`` and back-reference to the collection, then attaches
    the list to ``dataset_collection`` and returns it.
    """
    elements = []
    # enumerate replaces the original hand-maintained element_index counter
    for element_index, element in enumerate( type.generate_elements( dataset_instances ) ):
        element.element_index = element_index
        element.collection = dataset_collection
        elements.append( element )
    dataset_collection.elements = elements
    return dataset_collection
|
#!/usr/bin/env bash
# Copyright (c) .NET Foundation and contributors. All rights reserved.
# Licensed under the MIT license. See LICENSE file in the project root for full license information.
# from https://github.com/dotnet/cli (July 21, 2019)
# Usage and parms, see: https://docs.microsoft.com/en-us/dotnet/core/tools/dotnet-install-script
# NOTE: vendored upstream script (dotnet/cli) — kept byte-compatible.
# Stop script on NZEC
set -e
# Stop script if unbound variable found (use ${var:-} if intentional)
set -u
# By default cmd1 | cmd2 returns exit code of cmd2 regardless of cmd1 success
# This is causing it to fail
set -o pipefail
# Use in the the functions: eval $invocation
invocation='say_verbose "Calling: ${yellow:-}${FUNCNAME[0]} ${green:-}$*${normal:-}"'
# standard output may be used as a return value in the functions
# we need a way to write text on the screen in the functions so that
# it won't interfere with the return value.
# Exposing stream 3 as a pipe to standard output of the script itself
exec 3>&1
# Setup some colors to use. These need to work in fairly limited shells, like the Ubuntu Docker container where there are only 8 colors.
# See if stdout is a terminal
if [ -t 1 ] && command -v tput > /dev/null; then
# see if it supports colors
ncolors=$(tput colors)
if [ -n "$ncolors" ] && [ $ncolors -ge 8 ]; then
# "|| echo" keeps a failing tput from aborting the script under set -e
bold="$(tput bold || echo)"
normal="$(tput sgr0 || echo)"
black="$(tput setaf 0 || echo)"
red="$(tput setaf 1 || echo)"
green="$(tput setaf 2 || echo)"
yellow="$(tput setaf 3 || echo)"
blue="$(tput setaf 4 || echo)"
magenta="$(tput setaf 5 || echo)"
cyan="$(tput setaf 6 || echo)"
white="$(tput setaf 7 || echo)"
fi
fi
# Colored logging helpers. say() writes to fd 3 so that functions whose
# stdout is captured as a return value can still print to the terminal.
say_warning() {
printf "%b\n" "${yellow:-}dotnet_install: Warning: $1${normal:-}"
}
say_err() {
printf "%b\n" "${red:-}dotnet_install: Error: $1${normal:-}" >&2
}
say() {
# using stream 3 (defined in the beginning) to not interfere with stdout of functions
# which may be used as return value
printf "%b\n" "${cyan:-}dotnet-install:${normal:-} $1" >&3
}
say_verbose() {
# only prints when the script-level $verbose flag is true
if [ "$verbose" = true ]; then
say "$1"
fi
}
# This platform list is finite - if the SDK/Runtime has supported Linux distribution-specific assets,
# then and only then should the Linux distribution appear in this list.
# Adding a Linux distribution to this list does not imply distribution-specific support.
# Maps an "<id>.<version>" platform string to the legacy asset name used by
# older download feeds; returns 1 when the platform has no legacy name.
get_legacy_os_name_from_platform() {
eval $invocation
platform="$1"
case "$platform" in
"centos.7")
echo "centos"
return 0
;;
"debian.8")
echo "debian"
return 0
;;
"debian.9")
echo "debian.9"
return 0
;;
"fedora.23")
echo "fedora.23"
return 0
;;
"fedora.24")
echo "fedora.24"
return 0
;;
"fedora.27")
echo "fedora.27"
return 0
;;
"fedora.28")
echo "fedora.28"
return 0
;;
"opensuse.13.2")
echo "opensuse.13.2"
return 0
;;
"opensuse.42.1")
echo "opensuse.42.1"
return 0
;;
"opensuse.42.3")
echo "opensuse.42.3"
return 0
;;
"rhel.7"*)
echo "rhel"
return 0
;;
"ubuntu.14.04")
echo "ubuntu"
return 0
;;
"ubuntu.16.04")
echo "ubuntu.16.04"
return 0
;;
"ubuntu.16.10")
echo "ubuntu.16.10"
return 0
;;
"ubuntu.18.04")
echo "ubuntu.18.04"
return 0
;;
"alpine.3.4.3")
echo "alpine"
return 0
;;
esac
return 1
}
# Prints "<id>.<version>" for the current Linux distro: from $runtime_id if
# set (strips the trailing -arch), else /etc/os-release, else a RHEL 6
# special case from /etc/redhat-release. Returns 1 when undetectable.
get_linux_platform_name() {
eval $invocation
if [ -n "$runtime_id" ]; then
echo "${runtime_id%-*}"
return 0
else
if [ -e /etc/os-release ]; then
. /etc/os-release
echo "$ID.$VERSION_ID"
return 0
elif [ -e /etc/redhat-release ]; then
local redhatRelease=$(</etc/redhat-release)
if [[ $redhatRelease == "CentOS release 6."* || $redhatRelease == "Red Hat Enterprise Linux Server release 6."* ]]; then
echo "rhel.6"
return 0
fi
fi
fi
say_verbose "Linux specific platform name and version could not be detected: UName = $uname"
return 1
}
# Prints the download-feed OS name for the current machine: osx, freebsd,
# rhel.6, linux-musl (Alpine) or plain linux. Returns 1 on unknown kernels.
get_current_os_name() {
eval $invocation
local uname=$(uname)
if [ "$uname" = "Darwin" ]; then
echo "osx"
return 0
elif [ "$uname" = "FreeBSD" ]; then
echo "freebsd"
return 0
elif [ "$uname" = "Linux" ]; then
local linux_platform_name
linux_platform_name="$(get_linux_platform_name)" || { echo "linux" && return 0 ; }
if [[ $linux_platform_name == "rhel.6" ]]; then
echo $linux_platform_name
return 0
elif [[ $linux_platform_name == alpine* ]]; then
echo "linux-musl"
return 0
else
echo "linux"
return 0
fi
fi
say_err "OS name could not be detected: UName = $uname"
return 1
}
# Prints the legacy-feed OS name: osx on Darwin, otherwise the legacy
# mapping of $runtime_id or of /etc/os-release's "$ID.$VERSION_ID".
# Returns 1 when no legacy name applies.
get_legacy_os_name() {
eval $invocation
local uname=$(uname)
if [ "$uname" = "Darwin" ]; then
echo "osx"
return 0
elif [ -n "$runtime_id" ]; then
echo $(get_legacy_os_name_from_platform "${runtime_id%-*}" || echo "${runtime_id%-*}")
return 0
else
if [ -e /etc/os-release ]; then
. /etc/os-release
os=$(get_legacy_os_name_from_platform "$ID.$VERSION_ID" || echo "")
if [ -n "$os" ]; then
echo "$os"
return 0
fi
fi
fi
say_verbose "Distribution specific OS name and version could not be detected: UName = $uname"
return 1
}
# True (exit 0) when command $1 exists on PATH.
machine_has() {
eval $invocation
hash "$1" > /dev/null 2>&1
return $?
}
# Requires at least one of curl/wget to be available for downloads.
check_min_reqs() {
local hasMinimum=false
if machine_has "curl"; then
hasMinimum=true
elif machine_has "wget"; then
hasMinimum=true
fi
if [ "$hasMinimum" = "false" ]; then
say_err "curl (recommended) or wget are required to download dotnet. Install missing prerequisite to proceed."
return 1
fi
return 0
}
# Warns (does not fail) when native .NET prerequisites appear to be missing
# on Linux, probing the ldconfig cache. Skipped entirely when
# DOTNET_INSTALL_SKIP_PREREQS=1.
check_pre_reqs() {
eval $invocation
if [ "${DOTNET_INSTALL_SKIP_PREREQS:-}" = "1" ]; then
return 0
fi
if [ "$(uname)" = "Linux" ]; then
if [ ! -x "$(command -v ldconfig)" ]; then
echo "ldconfig is not in PATH, trying /sbin/ldconfig."
LDCONFIG_COMMAND="/sbin/ldconfig"
else
LDCONFIG_COMMAND="ldconfig"
fi
local librarypath=${LD_LIBRARY_PATH:-}
LDCONFIG_COMMAND="$LDCONFIG_COMMAND -NXv ${librarypath//:/ }"
[ -z "$($LDCONFIG_COMMAND 2>/dev/null | grep libunwind)" ] && say_warning "Unable to locate libunwind. Probable prerequisite missing; install libunwind."
[ -z "$($LDCONFIG_COMMAND 2>/dev/null | grep libssl)" ] && say_warning "Unable to locate libssl. Probable prerequisite missing; install libssl."
[ -z "$($LDCONFIG_COMMAND 2>/dev/null | grep libicu)" ] && say_warning "Unable to locate libicu. Probable prerequisite missing; install libicu."
[ -z "$($LDCONFIG_COMMAND 2>/dev/null | grep -F libcurl.so)" ] && say_warning "Unable to locate libcurl. Probable prerequisite missing; install libcurl."
fi
return 0
}
# args:
# input - $1
to_lowercase() {
#eval $invocation
echo "$1" | tr '[:upper:]' '[:lower:]'
return 0
}
# args:
# input - $1
remove_trailing_slash() {
#eval $invocation
local input="${1:-}"
echo "${input%/}"
return 0
}
# args:
#   input - $1
remove_beginning_slash() {
    #eval $invocation

    # Drop one leading '/' if present; empty/unset input yields "".
    local path="${1:-}"
    echo "${path#/}"
    return 0
}
# args:
#   root_path - $1
#   child_path - $2 - this parameter can be empty
# Joins the two segments with exactly one '/' between them.
combine_paths() {
eval $invocation
# TODO: Consider making it work with any number of paths. For now:
if [ ! -z "${3:-}" ]; then
say_err "combine_paths: Function takes two parameters."
return 1
fi
# Normalize both sides so the join never doubles or drops the separator.
local root_path="$(remove_trailing_slash "$1")"
local child_path="$(remove_beginning_slash "${2:-}")"
say_verbose "combine_paths: root_path=$root_path"
say_verbose "combine_paths: child_path=$child_path"
echo "$root_path/$child_path"
return 0
}
get_machine_architecture() {
    eval $invocation

    # Map the kernel's machine name onto the feed's architecture monikers.
    if command -v uname > /dev/null; then
        CPUName=$(uname -m)
        case $CPUName in
            armv7l)  echo "arm";   return 0 ;;
            aarch64) echo "arm64"; return 0 ;;
        esac
    fi

    # Always default to 'x64'
    echo "x64"
    return 0
}
# args:
#   architecture - $1
get_normalized_architecture_from_architecture() {
    eval $invocation

    # Normalize casing so -Arch X64 and --arch x64 behave identically.
    local architecture="$(to_lowercase "$1")"
    case "$architecture" in
        \<auto\>)
            # Resolve <auto> to this machine's architecture, then normalize it.
            get_normalized_architecture_from_architecture "$(get_machine_architecture)"
            return 0
            ;;
        amd64|x64)
            echo "x64"
            return 0
            ;;
        arm)
            echo "arm"
            return 0
            ;;
        arm64)
            echo "arm64"
            return 0
            ;;
    esac

    say_err "Architecture \`$architecture\` not supported. If you think this is a bug, report it at https://github.com/dotnet/cli/issues"
    return 1
}
# The version text returned from the feeds is a 1-line or 2-line string:
# For the SDK and the dotnet runtime (2 lines):
# Line 1: # commit_hash
# Line 2: # 4-part version
# For the aspnetcore runtime (1 line):
# Line 1: # 4-part version
# args:
#   version_text - stdin
get_version_from_version_info() {
    eval $invocation

    # The version is always the last line; strip a trailing CR (Windows EOL).
    tail -n 1 | sed 's/\r$//'
    return 0
}
# args:
#   install_root - $1
#   relative_path_to_package - $2
#   specific_version - $3
is_dotnet_package_installed() {
    eval $invocation

    local install_root="$1"
    local relative_path_to_package="$2"
    # Strip stray tabs/CR/LF that may have leaked in from version metadata.
    local specific_version="${3//[$'\t\r\n']}"

    local dotnet_package_path="$(combine_paths "$(combine_paths "$install_root" "$relative_path_to_package")" "$specific_version")"
    say_verbose "is_dotnet_package_installed: dotnet_package_path=$dotnet_package_path"

    # Installed == the versioned package directory exists.
    [ -d "$dotnet_package_path" ]
}
# args:
#   azure_feed - $1
#   channel - $2
#   normalized_architecture - $3
#   coherent - $4
get_latest_version_info() {
    eval $invocation

    local azure_feed="$1"
    local channel="$2"
    local normalized_architecture="$3"
    local coherent="$4"

    # Pick the latest-version metadata URL for the selected product. The
    # uncached feed is used so a stale CDN copy is never consulted.
    local version_file_url=null
    case "$runtime" in
        dotnet)
            version_file_url="$uncached_feed/Runtime/$channel/latest.version"
            ;;
        aspnetcore)
            version_file_url="$uncached_feed/aspnetcore/Runtime/$channel/latest.version"
            ;;
        "")
            if [ "$coherent" = true ]; then
                version_file_url="$uncached_feed/Sdk/$channel/latest.coherent.version"
            else
                version_file_url="$uncached_feed/Sdk/$channel/latest.version"
            fi
            ;;
        *)
            say_err "Invalid value for \$runtime"
            return 1
            ;;
    esac
    say_verbose "get_latest_version_info: latest url: $version_file_url"

    download "$version_file_url"
    return $?
}
# args:
#   azure_feed - $1
#   channel - $2
#   normalized_architecture - $3
#   version - $4
get_specific_version_from_version() {
    eval $invocation

    local azure_feed="$1"
    local channel="$2"
    local normalized_architecture="$3"
    local version="$(to_lowercase "$4")"

    case "$version" in
        latest|coherent)
            # "coherent" (SDK only) asks the feed for the newest build whose
            # matching runtime is also published; "latest" does not.
            local coherent=false
            if [ "$version" = "coherent" ]; then
                coherent=true
            fi
            local version_info
            version_info="$(get_latest_version_info "$azure_feed" "$channel" "$normalized_architecture" "$coherent")" || return 1
            say_verbose "get_specific_version_from_version: version_info=$version_info"
            echo "$version_info" | get_version_from_version_info
            return 0
            ;;
        *)
            # Anything else is treated as an explicit version string.
            echo "$version"
            return 0
            ;;
    esac
}
# args:
#   azure_feed - $1
#   channel - $2
#   normalized_architecture - $3
#   specific_version - $4
construct_download_link() {
    eval $invocation

    local azure_feed="$1"
    local channel="$2"
    local normalized_architecture="$3"
    # Strip stray tabs/CR/LF that may have leaked in from version metadata.
    local specific_version="${4//[$'\t\r\n']}"

    local osname
    osname="$(get_current_os_name)" || return 1

    # Build the modern (current-scheme) payload URL for the selected product.
    local download_link=null
    case "$runtime" in
        dotnet)
            download_link="$azure_feed/Runtime/$specific_version/dotnet-runtime-$specific_version-$osname-$normalized_architecture.tar.gz"
            ;;
        aspnetcore)
            download_link="$azure_feed/aspnetcore/Runtime/$specific_version/aspnetcore-runtime-$specific_version-$osname-$normalized_architecture.tar.gz"
            ;;
        "")
            download_link="$azure_feed/Sdk/$specific_version/dotnet-sdk-$specific_version-$osname-$normalized_architecture.tar.gz"
            ;;
        *)
            return 1
            ;;
    esac

    echo "$download_link"
    return 0
}
# args:
#   azure_feed - $1
#   channel - $2
#   normalized_architecture - $3
#   specific_version - $4
construct_legacy_download_link() {
    eval $invocation

    local azure_feed="$1"
    local channel="$2"
    local normalized_architecture="$3"
    # Strip stray tabs/CR/LF that may have leaked in from version metadata.
    local specific_version="${4//[$'\t\r\n']}"

    local distro_specific_osname
    distro_specific_osname="$(get_legacy_os_name)" || return 1

    # Build the legacy (old-scheme) payload URL; only the dotnet runtime and
    # the SDK ever shipped under this naming scheme.
    local legacy_download_link=null
    case "$runtime" in
        dotnet)
            legacy_download_link="$azure_feed/Runtime/$specific_version/dotnet-$distro_specific_osname-$normalized_architecture.$specific_version.tar.gz"
            ;;
        "")
            legacy_download_link="$azure_feed/Sdk/$specific_version/dotnet-dev-$distro_specific_osname-$normalized_architecture.$specific_version.tar.gz"
            ;;
        *)
            return 1
            ;;
    esac

    echo "$legacy_download_link"
    return 0
}
get_user_install_path() {
    eval $invocation

    # A non-empty DOTNET_INSTALL_DIR override wins; otherwise ~/.dotnet.
    echo "${DOTNET_INSTALL_DIR:-$HOME/.dotnet}"
    return 0
}
# args:
#   install_dir - $1
resolve_installation_path() {
    eval $invocation

    local install_dir=$1
    # Anything other than the <auto> sentinel is taken verbatim.
    if [ "$install_dir" != "<auto>" ]; then
        echo "$install_dir"
        return 0
    fi

    local user_install_path="$(get_user_install_path)"
    say_verbose "resolve_installation_path: user_install_path=$user_install_path"
    echo "$user_install_path"
    return 0
}
# args:
#   install_root - $1
# Echoes the contents of the local .version marker file when it exists and is
# readable; echoes nothing (still returning 0) when it is absent.
get_installed_version_info() {
    eval $invocation

    local install_root="$1"
    local version_file="$(combine_paths "$install_root" "$local_version_file_relative_path")"
    say_verbose "Local version file: $version_file"
    # Bug fix: the original used `|` (a pipe) between the two tests, so only
    # the readability test's exit status was ever consulted. Both conditions
    # must hold: a non-empty path AND a readable file.
    if [ ! -z "$version_file" ] && [ -r "$version_file" ]; then
        local version_info="$(cat "$version_file")"
        echo "$version_info"
        return 0
    fi

    say_verbose "Local version file not found."
    return 0
}
# args:
#   relative_or_absolute_path - $1
get_absolute_path() {
    eval $invocation

    local input_path=$1
    # Resolve the parent physically (pwd -P follows symlinks), then re-attach
    # the final component.
    local parent
    parent="$(cd "$(dirname "$input_path")" && pwd -P)"
    echo "$parent/$(basename "$input_path")"
    return 0
}
# args:
#   input_files - stdin
#   root_path - $1
#   out_path - $2
#   override - $3
# Copies each stdin-listed path from root_path into out_path, preserving the
# relative layout. When override=false, existing targets are left untouched.
copy_files_or_dirs_from_list() {
eval $invocation
local root_path="$(remove_trailing_slash "$1")"
local out_path="$(remove_trailing_slash "$2")"
local override="$3"
local osname="$(get_current_os_name)"
# cp flag used when NOT overriding: -u on linux-musl, -n elsewhere
# (presumably because musl/BusyBox cp lacks -n — verify), empty otherwise.
# NOTE: $override_switch is intentionally unquoted at the cp call so an empty
# value expands to no argument at all.
local override_switch=$(
if [ "$override" = false ]; then
if [[ "$osname" == "linux-musl" ]]; then
printf -- "-u";
else
printf -- "-n";
fi
fi)
cat | uniq | while read -r file_path; do
# Path relative to root_path, without a leading slash.
local path="$(remove_beginning_slash "${file_path#$root_path}")"
local target="$out_path/$path"
# Copy when overriding, or when the target does not exist yet.
if [ "$override" = true ] || (! ([ -d "$target" ] || [ -e "$target" ])); then
mkdir -p "$out_path/$(dirname "$path")"
cp -R $override_switch "$root_path/$path" "$target"
fi
done
}
# args:
#   zip_path - $1
#   out_path - $2
# Unpacks the tar.gz into a temp dir, then copies versioned directories
# (never overwriting an already-installed version) and non-versioned files
# (subject to $override_non_versioned_files) into out_path.
extract_dotnet_package() {
eval $invocation
local zip_path="$1"
local out_path="$2"
local temp_out_path="$(mktemp -d "$temporary_file_template")"
local failed=false
tar -xzf "$zip_path" -C "$temp_out_path" > /dev/null || failed=true
# Matches paths containing a version-number directory component (e.g. sdk/2.1.4/).
local folders_with_version_regex='^.*/[0-9]+\.[0-9]+[^/]+/'
# Versioned content: copy with override=false so existing versions are kept.
find "$temp_out_path" -type f | grep -Eo "$folders_with_version_regex" | sort | copy_files_or_dirs_from_list "$temp_out_path" "$out_path" false
# Non-versioned content (e.g. the dotnet host): honor the user's override choice.
find "$temp_out_path" -type f | grep -Ev "$folders_with_version_regex" | copy_files_or_dirs_from_list "$temp_out_path" "$out_path" "$override_non_versioned_files"
rm -rf "$temp_out_path"
if [ "$failed" = true ]; then
say_err "Extraction failed"
return 1
fi
}
# args:
#   remote_path - $1
#   [out_path] - $2 - stdout if not provided
download() {
    eval $invocation

    local remote_path="$1"
    local out_path="${2:-}"

    # Non-http sources are treated as local files and simply copied.
    if [[ "$remote_path" != "http"* ]]; then
        cp "$remote_path" "$out_path"
        return $?
    fi

    # Prefer curl, fall back to wget. check_min_reqs guarantees one of them
    # exists, but fail gracefully regardless.
    if machine_has "curl"; then
        downloadcurl "$remote_path" "$out_path" && return 0
    elif machine_has "wget"; then
        downloadwget "$remote_path" "$out_path" && return 0
    fi

    say_verbose "Download failed: $remote_path"
    return 1
}
downloadcurl() {
    eval $invocation
    local remote_path="$1"
    local out_path="${2:-}"

    # Append feed_credential as late as possible before calling curl to avoid logging feed_credential
    remote_path="${remote_path}${feed_credential}"

    # -f: fail on HTTP errors; -sSL: silent but show errors, follow redirects.
    # Without an out_path the payload is streamed to stdout.
    if [ -z "$out_path" ]; then
        curl --retry 10 -sSL -f --create-dirs "$remote_path" && return 0
    else
        curl --retry 10 -sSL -f --create-dirs -o "$out_path" "$remote_path" && return 0
    fi

    say_verbose "Curl download failed"
    return 1
}
downloadwget() {
    eval $invocation
    local remote_path="$1"
    local out_path="${2:-}"

    # Append feed_credential as late as possible before calling wget to avoid logging feed_credential
    remote_path="${remote_path}${feed_credential}"

    # Without an out_path the payload is streamed to stdout (-O -).
    if [ -z "$out_path" ]; then
        wget -q --tries 10 -O - "$remote_path" && return 0
    else
        wget --tries 10 -O "$out_path" "$remote_path" && return 0
    fi

    say_verbose "Wget download failed"
    return 1
}
# Resolves all derived globals used by the rest of the script:
#   normalized_architecture, specific_version, download_link,
#   legacy_download_link (+ valid_legacy_download_link), install_root.
# Returns 1 when no version could be resolved.
calculate_vars() {
    eval $invocation

    valid_legacy_download_link=true

    normalized_architecture="$(get_normalized_architecture_from_architecture "$architecture")"
    say_verbose "normalized_architecture=$normalized_architecture"

    specific_version="$(get_specific_version_from_version "$azure_feed" "$channel" "$normalized_architecture" "$version")"
    say_verbose "specific_version=$specific_version"
    if [ -z "$specific_version" ]; then
        say_err "Could not resolve version information."
        return 1
    fi

    download_link="$(construct_download_link "$azure_feed" "$channel" "$normalized_architecture" "$specific_version")"
    say_verbose "Constructed primary named payload URL: $download_link"

    # Older releases used a different file-name scheme; a legacy link cannot
    # be constructed for every product/distro combination.
    legacy_download_link="$(construct_legacy_download_link "$azure_feed" "$channel" "$normalized_architecture" "$specific_version")" || valid_legacy_download_link=false
    if [ "$valid_legacy_download_link" = true ]; then
        say_verbose "Constructed legacy named payload URL: $legacy_download_link"
    else
        # Fix: the original message misspelled "Could" as "Cound".
        say_verbose "Could not construct a legacy_download_link; omitting..."
    fi

    install_root="$(resolve_installation_path "$install_dir")"
    say_verbose "InstallRoot: $install_root"
}
# Downloads and extracts the selected SDK/runtime into $install_root.
# Relies on globals resolved by calculate_vars: install_root, specific_version,
# download_link, legacy_download_link, valid_legacy_download_link.
# Returns 0 when the requested version is (or becomes) installed.
install_dotnet() {
eval $invocation
local download_failed=false
local asset_name=''
local asset_relative_path=''
# Map the requested product to its on-disk layout and display name.
if [[ "$runtime" == "dotnet" ]]; then
asset_relative_path="shared/Microsoft.NETCore.App"
asset_name=".NET Core Runtime"
elif [[ "$runtime" == "aspnetcore" ]]; then
asset_relative_path="shared/Microsoft.AspNetCore.App"
asset_name="ASP.NET Core Runtime"
elif [ -z "$runtime" ]; then
asset_relative_path="sdk"
asset_name=".NET Core SDK"
else
say_err "Invalid value for \$runtime"
return 1
fi
# Check if the SDK version is already installed.
if is_dotnet_package_installed "$install_root" "$asset_relative_path" "$specific_version"; then
say "$asset_name version $specific_version is already installed."
return 0
fi
mkdir -p "$install_root"
zip_path="$(mktemp "$temporary_file_template")"
say_verbose "Zip path: $zip_path"
say "Downloading link: $download_link"
# Failures are normal in the non-legacy case for ultimately legacy downloads.
# Do not output to stderr, since output to stderr is considered an error.
download "$download_link" "$zip_path" 2>&1 || download_failed=true
# if the download fails, download the legacy_download_link
if [ "$download_failed" = true ]; then
say "Cannot download: $download_link"
if [ "$valid_legacy_download_link" = true ]; then
download_failed=false
download_link="$legacy_download_link"
zip_path="$(mktemp "$temporary_file_template")"
say_verbose "Legacy zip path: $zip_path"
say "Downloading legacy link: $download_link"
download "$download_link" "$zip_path" 2>&1 || download_failed=true
if [ "$download_failed" = true ]; then
say "Cannot download: $download_link"
fi
fi
fi
if [ "$download_failed" = true ]; then
say_err "Could not find/download: \`$asset_name\` with version = $specific_version"
say_err "Refer to: https://aka.ms/dotnet-os-lifecycle for information on .NET Core support"
return 1
fi
say "Extracting zip from $download_link"
extract_dotnet_package "$zip_path" "$install_root"
# Check if the SDK version is now installed; if not, fail the installation.
if ! is_dotnet_package_installed "$install_root" "$asset_relative_path" "$specific_version"; then
say_err "\`$asset_name\` with version = $specific_version failed to install with an unknown error."
return 1
fi
return 0
}
# ---------------------------------------------------------------------------
# Script entry point: defaults, command-line parsing, and the main flow.
# ---------------------------------------------------------------------------
args=("$@")
# Relative path of the marker file holding the installed version (if present).
local_version_file_relative_path="/.version"
bin_folder_relative_path=""
# mktemp template for downloaded archives and temp extraction dirs.
temporary_file_template="${TMPDIR:-/tmp}/dotnet.XXXXXXXXX"
# Defaults; most can be overridden via the options parsed below.
channel="LTS"
version="Latest"
install_dir="<auto>"
architecture="<auto>"
dry_run=false
no_path=false
no_cdn=false
azure_feed="https://dotnetcli.azureedge.net/dotnet"
uncached_feed="https://dotnetcli.blob.core.windows.net/dotnet"
feed_credential=""
verbose=false
runtime=""
runtime_id=""
override_non_versioned_files=true
# Accumulates flags needed to reproduce this invocation (used by --dry-run).
non_dynamic_parameters=""
# Parse arguments. Both GNU-style (--channel) and PowerShell-style (-Channel)
# spellings are accepted, mirroring the Windows installer script.
while [ $# -ne 0 ]
do
name="$1"
case "$name" in
-c|--channel|-[Cc]hannel)
shift
channel="$1"
;;
-v|--version|-[Vv]ersion)
shift
version="$1"
;;
-i|--install-dir|-[Ii]nstall[Dd]ir)
shift
install_dir="$1"
;;
--arch|--architecture|-[Aa]rch|-[Aa]rchitecture)
shift
architecture="$1"
;;
--shared-runtime|-[Ss]hared[Rr]untime)
say_warning "The --shared-runtime flag is obsolete and may be removed in a future version of this script. The recommended usage is to specify '--runtime dotnet'."
# Only apply the obsolete flag when --runtime was not given explicitly.
if [ -z "$runtime" ]; then
runtime="dotnet"
fi
;;
--runtime|-[Rr]untime)
shift
runtime="$1"
if [[ "$runtime" != "dotnet" ]] && [[ "$runtime" != "aspnetcore" ]]; then
say_err "Unsupported value for --runtime: '$1'. Valid values are 'dotnet' and 'aspnetcore'."
exit 1
fi
;;
--dry-run|-[Dd]ry[Rr]un)
dry_run=true
;;
--no-path|-[Nn]o[Pp]ath)
no_path=true
non_dynamic_parameters+=" $name"
;;
--verbose|-[Vv]erbose)
verbose=true
non_dynamic_parameters+=" $name"
;;
--no-cdn|-[Nn]o[Cc]dn)
no_cdn=true
non_dynamic_parameters+=" $name"
;;
--azure-feed|-[Aa]zure[Ff]eed)
shift
azure_feed="$1"
non_dynamic_parameters+=" $name "\""$1"\"""
;;
--uncached-feed|-[Uu]ncached[Ff]eed)
shift
uncached_feed="$1"
non_dynamic_parameters+=" $name "\""$1"\"""
;;
--feed-credential|-[Ff]eed[Cc]redential)
shift
feed_credential="$1"
non_dynamic_parameters+=" $name "\""$1"\"""
;;
--runtime-id|-[Rr]untime[Ii]d)
shift
runtime_id="$1"
non_dynamic_parameters+=" $name "\""$1"\"""
;;
--skip-non-versioned-files|-[Ss]kip[Nn]on[Vv]ersioned[Ff]iles)
override_non_versioned_files=false
non_dynamic_parameters+=" $name"
;;
-?|--?|-h|--help|-[Hh]elp)
# Print usage and exit.
script_name="$(basename "$0")"
echo ".NET Tools Installer"
echo "Usage: $script_name [-c|--channel <CHANNEL>] [-v|--version <VERSION>] [-p|--prefix <DESTINATION>]"
echo "       $script_name -h|-?|--help"
echo ""
echo "$script_name is a simple command line interface for obtaining dotnet cli."
echo ""
echo "Options:"
echo "  -c,--channel <CHANNEL>         Download from the channel specified, Defaults to \`$channel\`."
echo "      -Channel"
echo "          Possible values:"
echo "          - Current - most current release"
echo "          - LTS - most current supported release"
echo "          - 2-part version in a format A.B - represents a specific release"
echo "              examples: 2.0; 1.0"
echo "          - Branch name"
echo "              examples: release/2.0.0; Master"
echo "          Note: The version parameter overrides the channel parameter."
echo "  -v,--version <VERSION>         Use specific VERSION, Defaults to \`$version\`."
echo "      -Version"
echo "          Possible values:"
echo "          - latest - most latest build on specific channel"
echo "          - coherent - most latest coherent build on specific channel"
echo "              coherent applies only to SDK downloads"
echo "          - 3-part version in a format A.B.C - represents specific version of build"
echo "              examples: 2.0.0-preview2-006120; 1.1.0"
echo "  -i,--install-dir <DIR>         Install under specified location (see Install Location below)"
echo "      -InstallDir"
echo "  --architecture <ARCHITECTURE>  Architecture of dotnet binaries to be installed, Defaults to \`$architecture\`."
echo "      --arch,-Architecture,-Arch"
echo "          Possible values: x64, arm, and arm64"
echo "  --runtime <RUNTIME>            Installs a shared runtime only, without the SDK."
echo "      -Runtime"
echo "          Possible values:"
echo "          - dotnet     - the Microsoft.NETCore.App shared runtime"
echo "          - aspnetcore - the Microsoft.AspNetCore.App shared runtime"
echo "  --skip-non-versioned-files     Skips non-versioned files if they already exist, such as the dotnet executable."
echo "      -SkipNonVersionedFiles"
echo "  --dry-run,-DryRun              Do not perform installation. Display download link."
echo "  --no-path, -NoPath             Do not set PATH for the current process."
echo "  --verbose,-Verbose             Display diagnostics information."
echo "  --azure-feed,-AzureFeed        Azure feed location. Defaults to $azure_feed, This parameter typically is not changed by the user."
echo "  --uncached-feed,-UncachedFeed  Uncached feed location. This parameter typically is not changed by the user."
echo "  --no-cdn,-NoCdn                Disable downloading from the Azure CDN, and use the uncached feed directly."
echo "  --feed-credential,-FeedCredential  Azure feed shared access token. This parameter typically is not specified."
echo "  --runtime-id                   Installs the .NET Tools for the given platform (use linux-x64 for portable linux)."
echo "      -RuntimeId"
echo "  -?,--?,-h,--help,-Help         Shows this help message"
echo ""
echo "Obsolete parameters:"
echo "  --shared-runtime               The recommended alternative is '--runtime dotnet'."
echo "      -SharedRuntime             Installs just the shared runtime bits, not the entire SDK."
echo ""
echo "Install Location:"
echo "  Location is chosen in following order:"
echo "    - --install-dir option"
echo "    - Environmental variable DOTNET_INSTALL_DIR"
echo "    - $HOME/.dotnet"
exit 0
;;
*)
say_err "Unknown argument \`$name\`"
exit 1
;;
esac
shift
done
# --no-cdn: bypass the CDN and talk to the uncached feed directly.
if [ "$no_cdn" = true ]; then
azure_feed="$uncached_feed"
fi
check_min_reqs
# Resolve architecture/version/URLs/install root into globals.
calculate_vars
script_name=$(basename "$0")
# --dry-run: print the resolved URLs and an equivalent repeatable command,
# then exit without installing anything.
if [ "$dry_run" = true ]; then
say "Payload URLs:"
say "Primary named payload URL: $download_link"
if [ "$valid_legacy_download_link" = true ]; then
say "Legacy named payload URL: $legacy_download_link"
fi
repeatable_command="./$script_name --version "\""$specific_version"\"" --install-dir "\""$install_root"\"" --architecture "\""$normalized_architecture"\"""
if [[ "$runtime" == "dotnet" ]]; then
repeatable_command+=" --runtime "\""dotnet"\"""
elif [[ "$runtime" == "aspnetcore" ]]; then
repeatable_command+=" --runtime "\""aspnetcore"\"""
fi
repeatable_command+="$non_dynamic_parameters"
say "Repeatable invocation: $repeatable_command"
exit 0
fi
check_pre_reqs
install_dotnet
# Expose the installed binaries on PATH for the current process (only visible
# to the caller when this script is sourced).
bin_path="$(get_absolute_path "$(combine_paths "$install_root" "$bin_folder_relative_path")")"
if [ "$no_path" = false ]; then
say "Adding to current process PATH: \`$bin_path\`. Note: This change will be visible only when sourcing script."
export PATH="$bin_path":"$PATH"
else
say "Binaries of dotnet can be found in $bin_path"
fi
say "Installation finished successfully."
|
# Update the WeatherServiceClient deployed under /usr from the git checkout.
#
# Fix: the original removed the installed copy BEFORE pulling, so a failed
# cd or pull left the system with no client at all. Update the checkout first
# and only replace the installed copy once the new sources are in place.
cd /home/Weather-Station || exit 1
git pull || exit 1
rm -rf /usr/WeatherServiceClient
cp -r WeatherServiceClient /usr
|
<filename>gamebox/settings/dev.py
from .base import *

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'django-insecure-943fi#vpaf#m!%52a5l01eoirmf4gnojp&!5s#99ikb9*j#0jn'

# SECURITY WARNING: define the correct hosts in production!
ALLOWED_HOSTS = ['*']

# Print outgoing emails to the console instead of sending them.
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'

INSTALLED_APPS = INSTALLED_APPS + [
    'debug_toolbar',
]

MIDDLEWARE = MIDDLEWARE + [
    'debug_toolbar.middleware.DebugToolbarMiddleware',
]

# Bug fix: the original `("127.0.0.1")` is just a parenthesized string (missing
# trailing comma), not a tuple — so debug-toolbar's membership check would test
# substrings of "127.0.0.1" instead of the whole address. Use a proper list.
INTERNAL_IPS = ["127.0.0.1"]

# Optional machine-local overrides; absence is fine.
try:
    from .local import *
except ImportError:
    pass
|
import requests
from bs4 import BeautifulSoup

url = 'https://example.com/'

# Time-bound the request and fail loudly on HTTP errors instead of silently
# parsing an error page.
r = requests.get(url, timeout=10)
r.raise_for_status()

soup = BeautifulSoup(r.text, 'html.parser')

# find() returns None when the element is absent; guard before reading .text
# so the script reports a clear error instead of raising AttributeError.
price_tag = soup.find('span', {'class': 'btc-price'})
if price_tag is None:
    raise SystemExit('Could not find the BTC price element on the page')

bitcoin_price = price_tag.text
print(f'The current Bitcoin price is {bitcoin_price}')
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.