text stringlengths 1 1.05M |
|---|
#!/usr/bin/bash
# build-install-tree.sh - assemble the SCM Workbench install tree under BUILD_ROOT.
set -e
echo "Info: build-install-tree.sh"
# BUILD_ROOT is the build prefix e.g. /var/mock/xxx
BUILD_ROOT=${1:? build root}
# BIN, LIB, MAN1, DOC and DESKTOPFILES are paths on the target e.g. /usr/bin etc
BIN=${2? bin folder}
LIB=${3? lib folder}
MAN1=${4? man1 folder}
DOC=${5? doc folder}
DESKTOPFILES=${6? desktop files folder}
echo "Info: BIN ${BIN}"
echo "Info: LIB ${LIB}"
echo "Info: MAN1 ${MAN1}"
echo "Info: DOC ${DOC}"
# fixed: was echoing the misspelled (and therefore unset) DESTTOPFILES variable
echo "Info: DESKTOPFILES ${DESKTOPFILES}"
set -x
mkdir -p "${BUILD_ROOT}${BIN}" "${BUILD_ROOT}${LIB}" "${BUILD_ROOT}${MAN1}" "${BUILD_ROOT}${DOC}" "${BUILD_ROOT}${DESKTOPFILES}"
# man page and desktop entry
gzip -c "${BUILDER_TOP_DIR}/Kit/Fedora/scm-workbench.1" > "${BUILD_ROOT}${MAN1}/scm-workbench.1.gz"
cp "${BUILDER_TOP_DIR}/Kit/Fedora/scm-workbench.desktop" "${BUILD_ROOT}${DESKTOPFILES}"
# main launcher: generated shebang + sys.path stub followed by the program source
PROG=${BUILD_ROOT}${BIN}/scm-workbench
cat <<EOF >"${PROG}"
#!${PYTHON}
import sys
sys.path.insert( 0, "${LIB}" )
EOF
cat "${BUILDER_TOP_DIR}/Source/Scm/wb_scm_main.py" >>"${PROG}"
chmod +x "${PROG}"
unset PROG
# git callback helper launcher
PROG=${BUILD_ROOT}${BIN}/scm-workbench-git-callback
echo '#!/usr/bin/python3' >"${PROG}"
cat "${BUILDER_TOP_DIR}/Source/Git/wb_git_callback_client_unix.py" >>"${PROG}"
chmod +x "${PROG}"
unset PROG
# regenerate generated sources before copying the libraries
pushd "${BUILDER_TOP_DIR}/Source/Scm"
make -f linux.mak
popd
pushd "${BUILDER_TOP_DIR}/Source/Common"
make -f linux.mak
popd
# copy all python library sources into LIB
for LIBSRC in \
    "${BUILDER_TOP_DIR}/Source/Common" \
    "${BUILDER_TOP_DIR}/Source/Git" \
    "${BUILDER_TOP_DIR}/Source/Hg" \
    "${BUILDER_TOP_DIR}/Source/Perforce" \
    "${BUILDER_TOP_DIR}/Source/Svn" \
    "${BUILDER_TOP_DIR}/Source/Scm"
do
    cp "${LIBSRC}"/*.py "${BUILD_ROOT}${LIB}"
done
# bundle third-party modules from the builder's per-user site-packages
LOCAL_SITE_PACKAGES="${HOME}/.local/lib/python3.5/site-packages"
for MOD_PACKAGE in pytz tzlocal git gitdb smmap xml_preferences
do
    if [ -e "${LOCAL_SITE_PACKAGES}/${MOD_PACKAGE}" ]
    then
        cp -r "${LOCAL_SITE_PACKAGES}/${MOD_PACKAGE}" "${BUILD_ROOT}${LIB}"
    fi
done
for MOD_FILE in P4.py P4API.cpython-??m-x86_64-linux-gnu.so
do
    if [ -e "${LOCAL_SITE_PACKAGES}/${MOD_FILE}" ]
    then
        cp "${LOCAL_SITE_PACKAGES}/${MOD_FILE}" "${BUILD_ROOT}${LIB}"
    fi
done
# remove build-time helper scripts that must not ship
rm -f "${BUILD_ROOT}${LIB}"/make*.py
# build the documentation into DOC and record its location in the platform module
"${BUILDER_TOP_DIR}/Docs/build-docs.py" "${BUILD_ROOT}${DOC}"
cat <<EOF >>"${BUILD_ROOT}${LIB}/wb_platform_unix_specific.py"
doc_dir = "${DOC}"
EOF
cp "${BUILDER_TOP_DIR}/Source/wb.png" "${BUILD_ROOT}${LIB}/scm-workbench.png"
|
<filename>app/models/medical_treatment.rb
class MedicalTreatment < ActiveRecord::Base
  belongs_to :medical_problem
  has_many :tasks
  has_many :bat_changes, :order => "date desc"

  # Treatments that have not been closed yet.
  def self.current
    find(:all, :conditions => 'date_closed is null')
  end

  # Closed treatments, most recently opened first.
  def self.expired
    find(:all, :conditions => 'date_closed is not null', :order => "date_opened desc")
  end

  # Whether today's scheduled task for this treatment has been done.
  # Returns true/false when a task is scheduled for today, or the string
  # "no_task" when this treatment has no tasks or none applies today.
  def done_today
    return "no_task" if tasks.length == 0
    # Reuse todays_task instead of duplicating the lookup query.
    task = todays_task
    return "no_task" if task.nil?
    # Preserve original semantics: anything other than an explicit false
    # (including nil) counts as done.
    task.done_by_schedule != false
  end

  # The active (not ended) task for this treatment scheduled for today's
  # weekday (repeat_code is 1-based: Sunday == 1).
  def todays_task
    # Parameterized conditions instead of string concatenation.
    Task.find(:first, :conditions =>
      ['medical_treatment_id = ? and repeat_code = ? and date_ended is null',
       id, Time.now.wday + 1])
  end

  # Close the treatment: deactivate its current tasks and stamp date_closed.
  def end_treatment
    tasks.current.each { |task| task.deactivate }
    self.date_closed = Date.today
    save
  end

  # All task histories across this treatment's tasks, most recent first.
  def task_histories
    histories = tasks.map { |task| task.task_histories }.flatten
    histories.sort_by { |task_history| Time.now - task_history.date_done }
  end

  # One-off migration helper: create BatChange rows marking the start (and,
  # when closed, the end) of every treatment.
  def self.populate_bat_changes
    find(:all).each do |treatment|
      create_bat_change(treatment, treatment.date_opened, "STARTED: ")
      if treatment.date_closed != nil
        create_bat_change(treatment, treatment.date_closed, "ENDED: ")
      end
    end
  end

  # Build and save a single BatChange row for +treatment+ dated +date+,
  # with +prefix+ prepended to the problem title in the note.
  def self.create_bat_change(treatment, date, prefix)
    change = BatChange.new
    change.date = date
    change.bat = treatment.medical_problem.bat
    change.note = prefix + treatment.medical_problem.title
    change.medical_treatment = treatment
    # no user recorded: original design had no user logging
    change.save
  end

  def before_save
  end

  def after_save
    #create a bat change
  end
end
#!/bin/bash -x
#
# Install various tools (Debian packages) used in the configuration.
#
# @author Michal Turek
#
apt-get update

# One apt-get invocation per package, exactly as before: one package
# failing to install does not prevent the remaining ones from installing.
packages=(
    acpi
    alsa-utils
    apt-file
    arandr
    autoconf
    automake
    bash-completion
    brightnessctl
    chromium
    cmake
    colorgcc
    colormake
    curl
    detox
    dnsutils # nslookup
    droopy
    exif
    exiftran
    featherpad
    firefox
    gddccontrol
    gimp
    gksu
    gmrun
    gpicview
    hplip
    hsetroot
    htop
    httpie
    inkscape
    iotop
    jmtpfs
    jq
    kcalc
    kcolorchooser
    kde-spectacle
    konqueror
    kwrite
    libreoffice-common
    libssl-dev # rust development
    lxrandr
    maven
    mc
    meld
    mplayer
    mtp-tools
    ncdu
    net-tools # ifconfig, netstat
    ngrep
    ntpdate
    numlockx
    okular
    pdsh
    pekwm
    pkg-config # rust development
    pnmixer
    policykit-1-gnome # synaptic
    printer-driver-all
    procps # watch
    rsync
    shellcheck
    stalonetray
    statgrab # xsysguard
    strace
    thunderbird
    traceroute
    ttf-mscorefonts-installer
    unrar
    vim
    vlc
    wireshark
    x11-xserver-utils # xset, xrdb, xrandr
    xosd-bin
    xscreensaver
    xserver-xorg-input-synaptics # synclient
    xterm
    xxkb
    yeahconsole
)

for package in "${packages[@]}"; do
    apt-get install --yes "${package}"
done
<reponame>smagill/opensphere-desktop
package io.opensphere.kml.datasource.controller;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URISyntaxException;
import java.net.URL;
import java.security.GeneralSecurityException;
import javax.xml.stream.events.StartElement;
import org.apache.log4j.Logger;
import de.micromata.opengis.kml.v_2_2_0.Kml;
import io.opensphere.core.Toolbox;
import io.opensphere.core.util.StartElementInspector;
import io.opensphere.core.util.XMLUtilities;
import io.opensphere.core.util.net.HttpUtilities;
import io.opensphere.kml.envoy.StreamUtilities;
/**
* Inspects the beginning of an input stream to see if it is actually kml data
* or not.
*
*/
/**
 * Inspects the beginning of an input stream to see if it is actually kml data
 * or not.
 *
 */
public class KMLPeeker implements StartElementInspector
{
    /**
     * Used to log error messages.
     */
    private static final Logger LOGGER = Logger.getLogger(KMLPeeker.class);

    /**
     * The system toolbox.
     */
    private final Toolbox myToolbox;

    /**
     * Constructs a new kml peeker.
     *
     * @param toolbox The system toolbox.
     */
    public KMLPeeker(Toolbox toolbox)
    {
        myToolbox = toolbox;
    }

    /**
     * Checks to see if the url contains KML data.
     *
     * @param url The url to check.
     * @return True if the url is KML data, false otherwise.
     */
    public boolean isKml(URL url)
    {
        boolean isKml = false;
        try
        {
            InputStream stream = openStream(url);
            if (stream != null)
            {
                try
                {
                    // First probe: try to unmarshal the stream as KML XML,
                    // using isValidStartElement below to vet the root tag.
                    isKml = XMLUtilities.canUnmarshal(stream, this);
                    // Now check if a kmz.
                    if (!isKml)
                    {
                        // The XML probe consumed the stream, so close and
                        // reopen the source before checking for a zip (KMZ)
                        // signature.
                        stream.close();
                        stream = openStream(url);
                        if (stream != null)
                        {
                            isKml = StreamUtilities.isZipInputStreamNoReset(stream);
                        }
                    }
                }
                finally
                {
                    if (stream != null)
                    {
                        stream.close();
                    }
                }
            }
        }
        catch (IOException | GeneralSecurityException | URISyntaxException e)
        {
            LOGGER.error(e.getMessage(), e);
        }
        return isKml;
    }

    /**
     * Accepts a start element whose local name is "kml", or whose namespace
     * URI contains "kml" (covers namespaced roots with other local names).
     */
    @Override
    public boolean isValidStartElement(StartElement element)
    {
        String kmlTag = Kml.class.getSimpleName().toLowerCase();
        boolean isValid = element.getName().getLocalPart().equals(kmlTag);
        if (!isValid)
        {
            String namespace = element.getName().getNamespaceURI();
            isValid = namespace.contains(kmlTag);
        }
        return isValid;
    }

    /**
     * Opens the stream.
     *
     * @param url The url to open.
     * @return The opened stream, or null when an HTTP fetch fails.
     * @throws GeneralSecurityException Thrown if there are security issues
     *             trying to open the url.
     * @throws IOException Thrown if there IO issues opening the url.
     * @throws URISyntaxException If the url could not be converted to a URI.
     */
    private InputStream openStream(URL url) throws GeneralSecurityException, IOException, URISyntaxException
    {
        InputStream stream = null;
        if (!url.getProtocol().equalsIgnoreCase("file"))
        {
            try
            {
                stream = HttpUtilities.sendGet(url, myToolbox.getServerProviderRegistry());
            }
            catch (IOException e)
            {
                // A failed HTTP fetch is expected for non-KML urls; log at
                // info level and fall through, returning null.
                LOGGER.info(e);
            }
        }
        else
        {
            stream = new FileInputStream(url.getFile());
        }
        return stream;
    }
}
|
package org.chzz.textview.util;
import android.util.Log;
import org.chzz.textview.BuildConfig;
/**
* Created by hanks on 15-12-14.
*/
/**
 * Minimal logging helper that only emits output in debug builds.
 */
public class HLog {
    /** Log tag shared by all messages from this helper. */
    private static final String TAG = "HLog";

    /**
     * Logs the given object at INFO level when BuildConfig.DEBUG is set.
     *
     * @param s the object to log; may be null (logged as the string "null")
     */
    public static void i(Object s){
        if(BuildConfig.DEBUG) {
            // String.valueOf is null-safe; the original s.toString() threw
            // a NullPointerException when a null argument was passed.
            Log.i(TAG, String.valueOf(s));
        }
    }
}
|
class Student:
    """A student identified by a validated, non-empty string name."""

    def __init__(self, name):
        """Create a student.

        Routes the value through the ``name`` property setter so that
        construction enforces the same validation as later assignment
        (the original assigned ``self._name`` directly, bypassing it).

        :raises ValueError: if ``name`` is not a non-empty string.
        """
        self.name = name

    @property
    def name(self):
        """The student's name (always a non-empty string)."""
        return self._name

    @name.setter
    def name(self, name):
        # Reject non-strings and the empty string.
        if not isinstance(name, str) or not name:
            raise ValueError("Name must be a non-empty string")
        self._name = name
import hashlib
import sys
def duplicate_sim(sim_name, sim_file, sim_args):
    """Derive a deterministic database id for a simulation.

    The id is the MD5 hex digest of the name, file and arguments
    concatenated in that order with no separators — byte-identical to the
    historical scheme so previously stored ids keep matching.
    """
    fingerprint = "{}{}{}".format(sim_name, sim_file, "".join(sim_args))
    return hashlib.md5(fingerprint.encode()).hexdigest()
<gh_stars>0
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.ic_golf_course_outline = void 0;
var ic_golf_course_outline = {
"viewBox": "0 0 24 24",
"children": [{
"name": "path",
"attribs": {
"d": "M0 0h24v24H0V0z",
"fill": "none"
},
"children": []
}, {
"name": "circle",
"attribs": {
"cx": "19.5",
"cy": "19.5",
"r": "1.5"
},
"children": []
}, {
"name": "path",
"attribs": {
"d": "M17 5.92L9 2v18H7v-1.73c-1.79.35-3 .99-3 1.73 0 1.1 2.69 2 6 2s6-.9 6-2c0-.99-2.16-1.81-5-1.97V8.98l6-3.06z"
},
"children": []
}]
};
exports.ic_golf_course_outline = ic_golf_course_outline; |
<reponame>Teemmer/pump-41
import numpy as np
from math import sqrt
class Delaunay:
    """Incremental Bowyer-Watson Delaunay triangulation inside a square frame."""

    def __init__(self, center=(0, 0), radius=9999):
        """ Init and create a new frame to contain the triangulation
        center -- Optional position for the center of the frame. Default (0,0)
        radius -- Optional distance from corners to the center.
        """
        center = np.asarray(center)
        # Frame corners (purely for nicer output; not part of the method itself)
        self.coords = [center+radius*np.array((-1, -1)),
                       center+radius*np.array((+1, -1)),
                       center+radius*np.array((+1, +1)),
                       center+radius*np.array((-1, +1))]
        # Two dicts storing the triangles and the circles circumscribed around them
        self.triangles = {}
        self.circles = {}
        # Two initial triangles (counter-clockwise)
        T1 = (0, 1, 3)
        T2 = (2, 3, 1)
        self.triangles[T1] = [T2, None, None]
        self.triangles[T2] = [T1, None, None]
        # Compute the circumscribed circle around each triangle
        for t in self.triangles:
            self.circles[t] = self.circumcenter(t)

    def circumcenter(self, tri):
        """
        Circle circumscribed around a triangle (see Skvortsov, section 1.3.1).
        Returns a (center, squared_radius) pair.
        """
        pts = np.asarray([self.coords[v] for v in tri])
        pts2 = np.dot(pts, pts.T)
        A = np.bmat([[2 * pts2, [[1],
                                 [1],
                                 [1]]],
                     [[[1, 1, 1, 0]]]])
        b = np.hstack((np.sum(pts * pts, axis=1), [1]))
        x = np.linalg.solve(A, b)
        bary_coords = x[:-1]
        center = np.dot(bary_coords, pts)
        # NOTE: this is the SQUARED radius; in_circle compares squared
        # distances and export_circles applies sqrt before output.
        radius = np.sum(np.square(pts[0] - center))
        return (center, radius)

    def in_circle(self, tri, p):
        """Check whether point p falls inside the circle circumscribed
        around triangle tri (squared-distance comparison).
        """
        center, radius = self.circles[tri]
        return np.sum(np.square(center - p)) <= radius

    def add_point(self, p):
        """Add a point to the current triangulation
        (see the introduction of chapter 2 and section 2.1.1).
        """
        # Append the point to the polygon vertex array
        p = np.asarray(p)
        idx = len(self.coords)
        self.coords.append(p)
        # Find the triangles whose circumscribed circle contains the point
        bad_triangles = []
        for T in self.triangles:
            if self.in_circle(T, p):
                bad_triangles.append(T)
        # Compute the boundary (counter-clockwise, CCW) of the "bad triangles"
        # as a list of edges together with the triangle opposite each edge
        boundary = []
        T = bad_triangles[0]
        edge = 0
        # walk to find the triangle opposite each boundary edge
        while True:
            # Check whether triangle T lies on the boundary
            tri_op = self.triangles[T][edge]
            if tri_op not in bad_triangles:
                # Add the edge and the opposite triangle to the boundary list
                boundary.append((T[(edge+1) % 3], T[(edge-1) % 3], tri_op))
                # Move on to the next edge
                edge = (edge + 1) % 3
                # Check whether the boundary has closed (loop exit condition)
                if boundary[0][0] == boundary[-1][1]:
                    break
            else:
                # Move to the next edge of the opposite triangle (CCW)
                edge = (self.triangles[tri_op].index(T) + 1) % 3
                T = tri_op
        # Delete the invalidated triangles from the current triangle set
        for T in bad_triangles:
            del self.triangles[T]
            del self.circles[T]
        # Re-triangulate the cavity left behind after deleting the triangles
        new_triangles = []
        for (e0, e1, tri_op) in boundary:
            # Add a new triangle
            T = (idx, e0, e1)
            # Circumscribe its circle
            self.circles[T] = self.circumcenter(T)
            # Set T's neighbour to the triangle opposite the newly created one
            self.triangles[T] = [tri_op, None, None]
            # And conversely:
            # find, in the opposite triangle, the neighbour across edge (e1, e0)
            if tri_op:
                for i, neigh in enumerate(self.triangles[tri_op]):
                    if neigh:
                        if e1 in neigh and e0 in neigh:
                            self.triangles[tri_op][i] = T
            new_triangles.append(T)
        # Link each new triangle to the next and the previous one
        N = len(new_triangles)
        for i, T in enumerate(new_triangles):
            self.triangles[T][1] = new_triangles[(i+1) % N]  # next
            self.triangles[T][2] = new_triangles[(i-1) % N]  # previous

    def export_triangles(self):
        """List of triangles for output (frame vertices 0-3 excluded,
        indices shifted so user points start at 0).
        """
        return [(a-4, b-4, c-4)
                for (a, b, c) in self.triangles if a > 3 and b > 3 and c > 3]

    def export_circles(self):
        """List of circumscribed circles for output as (center, radius)."""
        return [(self.circles[(a, b, c)][0], sqrt(self.circles[(a, b, c)][1]))
                for (a, b, c) in self.triangles if a > 3 and b > 3 and c > 3]
|
import React from "react"
import renderer from "react-test-renderer"
import Metadata from "."
describe("components/Metadata", (): void => {
it.each([["<EMAIL>", "Foo Bar", "foo bar test", "Foo", "Bar"]])(
"data: %p",
(
author: string,
description: string,
keywords: string,
title: string,
siteTitle: string = "",
): void => {
const tree = renderer.create(
<Metadata
author={author}
description={description}
keywords={keywords}
title={title}
siteTitle={siteTitle}
/>,
)
expect(tree.toJSON()).toMatchSnapshot()
},
)
})
|
import path from 'path';
import webpack from 'webpack';
// Development webpack configuration for the Zoomer library: builds
// ./src/zoomer into static/zoomer.js as a UMD bundle with hot module
// replacement and eslint checking before compilation.
export default {
  devtool: 'cheap-module-eval-source-map',
  entry: [
    './src/zoomer',
  ],
  output: {
    path: path.join(__dirname, 'static'),
    library: 'Zoomer',
    libraryTarget: 'umd',
    umdNamedDefine: true,
    filename: 'zoomer.js',
    publicPath: '/',
  },
  plugins: [
    new webpack.HotModuleReplacementPlugin(),
    // NOTE(review): NoErrorsPlugin plus the top-level `eslint`/`preLoaders`
    // keys are webpack 1 idioms — presumably this project pins webpack 1;
    // confirm before upgrading.
    new webpack.NoErrorsPlugin(),
  ],
  eslint: {
    configFile: '.eslintrc',
  },
  module: {
    preLoaders: [{
      test: /\.js$/,
      loaders: ['eslint-loader'],
      exclude: ['node_modules'],
    }],
    loaders: [
      {
        test: /\.js$/,
        loaders: ['babel'],
        include: path.join(__dirname, 'src'),
      },
    ],
  },
};
|
#!/bin/bash
# Postgres container init script: create the API user and database and grant
# the user full privileges. Intended to run from the official postgres
# image's /docker-entrypoint-initdb.d mechanism.
set -e
# NOTE(review): $API_USER/$API_PASS/$API_DB are interpolated unquoted into
# the SQL, so they must not contain quotes or SQL metacharacters — values
# are assumed to come from trusted environment configuration; confirm.
psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" <<-EOSQL
CREATE USER $API_USER PASSWORD '$API_PASS';
CREATE DATABASE $API_DB;
GRANT ALL PRIVILEGES ON DATABASE $API_DB TO $API_USER;
EOSQL
|
#!/bin/bash
# Stop and remove every Docker container, then remove every image.
# The id lists are written to .conf files first (same side effect as the
# original script) and then consumed line by line.

ids_list=ids.conf
images_list=images.conf

sudo docker ps -aq > "$ids_list"
sudo docker images -q > "$images_list"

# Stop and remove each container id.
while IFS= read -r container_id
do
    sudo docker stop "$container_id"
    sudo docker rm "$container_id"
done < "$ids_list"

# Remove each image id.
while IFS= read -r image_id
do
    sudo docker rmi "$image_id"
done < "$images_list"

# Show what is left (should be nothing).
sudo docker ps -a
#
# Copyright 2016-2019 Crown Copyright
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Accumulo shell setup: register the security labels used by the examples and
# grant them as scan authorizations to the target user.
add_labels ['public', 'private', 'vis1', 'vis2']
set_auths '<USER>',['public', 'private', 'vis1', 'vis2']
|
package com.donfyy.shrink;
import android.os.Bundle;
import androidx.appcompat.app.AppCompatActivity;
/**
 * Demo activity for resource-shrinking experiments.
 */
public class MainActivity extends AppCompatActivity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        // Result deliberately ignored: the dynamic lookup by name is
        // presumably here to test whether the resource shrinker keeps
        // "activity_main1" — TODO confirm intent.
        getResources().getIdentifier("activity_main1","layout",getPackageName());
    }
}
|
#!/bin/bash
# Package build script for Swift/T: builds the source tree under dev/build,
# installs it into $PREFIX/swift-t, then symlinks binaries, config files,
# libraries and scripts into the conventional $PREFIX locations.
install -d $PREFIX/bin
install -d $PREFIX/etc
install -d $PREFIX/lib
install -d $PREFIX/scripts
install -d $PREFIX/swift-t
build_dir='dev/build/'
cd ${build_dir}
bash init-settings.sh
# Historical settings tweaks, kept commented out for reference:
# sed 's;SWIFT_T_PREFIX=/tmp/swift-t-install;SWIFT_T_PREFIX='"$PREFIX"'/swift-t;' -i swift-t-settings.sh
# sed 's;ENABLE_PYTHON=0;ENABLE_PYTHON=1;' -i swift-t-settings.sh
# sed 's;PYTHON_EXE="";PYTHON_EXE="'"$PYTHON"'";' -i swift-t-settings.sh
bash build-swift-t.sh
# Setup symlinks for utilities
### BIN ###
cd $PREFIX/bin
for file in stc swift-t helpers.zsh; do
    ln -sv ../swift-t/stc/bin/$file .
done
for file in turbine; do
    ln -sv ../swift-t/turbine/bin/$file .
done
### ETC ###
cd $PREFIX/etc
for file in stc-config.sh turbine-version.txt; do
    ln -sv ../swift-t/stc/etc/$file .
done
ln -sv ../swift-t/turbine/etc/version.txt .
### LIB ###
cd $PREFIX/lib
ln -sv ../swift-t/stc/lib/*.jar .
# A workaround for a missing library
ln -sv libmpi.so libmpi.so.20
### SCRIPTS ###
cd $PREFIX/scripts
for file in turbine-config.sh; do
    ln -sv ../swift-t/turbine/scripts/$file .
done
|
# Create a system user (with matching group and zsh shell), generate an RSA
# ssh keypair for it, install the public half as authorized_keys, and drop
# the private half as <user>_user_private_key.txt in the current directory.
#
# $1 - name of the user (also used as the group name)
create_user_with_ssh_private_key() {
    # Use local variables: the original assigned to USER and HOME, which
    # clobbered the calling shell's own USER/HOME environment for all
    # subsequent commands.
    local new_user=${1}
    local new_home=/home/${new_user}
    # create group
    groupadd "${new_user}"
    # create user
    useradd -k /etc/skel -p NP -m -s /bin/zsh -g "${new_user}" "${new_user}"
    # setup ssh key
    mkdir -p "${new_home}/.ssh/"
    ssh-keygen -N "" -t rsa -v -f "${new_home}/.ssh/id_rsa"
    mv "${new_home}/.ssh/id_rsa.pub" "${new_home}/.ssh/authorized_keys"
    mv "${new_home}/.ssh/id_rsa" "./${new_user}_user_private_key.txt"
    # fix permissions
    chmod 600 "${new_home}/.ssh/authorized_keys"
    chmod 700 "${new_home}/.ssh"
    chown -R "${new_user}:${new_user}" "${new_home}/.ssh"
}
export -f create_user_with_ssh_private_key
|
/**
 * Simple separate-chaining hash map keyed by strings.
 */
class Hashmap {
  constructor() {
    // Sparse array of buckets; each bucket is a list of [key, value] pairs
    // so colliding keys can coexist.
    this._storage = [];
  }

  // Returns the value stored under `key`, or undefined when absent.
  get(key) {
    const bucket = this._storage[this.hashStr(key)];
    if (!bucket) return undefined;
    for (const entry of bucket) {
      if (entry[0] === key) return entry[1];
    }
    return undefined;
  }

  // Sums the character codes of `key`. Bug fix: the original reduce had no
  // initial value, so one-character keys hashed to the character itself and
  // longer keys produced string concatenation instead of a numeric sum.
  hashStr(str) {
    return str.split('').reduce((acc, ch) => acc + ch.charCodeAt(0), 0);
  }

  // Stores `val` under `key`, replacing any existing entry for the same key.
  // Bug fix: the original always pushed a new pair, so setting an existing
  // key appended a duplicate that get() never reached.
  set(key, val) {
    const i = this.hashStr(key);
    if (!this._storage[i]) this._storage[i] = [];
    const bucket = this._storage[i];
    for (const entry of bucket) {
      if (entry[0] === key) {
        entry[1] = val;
        return;
      }
    }
    bucket.push([key, val]);
  }
}
module.exports = Hashmap;
|
<reponame>ramirobg94/owasp-dependency-check
import sqlalchemy
from flask import Blueprint, current_app, request, jsonify
# from security_dependency_check import Project, celery, AVAILABLE_TASKS
from security_dependency_check import Project, celery
checker_app = Blueprint("checker_app", __name__)
@checker_app.route("/api/v1/project/create", methods=["GET"])
def create():
"""
This end point launch a new analysis and create a new project in the
database.
To launch a new project using command line, you can write:
curl "http://mysite.com/api/v1/check?lang=nodejs&repo=https://github.com/ramirobg94/QuizCore"
Repo example:
https://github.com/ramirobg94/QuizCore
---
tags:
- Analysis
parameters:
- name: lang
in: query
description: >
The language or project code. i.e: nodejs, java, python...
required: true
type: string
default: nodejs
enum:
- nodejs
- name: repo
in: query
description: >
Remote repository address. i.e:
https://github.com/ramirobg94/QuizCore
required: true
type: string
responses:
200:
description: Analysis launched correctly
schema:
id: create_analysis_ok
properties:
project:
type: int
required: true
description: return the project ID
examples:
application/json:
project: 20
400:
schema:
id: create_analysis_invalid_input
properties:
error:
type: string
description: error message
"""
db = current_app.config["DB"]
lang = request.args.get('lang', "nodejs")
repo = request.args.get('repo', None)
if not repo:
return jsonify(error="Invalid repo value"), 400
try:
available_tasks = celery.ODSC_PLUGINS[lang]
except KeyError:
return jsonify(error="Language '{}' not available".format(lang)), 400
# Store project information
try:
project = Project(lang, repo, len(available_tasks))
db.session.add(project)
db.session.commit()
except sqlalchemy.exc.DataError:
# ---------------------------------------------------------------------
# We do that because the primary key of the project is an UUID4
# generated in Python code. The probabilities of a collision with
# another primary key are lower, but, to ensure that we do a second
# round to decrease the collision probabilities
# ---------------------------------------------------------------------
project = Project(lang, repo, len(available_tasks))
db.session.add(project)
db.session.commit()
celery.send_task("core_dispatch", args=(lang, repo, project.id))
return jsonify(project=project.id)
@checker_app.route("/api/v1/project/status/<project_id>", methods=["GET"])
def status(project_id):
"""
Check and return the state and vulnerability of the project
---
tags:
- Analysis
parameters:
- name: project_id
in: path
description: >
The project ID to check state
required: true
type: string
responses:
200:
description: The current status of the analysis
schema:
id: check_project_status_project_check
properties:
scan_status:
type: string
required: true
description: return the project status
enum:
- created
- finished
- running
- non-accessible
total_tests:
type: int
required: true
description: return total test that will be passed
passed_tests:
type: int
required: true
description: return finished test at the moment ot check
examples:
application/json:
project: "finish"
total_tests: 2
passed_tests: 2
application/json:
project: "running"
total_tests: 2
passed_tests: 1
404:
schema:
id: check_project_status_project_not_found
properties:
error:
type: string
description: error message
"""
try:
project = Project.query.filter_by(id=project_id).one()
except sqlalchemy.orm.exc.NoResultFound:
return jsonify(error='Project ID not found'), 404
ret = dict(
scan_status=project.status,
total_tests=project.total_tests,
passed_tests=project.passed_tests
)
return jsonify(ret)
@checker_app.route("/api/v1/project/results/<project_id>", methods=["GET"])
def results(project_id):
"""
Return the results for a project
---
tags:
- Analysis
parameters:
- name: project_id
in: path
description: >
The project ID to get results
required: true
type: string
responses:
200:
description: results returned
schema:
id: project_results_ok
properties:
project_info:
type: list
vulnerabilities:
type: list
examples:
application/json:
project_info:
lang: "nodejs"
passedTest: 1
repo: "https://github.com/ramirobg94/QuizCore"
scan_status: "finished"
vulnerabilities:
- { version: 3.8.8.3, product: sqlite, severity:
Medium, advisory: CVE-2015-6607, description:
"SQLite before 3.8.9, as used in Android before
5.1.1 LMY48T, allows attackers to gain privileges
via a crafted application, aka internal bug 20099586."}
- { version: 10.10.3, product: mac_os_x, severity:
High, advisory: CVE-2015-3717, description:
"Multiple buffer overflows in the printf
functionality in SQLite, as used in Apple iOS before
8.4 and OS X before 10.10.4, allow remote attackers
to execute arbitrary code or cause a denial of
service (application crash) via unspecified vectors."}
404:
schema:
id: project_results_not_found
properties:
error:
type: string
description: error message
"""
VALUES_TO_HIDE_PROJECT = ("id", "total_tests")
VALUES_TO_HIDE_VULNS = ("id", "project_id")
try:
project = Project.query.filter_by(id=project_id).one()
except sqlalchemy.orm.exc.NoResultFound:
return jsonify(error='Project ID not found'), 404
# Load project info
project_info = {
x: y for x, y in project.__dict__.items()
if not x.startswith("_") and x not in VALUES_TO_HIDE_PROJECT
}
# Load vulns
vulnerabilities = [
{y: z for y, z in x.__dict__.items() if not y.startswith("_") and \
y not in VALUES_TO_HIDE_VULNS}
for x in project.vulnerabilities.all()
]
return jsonify(dict(
project_info=project_info,
vulnerabilities=vulnerabilities
))
__all__ = ("checker_app",)
|
<reponame>Solidaric-org/ladenliebe-org
package api
import (
"encoding/json"
"fmt"
"net/http"
"github.com/ManuStoessel/wirvsvirus/backend/entity"
"github.com/gorilla/mux"
log "github.com/sirupsen/logrus"
)
// getImage handles GET /image/{id}: looks up the image entity by id and
// writes it back as JSON, or a JSON error body with 404/500 status.
func getImage(w http.ResponseWriter, r *http.Request) {
	queries := mux.Vars(r)
	w.Header().Set("Content-Type", "application/json")
	if id, ok := queries["id"]; ok {
		data := &entity.Image{}
		data = data.Read(id)
		if data != nil {
			responseBody, err := json.Marshal(data)
			if err != nil {
				w.WriteHeader(http.StatusInternalServerError)
				w.Write([]byte(`{"error": "error marshalling data"}`))
				log.WithFields(log.Fields{
					"image": fmt.Sprintf("%+v", data),
				}).Error("Unable to marshal image data.")
				return
			}
			w.WriteHeader(http.StatusOK)
			w.Write(responseBody)
			log.WithFields(log.Fields{
				"id": id,
			}).Trace("Image found.")
			return
		}
	}
	// Fallthrough: missing id parameter or no matching entity.
	w.WriteHeader(http.StatusNotFound)
	w.Write([]byte(`{"error": "not found"}`))
	log.WithFields(log.Fields{
		"queries": fmt.Sprintf("%+v", queries),
	}).Error("Unable to find image.")
}
// updateImage handles PUT /image/{id}: verifies the image exists, decodes the
// request body into a replacement entity, persists it under the path id and
// echoes the updated entity as JSON.
func updateImage(w http.ResponseWriter, r *http.Request) {
	queries := mux.Vars(r)
	w.Header().Set("Content-Type", "application/json")
	if id, ok := queries["id"]; ok {
		data := &entity.Image{}
		data = data.Read(id)
		if data != nil {
			imageToBeUpdated := entity.Image{}
			err := json.NewDecoder(r.Body).Decode(&imageToBeUpdated)
			if err != nil {
				w.WriteHeader(http.StatusBadRequest)
				w.Write([]byte(`{"error": "could not parse body as image"}`))
				log.WithFields(log.Fields{
					"body": fmt.Sprintf("%+v", r.Body),
				}).Error("Unable to unmarshal body as image.")
				return
			}
			// Force the path id so the request body cannot retarget the entity.
			imageToBeUpdated.ID = id
			imageToBeUpdated.Update()
			responseBody, err := json.Marshal(imageToBeUpdated)
			if err != nil {
				w.WriteHeader(http.StatusInternalServerError)
				w.Write([]byte(`{"error": "error marshalling data"}`))
				// Fixed: log the entity that failed to marshal, not the
				// previously read copy ("data") as the original did.
				log.WithFields(log.Fields{
					"image": fmt.Sprintf("%+v", imageToBeUpdated),
				}).Error("Unable to marshal image data.")
				return
			}
			w.WriteHeader(http.StatusOK)
			w.Write(responseBody)
			log.WithFields(log.Fields{
				"id": id,
			}).Trace("image updated.")
			return
		}
	}
	w.WriteHeader(http.StatusNotFound)
	w.Write([]byte(`{"error": "not found"}`))
	log.WithFields(log.Fields{
		"queries": fmt.Sprintf("%+v", queries),
	}).Error("Unable to find Image.")
}
// deleteImage handles DELETE /image/{id}: looks up the image, deletes it and
// echoes the deleted entity as JSON, or writes a JSON 404/500 error.
func deleteImage(w http.ResponseWriter, r *http.Request) {
	queries := mux.Vars(r)
	w.Header().Set("Content-Type", "application/json")
	if id, ok := queries["id"]; ok {
		data := &entity.Image{}
		data = data.Read(id)
		if data != nil {
			data.Delete()
			// Echo the (now deleted) entity back to the caller.
			responseBody, err := json.Marshal(data)
			if err != nil {
				w.WriteHeader(http.StatusInternalServerError)
				w.Write([]byte(`{"error": "error marshalling data"}`))
				log.WithFields(log.Fields{
					"image": fmt.Sprintf("%+v", data),
				}).Error("Unable to marshal image data.")
				return
			}
			w.WriteHeader(http.StatusOK)
			w.Write(responseBody)
			log.WithFields(log.Fields{
				"id": id,
			}).Debug("Image deleted.")
			return
		}
	}
	w.WriteHeader(http.StatusNotFound)
	w.Write([]byte(`{"error": "not found"}`))
	log.WithFields(log.Fields{
		"queries": fmt.Sprintf("%+v", queries),
	}).Error("Unable to find image.")
}
// createImage handles POST /image: decodes the request body as an image,
// persists it and responds 201 Created with the stored entity as JSON.
func createImage(w http.ResponseWriter, r *http.Request) {
	w.Header().Set("Content-Type", "application/json")
	// NOTE: the original called r.ParseForm() before decoding. For
	// form-encoded requests ParseForm drains r.Body, leaving nothing for the
	// JSON decoder; for JSON requests it was a no-op. It has been removed.
	imageToBeCreated := entity.Image{}
	err := json.NewDecoder(r.Body).Decode(&imageToBeCreated)
	if err != nil {
		w.WriteHeader(http.StatusBadRequest)
		w.Write([]byte(`{"error": "could not parse body as image"}`))
		log.WithFields(log.Fields{
			"body": fmt.Sprintf("%+v", r.Body),
		}).Error("Unable to unmarshal body as image.")
		return
	}
	imageToBeCreated.Create()
	response, err := json.Marshal(imageToBeCreated)
	if err != nil {
		w.WriteHeader(http.StatusInternalServerError)
		w.Write([]byte(`{"error": "could not marshal image"}`))
		log.WithFields(log.Fields{
			"image": fmt.Sprintf("%+v", imageToBeCreated),
		}).Error("Unable to marshal image as body.")
		return
	}
	w.WriteHeader(http.StatusCreated)
	w.Write(response)
	log.WithFields(log.Fields{
		"image": fmt.Sprintf("%+v", imageToBeCreated),
	}).Debug("Image created.")
	return
}
// listImages handles GET /images: returns every stored image plus a count,
// wrapped in an ImageList, as JSON.
func listImages(w http.ResponseWriter, r *http.Request) {
	w.Header().Set("Content-Type", "application/json")
	imageList := ImageList{}
	image := entity.Image{}
	imageList.Images = image.ListAll()
	imageList.Count = len(imageList.Images)
	responseBody, err := json.Marshal(imageList)
	if err != nil {
		w.WriteHeader(http.StatusInternalServerError)
		w.Write([]byte(`{"error": "error marshalling data"}`))
		log.WithFields(log.Fields{
			"imagelist": fmt.Sprintf("%+v", imageList),
		}).Error("Unable to marshal imagelist data.")
		return
	}
	w.WriteHeader(http.StatusOK)
	w.Write(responseBody)
	log.WithFields(log.Fields{
		"listlength": fmt.Sprintf("%+v", imageList.Count),
	}).Trace("Imagelist returned.")
	return
}
|
def delete_values(arr, val):
    """Remove every occurrence of ``val`` from ``arr`` in place.

    Uses swap-with-last so each removal is O(1); surviving elements may
    therefore end up reordered. Returns ``arr`` for convenience.
    """
    i = 0
    size = len(arr)
    while i < size:
        if arr[i] == val:
            # Overwrite with the last live element and shrink the live region;
            # do not advance i so the swapped-in element is re-examined.
            size -= 1
            arr[i] = arr[size]
        else:
            i += 1
    # Bug fix: the original popped `size` elements — the count of SURVIVORS —
    # which discarded kept values. Truncate only the dead tail instead.
    del arr[size:]
    return arr
# Driver Code
print(delete_values([2, 3, 3, 4, 4, 5], 3)) |
<reponame>Shanfan/Illustrator-Scripts-Archive
// Tangents From A Point
// draws tangent lines from a selected anchor point to selected curved segments.
// This script tries to find a path with only 1 anchor selected,
// from foreground to background. And specifies the selected point
// of the path as starting point of tangents.
// "the selected curved segments" means rest of the selected paths.
// You can use an isolated point as the starting point.
// In this case, starting isolated point is removed after drawing tangents.
// Drawn tangents have handles at ends.
// So you can move the starting side of anchor point with keeping tangency.
// JavaScript Script for Adobe Illustrator CS3
// Tested with Adobe Illustrator CS3 13.0.3, Windows XP SP2 (Japanese version).
// This script provided "as is" without warranty of any kind.
// Free to use and distribute.
// Copyright(c) 2006-2009 <NAME>
// http://park12.wakwak.com/~shp/lc/et/en_aics_script.html
// 2006-03-29
// 2009-05-23 some refinements
// ------------------------------------------------
var ver10 = (version.indexOf('10') == 0);
// Entry point: finds the starting anchor (the path with exactly one selected
// anchor point), collects stroke attributes to reuse, then draws tangents
// from that anchor to the remaining selected paths.
function main(){
    var s = [];
    getPathItemsInSelection(0, s);
    if(s.length < 2) return;
    activateEditableLayer(s[0]);
    // detected anchor point to be starting point
    var j, p, tgtItem, tgtIdx, tgt;
    var sw, scol; // stroke width / stroke color for the drawn tangents
    LOOP: for(var i=0; i<s.length; i++){
        // remember the first stroked item's attributes as defaults
        if(!sw && s[i].stroked){
            sw = s[i].strokeWidth;
            scol = s[i].strokeColor;
            if(tgt != null) break;
        }
        if(tgt != null) continue;
        p = s[i].pathPoints;
        tgtIdx = null;
        for(j = 0; j < p.length; j++){
            if(isSelected(p[j])){
                // more than one selected anchor: not a starting-point path
                if(tgtIdx != null) continue LOOP;
                tgtIdx = j;
            }
        }
        if(tgtIdx != null){
            tgtItem = s[i];
            tgt = p[tgtIdx].anchor;
            if(s[i].stroked){
                sw = s[i].strokeWidth;
                scol = s[i].strokeColor;
            }
            s[i] = null; // exclude the starting path from the tangent targets
        }
    }
    if(!tgt){
        alert("Error: Fail to find a point to draw tangents from.\n"
              + " ( This script searches a path which has only one\n"
              + " selected anchor point, and tries to draw tangents\n"
              + " from this point to other selected paths. )");
        return;
    }
    // fall back to a 1pt gray stroke when no stroked item was found
    if(! sw){
        sw = 1;
        scol = col_Gray();
    }
    var n = drawTangentFromPnt(tgt, s, scol, sw);
    if(n > 0){
        // remove isolated point
        if (tgtItem.pathPoints.length == 1) tgtItem.remove();
    } else {
        alert("It seems that there's no tangent\n"
              + "which can draw from specified point.");
    }
}
// ------------------------------------------------
function drawTangentFromPnt(tgt, s, scol, sw){ // tgt : [x,y], s : Array of PathItems
var conf = {};
// settings =============================================
// if true, tries to draw tangents to the peaks.
conf.include_vertex = false;
// ======================================================
// starting point
xx = tgt[0];
yy = tgt[1];
var i, k, h, bz, ar;
var tarr = [];
for(i = 0; i < s.length; i++){
if(s[i] == null) continue;
p = s[i].pathPoints;
if(p.length<2) continue; // ignores isolated point
for(j = 0; j < p.length; j++){
// draws lines to peaks
if(conf.include_vertex && isSelected(p[j]) && isCorner(p,j)){
ar = [ p[j].anchor[0] - xx,
p[j].anchor[1] - yy ];
if(juufuku_chosa(tgt, ar, tarr)) tarr.push(ar);
}
// ignores not selected segments
k = getIdx(s[i], j + 1);
if(k < 0) break;
if (! sideSelection(p[j], p[k])) continue;
// draws tangents
bz = new Bezier(p, j, k);
bz.mvDat(-xx, -yy);
tarr = getTangent(bz, 0, tgt, tarr);
tarr = getTangent(bz, 3, tgt, tarr);
}
}
// draws tangents
if (tarr.length > 0) {
for(i = 0; i < tarr.length; i++){
p = activeDocument.activeLayer.pathItems.add();
with(p){
setEntirePath([tgt, [tarr[i][0] + xx,
tarr[i][1] + yy]]);
filled = false;
stroked = true;
strokeColor = scol;
strokeWidth = sw;
// extends handles
pathPoints[1].leftDirection = [xx + tarr[i][0] * 0.6,
yy + tarr[i][1] * 0.6];
}
}
}
return tarr.length;
}
// ------------------------------------------------
// searching a tangents
// searching a tangents
function getTangent(bz, idx, tgt, tarr){
    // Iteratively searches Bezier "bz" (already translated so the tangent
    // origin is at [0,0]) for a point whose tangent line passes through the
    // origin. "idx" (0 or 3) selects which end of the segment to bias the
    // root choice toward. Accepted points (in the curve's translated
    // coordinates) are pushed onto "tarr"; the array is returned either way.
    var torelance = 0.00001; // accept when the tangent's y-intercept is this close to 0
    var slopeLimit = 10000;  // slopes steeper than this: swap the axes instead
    var i, s, t, fd, solution;
    var rotFlg = 1;          // -1 while the curve is axis-swapped via xyRot()
    var f = bz.q[idx].slice(0);
    for(i = 0; i <= 10; i++){ // limit of trial time
        s = slope(f);
        if (s == null || Math.abs(s) > slopeLimit){
            // Vertical / near-vertical: rotate the problem 90 degrees.
            bz.xyRot();
            f.reverse();
            rotFlg *= -1;
            continue;
        }
        t = tBySlope(bz, s, idx);
        fd = bz.dv(t);  // derivative at t
        f = bz.pnt(t);  // point at t
        // y-intercept (at x == 0) of the tangent line at t; zero means the
        // tangent passes through the origin, i.e. through the start point.
        solution = fd[0] == 0 ? f[1] : f[1] - fd[1] * f[0] / fd[0];
        if (Math.abs(solution) < torelance) {
            // Clamp out-of-range parameters onto the segment ends.
            if(t < 0) f = bz.pnt(0);
            else if(t > 1) f = bz.pnt(1);
            // Undo any axis swap before handing the point back.
            if(rotFlg < 0){
                bz.xyRot();
                f.reverse();
                rotFlg = 1;
            }
            if(overlap_check(tgt, f, tarr)) tarr.push(f);
            return tarr;
        }
    }
    // No convergence within the trial limit: restore orientation, keep tarr.
    if(rotFlg<0) bz.xyRot();
    return tarr;
}
// ------------------------------------------------
// remove duplicated items
function overlap_check(tgt, f, tarr){
    // Returns true when candidate endpoint "f" should be kept: it must
    // differ from the starting point "tgt" (a point-length tangent) and
    // from every endpoint already collected in "tarr".
    if(cmpDiff(tgt, f)) return false;
    var keep = true;
    for(var i = 0; i < tarr.length && keep; i++){
        if(cmpDiff(tarr[i], f)) keep = false;
    }
    return keep;
}
// ------------------------------------------------
function cmpDiff(p1, p2){
    // True when the two 2D points are closer than 0.01 on both axes.
    var TOLERANCE = 0.01;
    var dx = Math.abs(p1[0] - p2[0]);
    var dy = Math.abs(p1[1] - p2[1]);
    return dx < TOLERANCE && dy < TOLERANCE;
}
// ------------------------------------------------
function slope(f){
    // Slope y/x of vector "f"; null for a vertical vector (x == 0).
    if(f[0] == 0) return null;
    return f[1] / f[0];
}
// ------------------------------------------------
//
function chk_condition_1(q, a, b, c, d){
    // Heuristic used by tBySlope() to choose between two in-range roots.
    // True when control points q[a] and q[b] coincide, or share an x
    // (resp. y) coordinate while q[c] and q[d] lie on opposite sides of
    // that axis (product of coordinates < 0).
    // NOTE(review): the geometric intent is undocumented upstream; the
    // indices address the [anchor, handle, handle, anchor] tuple of a cubic
    // segment - verify against tBySlope() before reusing elsewhere.
    return ((arrEq(q[a], q[b]))
            || ((q[a][0] == q[b][0] && q[c][0] * q[d][0] < 0)
                || (q[a][1] == q[b][1] && q[c][1] * q[d][1] < 0)));
}
// ------------------------------------------------
//
function tBySlope(bz, k, idx){
    // Finds the Bezier parameter t at which curve "bz" has slope "k" by
    // solving y'(t) - k*x'(t) = 0, a quadratic in t. "idx" (0 or 3) says
    // which end of the segment the caller is working from and biases the
    // choice between two valid roots. Returns undefined/null when no
    // usable root exists.
    var ts = equation2(3 * (bz.y[0] - k * bz.x[0]),
                       2 * (bz.y[1] - k * bz.x[1]),
                       bz.y[2] - k * bz.x[2]);
    if(ts.length < 1) return;
    // Accept roots slightly outside [0, 1] to absorb numeric error.
    var t_torelance = 0.001;
    var min_t = 0 - t_torelance;
    var max_t = 1 + t_torelance;
    var t0_invalid = (ts[0] < min_t || ts[0] > max_t);
    if(ts.length > 1){
        var t1_invalid = (ts[1] < min_t || ts[1] > max_t);
        if(t0_invalid && t1_invalid) return;
        else if(t0_invalid) return ts[1];
        else if(t1_invalid) return ts[0];
        else {
            // BUGFIX: three of these branches were missing "return" in the
            // original, so the function computed a root and then returned
            // undefined anyway (NaN propagated into the caller).
            if (idx == 0) {
                if (chk_condition_1(bz.q, 0, 1, 2, 3)) return Math.max(ts[0], ts[1]);
                else return Math.min(ts[0], ts[1]);
            } else {
                if (chk_condition_1(bz.q, 2, 3, 0, 1)) return Math.min(ts[0], ts[1]);
                else return Math.max(ts[0], ts[1]);
            }
        }
    } else {
        return t0_invalid ? null : ts[0];
    }
}
// ------------------------------------------------
function col_Gray(){
    // Builds a 100% gray color for strokes. Illustrator 10 requires the
    // value to be wrapped in a generic Color object (see ver10).
    var gray = new GrayColor();
    gray.gray = 100;
    if(! ver10) return gray;
    var wrapped = new Color();
    wrapped.gray = gray;
    return wrapped;
}
// ------------------------------------------------
// solve a quadratic equation ( ax^2+bx+c=0 )
function equation2(a, b, c) {
    // Real roots of a*x^2 + b*x + c = 0 as an array (0, 1 or 2 entries).
    // Falls back to the linear case when a == 0.
    if(a == 0) return b == 0 ? [] : [-c / b];
    var a2 = a * 2;                         // denominator 2a
    var discriminant = b * b - 2 * a2 * c;  // b^2 - 4ac
    if(discriminant < 0) return [];
    if(discriminant == 0) return [-b / a2];
    var root = Math.sqrt(discriminant);
    return [(-b + root) / a2, (-b - root) / a2];
}
// ------------------------------------------------
//
function sideSelection(ps1, ps2) {
    // True when the segment from PathPoint ps1 to ps2 counts as selected:
    // neither end unselected, ps1 not selected only by its left handle,
    // ps2 not selected only by its right handle.
    if(ps1.selected == PathPointSelection.NOSELECTION) return false;
    if(ps1.selected == PathPointSelection.LEFTDIRECTION) return false;
    if(ps2.selected == PathPointSelection.NOSELECTION) return false;
    if(ps2.selected == PathPointSelection.RIGHTDIRECTION) return false;
    return true;
}
// --------------------------------------
//
function arrEq(arr1, arr2) {
    // Element-wise equality driven by arr1's length; arr2 may be longer
    // and any extra trailing elements are ignored (callers rely on this).
    var i = arr1.length;
    while(i--){
        if (arr1[i] != arr2[i]) return false;
    }
    return true;
}
// ------------------------------------------------
//
function fixedTo(n, k){
    // Rounds "n" to "k" decimal places and returns a number
    // (toFixed yields a string; "- 0" coerces in and out).
    var rounded = (n - 0).toFixed(k);
    return rounded - 0;
}
// -----------------------------------------------
function getIdx(pi, n){ // PathItem, number for index
    // Normalizes a pathPoints index for "pi": wraps around on closed
    // paths, returns -1 when "n" falls outside an open path.
    var len = pi.pathPoints.length;
    if(! pi.closed){
        return (n >= 0 && n <= len - 1) ? n : -1;
    }
    return n >= 0 ? n % len : len - Math.abs(n % len);
}
// ------------------------------------------------
// extract PathItems from the selection which length of PathPoints
// is greater than "n"
function getPathItemsInSelection(n, paths){
    // Fills "paths" with the PathItems in the current selection that have
    // more than "n" anchor points (see extractPaths).
    // Guards: no open document, or a non-array selection (e.g. text caret).
    if(documents.length < 1) return;
    var s = activeDocument.selection;
    if (!(s instanceof Array) || s.length < 1) return;
    extractPaths(s, n, paths);
}
// --------------------------------------
// extract PathItems from "s" (Array of PageItems -- ex. selection),
// and put them into an Array "paths". If "pp_length_limit" is specified,
// this function extracts PathItems which PathPoints length is greater
// than this number.
function extractPaths(s, pp_length_limit, paths){
    // Appends to "paths" every PathItem in "s" that is neither a guide nor
    // a clipping path, recursing into GroupItems and CompoundPathItems
    // (grouped PathItems inside a compound path are not reached). When
    // "pp_length_limit" is truthy, paths with that many anchor points or
    // fewer are skipped.
    for(var i = 0; i < s.length; i++){
        var item = s[i];
        switch(item.typename){
        case "PathItem":
            if(item.guides || item.clipping) break;
            if(pp_length_limit && item.pathPoints.length <= pp_length_limit) break;
            paths.push(item);
            break;
        case "GroupItem":
            // search for PathItems in GroupItem, recursively
            extractPaths(item.pageItems, pp_length_limit, paths);
            break;
        case "CompoundPathItem":
            extractPaths(item.pathItems, pp_length_limit, paths);
            break;
        }
    }
}
// ------------------------------------------------
//
function isCorner(p, idx){
    // Decides whether anchor p[idx] is a corner (tangent direction changes
    // there) by measuring the angle formed at the anchor by the
    // tangent-defining points on either side.
    var pnt0 = getPntSetsAngleOfTangent(p, idx, -1);
    var pnt1 = getPntSetsAngleOfTangent(p, idx, 1);
    // null: no neighbor on that side (open-path end) - treat as a corner.
    if(! pnt0 || ! pnt1) return true;
    // []: the neighboring anchor coincides with this one - treat as smooth.
    if(pnt0.length < 1 || pnt1.length < 1) return false;
    var rad = getRad2(pnt0, p[idx].anchor, pnt1);
    // Nearly straight (within ~0.1 rad of PI) does not count as a corner.
    if(rad > Math.PI - 0.1) return false;
    return true;
}
// --------------------------------------
function getRad(p1,p2) {
    // Angle (radians) of the vector from p1 to p2, per Math.atan2.
    var dy = p2[1] - p1[1];
    var dx = p2[0] - p1[0];
    return Math.atan2(dy, dx);
}
// ----------------------------------------------
//
function getRad2(p1, o, p2){
    // Angle (radians) of the corner p1-o-p2, via the dot product of the
    // unit vectors pointing from "o" toward p1 and p2.
    var u = normalize(p1, o);
    var v = normalize(p2, o);
    var dot = u[0] * v[0] + u[1] * v[1];
    return Math.acos(dot);
}
// ------------------------------------------------
function normalize(p, o){
    // Unit vector pointing from "o" to "p"; [0,0] when the points coincide.
    var len = dist(p, o);
    if(len == 0) return [0, 0];
    return [(p[0] - o[0]) / len,
            (p[1] - o[1]) / len];
}
// ------------------------------------------------
//
function dist(p1, p2) {
    // Euclidean distance between the 2D points p1 and p2.
    var dx = p1[0] - p2[0];
    var dy = p1[1] - p2[1];
    return Math.sqrt(dx * dx + dy * dy);
}
// ------------------------------------------------
//
function getPntSetsAngleOfTangent(p, idx1, dir){
    // Finds the point that, together with anchor p[idx1], defines the
    // tangent direction on one side of the anchor
    // (dir = -1: incoming/left side, dir = +1: outgoing/right side).
    // Returns:
    //   null  - no neighbor on that side (end of an open path)
    //   []    - the neighboring anchor coincides with this anchor
    //   [x,y] - the handle or neighbor point giving the tangent direction
    if(! dir) dir = -1;   // default to the incoming side
    var idx2 = getIdx(p.parent, idx1 + dir);
    if(idx2 < 0) return null;
    var p2 = p[idx2];
    with(p[idx1]){
        if(dir < 0){
            // Handle retracted onto the anchor: derive the direction from
            // the neighboring point (or its facing handle) instead.
            if(arrEq(leftDirection, anchor)){
                if(arrEq(p2.anchor, anchor)) return [];
                if(arrEq(p2.anchor, p2.rightDirection)
                   || arrEq(p2.rightDirection, anchor)) return p2.anchor;
                else return p2.rightDirection;
            } else {
                return leftDirection;
            }
        } else {
            // Mirror image of the dir < 0 branch for the outgoing side.
            if(arrEq(anchor, rightDirection)){
                if(arrEq(anchor, p2.anchor)) return [];
                if(arrEq(p2.anchor, p2.leftDirection)
                   || arrEq(anchor, p2.leftDirection)) return p2.anchor;
                else return p2.leftDirection;
            } else {
                return rightDirection;
            }
        }
    }
}
// Bezier ================================
// Wraps the cubic segment between pathPoints pp[idx1] and pp[idx2].
var Bezier = function(pp, idx1, idx2){
    this.p = pp;         // the whole pathPoints collection
    this.p0 = pp[idx1];  // segment start PathPoint
    this.p1 = pp[idx2];  // segment end PathPoint
    // Control points: [start anchor, start right handle,
    //                  end left handle, end anchor].
    this.q = [pp[idx1].anchor, pp[idx1].rightDirection,
              pp[idx2].leftDirection, pp[idx2].anchor];
    this.a0 = this.q[0];
    this.r = this.q[1];
    this.l = this.q[2];
    this.a1 = this.q[3];
    // Power-basis coefficients of x(t) and y(t) (see defBezierCoefficients).
    this.x = defBezierCoefficients(this.q, 0);
    this.y = defBezierCoefficients(this.q, 1);
}
Bezier.prototype = {
    // Point [x, y] on the curve at parameter t (Horner evaluation).
    pnt : function(t){
        return [ t * (t * (this.x[0] * t + this.x[1]) + this.x[2]) + this.x[3],
                 t * (t * (this.y[0] * t + this.y[1]) + this.y[2]) + this.y[3] ];
    },
    // First derivative [x'(t), y'(t)].
    dv : function(t){
        return [ t * (3 * this.x[0] * t + 2 * this.x[1]) + this.x[2],
                 t * (3 * this.y[0] * t + 2 * this.y[1]) + this.y[2] ];
    },
    // Swaps the x and y axes in place. NOTE: q[i].reverse() mutates the
    // underlying anchor/handle arrays, so callers must invoke xyRot()
    // again (as getTangent does) to restore them.
    xyRot : function(){
        for(var i = 0; i < 4; i++) this.q[i].reverse();
        var tmp = this.y.slice(0);
        this.y = this.x.slice(0);
        this.x = tmp.slice(0);
    },
    // Translates the control points by (m, n) and recomputes the cached
    // aliases and polynomial coefficients. mvDat(0, 0) just refreshes them.
    mvDat : function(m, n){
        if(m||n){
            for(var i=0; i<4; i++){ this.q[i][0] += m; this.q[i][1] += n; }
        }
        this.a0 = this.q[0]; this.r = this.q[1];
        this.l = this.q[2]; this.a1 = this.q[3];
        this.x = defBezierCoefficients(this.q, 0);
        this.y = defBezierCoefficients(this.q, 1);
    }
}
// ------------------------------------------------
function bezierEq(q, t) {
    // Point on the cubic Bezier with control points "q" at parameter "t"
    // (factored form of the Bernstein polynomial).
    var u = 1 - t;
    var w0 = u * u * u;     // weight of q[0]
    var w3 = t * t * t;     // weight of q[3]
    var mid = 3 * u * t;    // shared factor of the middle terms
    return [w0 * q[0][0] + mid * (u * q[1][0] + t * q[2][0]) + w3 * q[3][0],
            w0 * q[0][1] + mid * (u * q[1][1] + t * q[2][1]) + w3 * q[3][1]];
}
// ------------------------------------------------
function defBezierCoefficients(q, n){
    // Power-basis coefficients [a, b, c, d] (a*t^3 + b*t^2 + c*t + d) of
    // the cubic Bezier coordinate q[i][n] (n: 0 = x, 1 = y).
    var p0 = q[0][n], p1 = q[1][n], p2 = q[2][n], p3 = q[3][n];
    return [-p0 + 3 * (p1 - p2) + p3,
            3 * (p0 - 2 * p1 + p2),
            3 * (p1 - p0),
            p0];
}
// -----------------------------------------------
// -----------------------------------------------
function isSelected(p){ // PathPoint
    // True when the anchor itself is selected (not merely a handle).
    return p.selected == PathPointSelection.ANCHORPOINT;
}
// ----------------------------------------------
function activateEditableLayer(pi){
    // If the active layer cannot be drawn on (locked or hidden), switch
    // the document to the given PathItem's layer so pathItems.add() works.
    var lay = activeDocument.activeLayer;
    if(lay.locked || !lay.visible) activeDocument.activeLayer = pi.layer;
}
// --------------------------------------
// Entry point: runs immediately when the script is executed in Illustrator.
main();
|
#!/usr/bin/env bash
# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -x
set -o errexit
set -o nounset
set -o pipefail

# Resolve the directory containing this script and load the shared helpers
# (build::common::* and build::eksd_releases::* come from common.sh).
SCRIPT_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd -P)"
source "${SCRIPT_ROOT}/common.sh"

# Positional arguments:
#   1: root directory for downloaded binary dependencies
#   2: dependency spec path: os-arch/product/owner/repo[/s3-folder]
#   3: S3 bucket holding build artifacts
#   4: tag used as the default S3 artifacts folder
#   5: optional EKS-D release branch (defaults via common.sh)
BINARY_DEPS_DIR="$1"
DEP="$2"
ARTIFACTS_BUCKET="$3"
LATEST_TAG="$4"
RELEASE_BRANCH="${5-$(build::eksd_releases::get_release_branch)}"

# Strip the deps-dir prefix, then split the spec on '/'.
DEP=${DEP#"$BINARY_DEPS_DIR"}
OS_ARCH="$(cut -d '/' -f1 <<< ${DEP})"
PRODUCT=$(cut -d '/' -f2 <<< ${DEP})
REPO_OWNER=$(cut -d '/' -f3 <<< ${DEP})
REPO=$(cut -d '/' -f4 <<< ${DEP})
S3_ARTIFACTS_FOLDER_OVERRIDE=$(cut -d '/' -f5 <<< ${DEP})
ARCH="$(cut -d '-' -f2 <<< ${OS_ARCH})"
CODEBUILD_CI="${CODEBUILD_CI:-false}"
# An explicit 5th path segment overrides the tag-derived S3 folder and
# switches the URL lookup into git-commit mode.
S3_ARTIFACTS_FOLDER=${S3_ARTIFACTS_FOLDER_OVERRIDE:-$LATEST_TAG}
GIT_COMMIT_OVERRIDE=false
if [ -n "$S3_ARTIFACTS_FOLDER_OVERRIDE" ]; then
    GIT_COMMIT_OVERRIDE=true
fi

# Destination: a file when the spec names a tarball, a directory otherwise.
OUTPUT_DIR_FILE=$BINARY_DEPS_DIR/linux-$ARCH/$PRODUCT/$REPO_OWNER/$REPO
if [[ $REPO == *.tar.gz ]]; then
    mkdir -p $(dirname $OUTPUT_DIR_FILE)
else
    mkdir -p $OUTPUT_DIR_FILE
fi

# Work out the artifact URL: EKS-D kubernetes assets, other EKS-D
# components, or EKS-A artifacts from the given bucket.
if [[ $PRODUCT = 'eksd' ]]; then
    if [[ $REPO_OWNER = 'kubernetes' ]]; then
        TARBALL="kubernetes-$REPO-linux-$ARCH.tar.gz"
        URL=$(build::eksd_releases::get_eksd_kubernetes_asset_url $TARBALL $RELEASE_BRANCH $ARCH)
        # these tarballs will extract with the kubernetes/{client,server} folders
        OUTPUT_DIR_FILE=$BINARY_DEPS_DIR/linux-$ARCH/$PRODUCT
    else
        URL=$(build::eksd_releases::get_eksd_component_url $REPO_OWNER $RELEASE_BRANCH $ARCH)
    fi
else
    TARBALL="$REPO-linux-$ARCH.tar.gz"
    URL=$(build::common::get_latest_eksa_asset_url $ARTIFACTS_BUCKET $REPO_OWNER/$REPO $ARCH $S3_ARTIFACTS_FOLDER $GIT_COMMIT_OVERRIDE)
fi

# In CodeBuild CI the artifact may still be uploading; wait for it.
if [ "$CODEBUILD_CI" = "true" ]; then
    build::common::wait_for_tarball $URL
fi

# Download: save tarball specs as a file, otherwise stream-extract.
if [[ $REPO == *.tar.gz ]]; then
    curl -sSL $URL -o $OUTPUT_DIR_FILE
else
    curl -sSL $URL | tar xz -C $OUTPUT_DIR_FILE
fi
|
<reponame>domonda/go-sqldb
package db
import (
"context"
"fmt"
"time"
"github.com/domonda/go-sqldb"
)
// SetConn sets the global connection returned by Conn
// if there is no other connection in the context passed to Conn.
// Panics when passed a nil connection.
func SetConn(c sqldb.Connection) {
	if c != nil {
		conn = c
		return
	}
	panic("must not set nil sqldb.Connection")
}
// Conn returns a non nil sqldb.Connection from ctx
// or the global connection set with SetConn.
// The returned connection will use the passed context.
// See sqldb.Connection.WithContext
func Conn(ctx context.Context) sqldb.Connection {
	// Delegates to ConnDefault with the package-level global connection.
	return ConnDefault(ctx, conn)
}
// ConnDefault returns a non nil sqldb.Connection from ctx
// or the passed defaultConn.
// The returned connection will use the passed context.
// See sqldb.Connection.WithContext
func ConnDefault(ctx context.Context, defaultConn sqldb.Connection) sqldb.Connection {
	connection, _ := ctx.Value(connKey).(sqldb.Connection)
	if connection == nil {
		connection = defaultConn
	}
	// Rewrap only when the connection is not already bound to ctx.
	if connection.Context() != ctx {
		connection = connection.WithContext(ctx)
	}
	return connection
}
// ContextWithConn returns a new context with the passed sqldb.Connection
// added as value so it can be retrieved again using Conn(ctx).
// Passing a nil connection causes Conn(ctx)
// to return the global connection set with SetConn.
func ContextWithConn(ctx context.Context, conn sqldb.Connection) context.Context {
	// Note: the conn parameter shadows the package-level conn variable here.
	return context.WithValue(ctx, connKey, conn)
}
// ContextWithoutCancel returns a new context that inherits
// all values from parent, but not its cancellation state.
func ContextWithoutCancel(parent context.Context) context.Context {
	switch parent.(type) {
	case contextWithoutCancel:
		// Already detached from cancellation; wrapping again is redundant.
		return parent
	default:
		return contextWithoutCancel{parent}
	}
}
// contextWithoutCancel wraps a parent context, forwarding its values
// while reporting no deadline, a nil Done channel, and a nil error.
type contextWithoutCancel struct {
	parent context.Context
}

// Deadline always reports that no deadline is set.
func (contextWithoutCancel) Deadline() (time.Time, bool) { return time.Time{}, false }

// Done returns nil, so this context can never be canceled.
func (contextWithoutCancel) Done() <-chan struct{} { return nil }

// Err always returns nil, consistent with the nil Done channel.
func (contextWithoutCancel) Err() error { return nil }

// Value delegates lookups to the wrapped parent context.
func (c contextWithoutCancel) Value(key interface{}) interface{} { return c.parent.Value(key) }

// String identifies the wrapper in debug output.
func (c contextWithoutCancel) String() string { return fmt.Sprintf("%s.WithoutCancel", c.parent) }
|
# Cache HTTPS credentials in memory for 3 hours (10800 s) so the push
# below does not prompt for them again.
git config --global credential.helper "cache --timeout=10800"
# Commit all changes to tracked files with a fixed message ("cambios").
# NOTE(review): -a only stages already-tracked files; new files need "git add".
git commit -a -m cambios
git push
|
/*
* Copyright (C) 2012 Sony Mobile Communications AB
*
* This file is part of ApkAnalyser.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package andreflect.gui.action.injection;
import javax.swing.Icon;
import jerl.bcm.inj.Injection;
import mereflect.MEMethod;
import org.jf.dexlib.Code.Instruction;
import org.jf.dexlib.Code.SingleRegisterInstruction;
import analyser.gui.MainFrame;
import analyser.gui.actions.bytecodemod.AbstractTreeBytecodeModAction;
import analyser.logic.BytecodeModificationMediator;
import analyser.logic.Reference;
import andreflect.DexMethod;
import andreflect.injection.impl.DalvikMethodOffsetExCatch;
/**
 * Tree action that scans Dalvik methods for MOVE_EXCEPTION instructions
 * and registers a DalvikMethodOffsetExCatch bytecode injection ("Print
 * exception catch") for each one found. Implemented as a lazily created
 * singleton.
 */
public class DalvikMethodOffsetExCatchAction extends AbstractTreeBytecodeModAction {
    private static final long serialVersionUID = -7728262540521665990L;

    // Singleton instance.
    // NOTE(review): lazy initialization is not thread-safe; confirm this is
    // only ever called from the Swing event-dispatch thread.
    protected static DalvikMethodOffsetExCatchAction m_inst = null;

    /** Returns the singleton, creating and wiring it on first use. */
    public static DalvikMethodOffsetExCatchAction getInstance(MainFrame mainFrame)
    {
        if (m_inst == null)
        {
            m_inst = new DalvikMethodOffsetExCatchAction("Print exception catch", null);
            m_inst.setMainFrame(mainFrame);
        }
        return m_inst;
    }

    protected DalvikMethodOffsetExCatchAction(String arg0, Icon arg1)
    {
        super(arg0, arg1);
    }

    /**
     * Registers injections for the method and, when at least one
     * MOVE_EXCEPTION site was found, marks its tree node as modified.
     * Abstract methods carry no code and are skipped.
     */
    @Override
    protected void modify(MEMethod method) throws Throwable {
        if (method.isAbstract()) {
            return;
        }
        DexMethod dexMethod = (DexMethod) method;
        if (hasCatch(dexMethod)) {
            ((MainFrame) getMainFrame()).getMidletTree().findAndMarkNode(method, Reference.MODIFIED);
        }
    }

    /** Unused: injections are built directly in hasCatch(). */
    @Override
    protected Injection getInjection(String className, String methodSignature) {
        //not used
        return null;
    }

    /**
     * Scans the method's instructions for MOVE_EXCEPTION opcodes and
     * registers a DalvikMethodOffsetExCatch injection anchored at the
     * following instruction, capturing the register that receives the
     * exception. Returns true when at least one injection was registered.
     */
    public boolean hasCatch(DexMethod method) {
        // No code item (e.g. abstract/native): nothing to scan.
        if (method.getEncodedMethod().codeItem == null) {
            return false;
        }
        boolean ret = false;
        Instruction[] instructions = method.getEncodedMethod().codeItem.getInstructions();
        for (int i = 0; i < instructions.length; i++) {
            switch (instructions[i].deodexedInstruction.opcode) {
            case MOVE_EXCEPTION:
                // NOTE(review): assumes MOVE_EXCEPTION is never the last
                // instruction; instructions[i + 1] would otherwise throw
                // ArrayIndexOutOfBoundsException - confirm with dex layout.
                DalvikMethodOffsetExCatch catchInjection = new DalvikMethodOffsetExCatch(getMethodSignature(method),
                        instructions[i + 1],
                        method.getMEClass().getName() + ":" + getMethodSignature(method),
                        (short) ((SingleRegisterInstruction) instructions[i]).getRegisterA());
                BytecodeModificationMediator.getInstance().registerModification(
                        method.getMEClass().getResource().getContext(),
                        method.getMEClass(),
                        catchInjection,
                        method);
                ret = true;
                break;
            }
        }
        return ret;
    }
}
#!/bin/bash
# Formats the "director" package by running black, then autopep8, then isort.
# cd to the repository root (two levels up from this script).
cd "$(dirname -- "$(dirname -- "$(readlink -f "$0")")")"
# Fail fast when any required formatter is missing from PATH.
for cmd in black autopep8 isort; do
    if [[ ! -x "$(which "$cmd")" ]]; then
        echo "Could not find $cmd. Please make sure that black, autopep8, and isort are all installed."
        exit 1
    fi
done
# Order is important. There are a few things that black and autopep8 disagree on, and I side
# with autopep8 on those.
black director && autopep8 --in-place --recursive director && isort --recursive director
|
# InOrderTraversal
def InOrderTraversal(root, res=None):
    """Return the in-order (left, root, right) traversal of a binary tree.

    Args:
        root: Root node exposing .val/.left/.right, or None for an empty tree.
        res: Optional list to append into; a fresh list is created when omitted.

    Returns:
        The list of node values in in-order sequence.
    """
    # BUGFIX: the original used a mutable default (res=[]), so every call
    # without an explicit list kept appending to the same shared list.
    if res is None:
        res = []
    if root is None:
        return res
    InOrderTraversal(root.left, res)
    res.append(root.val)
    InOrderTraversal(root.right, res)
    return res
# PreOrderTraversal
def PreOrderTraversal(root, res=None):
    """Return the pre-order (root, left, right) traversal of a binary tree.

    Args:
        root: Root node exposing .val/.left/.right, or None for an empty tree.
        res: Optional list to append into; a fresh list is created when omitted.

    Returns:
        The list of node values in pre-order sequence.
    """
    # BUGFIX: the original used a mutable default (res=[]), so every call
    # without an explicit list kept appending to the same shared list.
    if res is None:
        res = []
    if root is None:
        return res
    res.append(root.val)
    PreOrderTraversal(root.left, res)
    PreOrderTraversal(root.right, res)
    return res
# Sample binary tree node definition
class TreeNode:
    """Minimal binary-tree node: a value plus optional left/right children."""

    def __init__(self, val=0, left=None, right=None):
        # Node payload and (possibly None) child subtrees.
        self.val = val
        self.left = left
        self.right = right
# Sample binary tree creation
# Construct a sample binary tree
#         1
#        / \
#       2   3
#      / \
#     4   5
root = TreeNode(1, TreeNode(2, TreeNode(4), TreeNode(5)), TreeNode(3))
# Perform in-order traversal: prints [4, 2, 5, 1, 3]
in_order_result = InOrderTraversal(root)
print("In order:", in_order_result)
# Perform pre-order traversal: prints [1, 2, 4, 5, 3]
pre_order_result = PreOrderTraversal(root)
print("Pre order:", pre_order_result)
<filename>src/KSeq.ts
import {Ident, IdentSet, IdentGenerator, LSEQIdentGenerator, Segment} from './idents';
import {AtomList, ArrayAtomList} from './storage';
import {Op, OpKind, InsertOp, RemoveOp} from './Op';
/**
* A CmRDT sequence that supports concurrent simultaneous editing
* while preserving the intention of each edit.
*/
export class KSeq<T> {
/**
* The unique name of this replica.
*/
name: string
/**
* The current logical time.
*/
private time: number
/**
* The ordered list of atoms in the sequence.
*/
private atoms: AtomList<T>
/**
* The set of idents of atoms that have been removed.
*/
private removed: IdentSet
/**
* The generator used to create unique idents for new atoms.
*/
private identGenerator: IdentGenerator
/**
* Creates an instance of KSeq<T>.
* @param name The unique name for this replica.
* @param atoms The backing storage, if null, creates a new ArrayAtomList<T>.
* @param identGenerator The id generator, if null, creates a new LSEQIdentGenerator.
* @returns An instance of KSeq<T>.
*/
constructor(name: string, atoms?: AtomList<T>, identGenerator?: IdentGenerator) {
this.name = name;
this.time = 0;
this.atoms = atoms || new ArrayAtomList<T>();
this.removed = new IdentSet();
this.identGenerator = identGenerator || new LSEQIdentGenerator();
}
/**
* Gets the number of items in the sequence.
* @returns The number of items in the sequence.
*/
size(): number {
return this.atoms.size();
}
/**
* Gets the maximum depth of identifiers in the sequence.
* @returns The depth of the sequence.
*/
depth(): number {
let max = 0;
this.forEach((atom) => {
let depth = atom.id.depth();
if (max < depth) max = depth;
});
return max;
}
/**
* Inserts a value into the sequence at the specified position.
* @param value The value to insert.
* @param pos The position at which to insert the value.
* @returns An InsertOp that can be applied to other KSeqs
* to reproduce the insertion.
*/
insert(value: T, pos: number): InsertOp {
if (pos < 0) throw new RangeError(`The position ${pos} must be greater than or equal to zero.`);
let before = this.atoms.get(pos - 1);
let after = this.atoms.get(pos);
let id = this.identGenerator.getIdent(this.name, ++this.time, (before && before.id), (after && after.id));
let op = new InsertOp(this.name, this.getWallTime(), id, value);
this.apply(op);
return op;
}
/**
* Appends a value to the end of the sequence.
* @param value The value to append.
* @returns An InsertOp that can be applied to other KSeqs
* to reproduce the insertion.
*/
append(value: T): InsertOp {
return this.insert(value, this.size());
}
/**
* Removes the value at the specified position from the sequence.
* @param pos The position of the value to remove.
* @returns An RemoveOp that can be applied to other KSeqs
* to reproduce the removal.
*/
remove(pos: number): RemoveOp {
if (pos < 0) throw new RangeError(`The position ${pos} must be greater than or equal to zero.`);
let atom = this.atoms.get(pos);
if (atom) {
let op = new RemoveOp(this.name, this.getWallTime(), atom.id)
this.apply(op);
return op;
}
return null;
}
/**
* Gets the value at the specified position in the sequence.
* @param pos The desired position.
* @returns The value at that position,
* or undefined if no such value exists.
*/
get(pos: number): T {
const atom = this.atoms.get(pos);
return atom ? atom.value : undefined;
}
/**
* Applies a function to each of the values in the sequence.
* @param func The function to apply.
*/
forEach(func: { (T): void }): void {
this.atoms.forEach((atom) => func(atom.value));
}
/**
* Applies a transformation function to each of the values in the sequence.
* @param func The transformation function to apply.
* @returns An array containing the results of the function calls.
*/
map<R>(func: { (T): R }): R[] {
return this.atoms.map((atom) => func(atom.value));
}
/**
* Converts the sequence to an array.
* @returns An array representation of the values in the sequence.
*/
toArray(): T[] {
return this.atoms.map((atom) => atom.value);
}
/**
* Converts the sequence to a compact object suitable for serialization.
* @returns A serializable object.
*/
toJSON(): Object {
return {
n: this.name,
t: this.time,
s: this.atoms.map((atom) => [atom.id.toString(), atom.value]),
r: this.removed.toJSON()
}
}
/**
* Applies the specified Op to the sequence. This can be used to apply
* operations that have been generated by remote sequences.
* @param op The Op to apply.
*/
apply(op: Op): void {
switch (op.kind) {
case OpKind.Insert:
let insertOp = <InsertOp> op;
// If an atom with the specified ident has already been removed,
// the ops have been received out of order. We should ignore the insert.
if (!this.removed.has(insertOp.id)) {
this.atoms.add(insertOp.id, insertOp.value);
}
break;
case OpKind.Remove:
let removeOp = <RemoveOp> op;
// Ignore repeated remove ops.
if (!this.removed.has(removeOp.id)) {
this.atoms.remove(removeOp.id);
this.removed.add(removeOp.id);
}
break;
default:
throw new Error(`Unknown op kind ${op.kind}`);
}
}
/**
* Gets the current wall time as a UNIX epoch timestamp.
* @returns An integer representing the wall time.
*/
private getWallTime(): number {
return Math.floor(new Date().valueOf() / 1000);
}
} |
#!/bin/bash
# - Update src/manifest.json with the new version number
# - Run the below command
# - Then the file /manifests.json also needs to be updated with the new manifest file
# NOTE(review): destination paths are hardcoded to a local joplin-plugins-test
# checkout under ~/src - adjust before reusing this script elsewhere.
npm run dist && cp publish/org.joplinapp.plugins.RegisterCommandDemo.jpl ~/src/joplin-plugins-test/plugins/org.joplinapp.plugins.RegisterCommandDemo/plugin.jpl && cp publish/org.joplinapp.plugins.RegisterCommandDemo.json ~/src/joplin-plugins-test/plugins/org.joplinapp.plugins.RegisterCommandDemo/plugin.json
package baggageclaimcmd
import (
"bufio"
"bytes"
"errors"
"fmt"
"io"
"os"
"os/exec"
"syscall"
"code.cloudfoundry.org/lager"
"github.com/concourse/concourse/worker/baggageclaim/fs"
"github.com/concourse/concourse/worker/baggageclaim/kernel"
"github.com/concourse/concourse/worker/baggageclaim/volume"
"github.com/concourse/concourse/worker/baggageclaim/volume/driver"
)
// btrfsFSType is the btrfs filesystem magic number as reported in the
// f_type field of statfs(2).
const btrfsFSType = 0x9123683e

// driver selects and constructs the volume.Driver implementation, honoring
// an explicit cmd.Driver choice or auto-detecting one ("detect").
func (cmd *BaggageclaimCommand) driver(logger lager.Logger) (volume.Driver, error) {
	var fsStat syscall.Statfs_t
	err := syscall.Statfs(cmd.VolumesDir.Path(), &fsStat)
	if err != nil {
		return nil, fmt.Errorf("failed to stat volumes filesystem: %s", err)
	}

	// The overlay driver requires kernel 4.0 or newer (checked again below).
	kernelSupportsOverlay, err := kernel.CheckKernelVersion(4, 0, 0)
	if err != nil {
		return nil, fmt.Errorf("failed to check kernel version: %s", err)
	}

	// Best-effort load of the btrfs kernel module before probing support.
	// we don't care about the error here
	_ = exec.Command("modprobe", "btrfs").Run()

	supportsBtrfs, err := supportsFilesystem("btrfs")
	if err != nil {
		return nil, fmt.Errorf("failed to detect if btrfs is supported: %s", err)
	}

	// The btrfs userspace binaries must also be present on PATH.
	_, err = exec.LookPath(cmd.BtrfsBin)
	if err != nil {
		supportsBtrfs = false
	}
	_, err = exec.LookPath(cmd.MkfsBin)
	if err != nil {
		supportsBtrfs = false
	}

	// Auto-detection preference order: overlay > btrfs > naive.
	if cmd.Driver == "detect" {
		if kernelSupportsOverlay {
			cmd.Driver = "overlay"
		} else if supportsBtrfs {
			cmd.Driver = "btrfs"
		} else {
			cmd.Driver = "naive"
		}
	}

	volumesDir := cmd.VolumesDir.Path()

	// btrfs requested but the volumes dir is not on a btrfs filesystem:
	// create a backing image (<volumesDir>.img) formatted as btrfs.
	if cmd.Driver == "btrfs" && uint32(fsStat.Type) != btrfsFSType {
		volumesImage := volumesDir + ".img"
		filesystem := fs.New(logger.Session("fs"), volumesImage, volumesDir, cmd.MkfsBin)

		// Size the image to the disk minus 10 GiB of headroom.
		diskSize := fsStat.Blocks * uint64(fsStat.Bsize)
		mountSize := diskSize - (10 * 1024 * 1024 * 1024)
		// Unsigned subtraction underflowed (disk smaller than the headroom):
		// fall back to using the whole disk size.
		if int64(mountSize) < 0 {
			mountSize = diskSize
		}

		err = filesystem.Create(mountSize)
		if err != nil {
			return nil, fmt.Errorf("failed to create btrfs filesystem: %s", err)
		}
	}

	if cmd.Driver == "overlay" && !kernelSupportsOverlay {
		return nil, errors.New("overlay driver requires kernel version >= 4.0.0")
	}

	logger.Info("using-driver", lager.Data{"driver": cmd.Driver})

	var d volume.Driver
	switch cmd.Driver {
	case "overlay":
		d = driver.NewOverlayDriver(cmd.OverlaysDir)
	case "btrfs":
		d = driver.NewBtrFSDriver(logger.Session("driver"), cmd.BtrfsBin)
	case "naive":
		d = &driver.NaiveDriver{}
	default:
		return nil, fmt.Errorf("unknown driver: %s", cmd.Driver)
	}

	return d, nil
}
// supportsFilesystem reports whether the given filesystem name appears in
// /proc/filesystems (i.e. the kernel currently supports it).
func supportsFilesystem(fs string) (bool, error) {
	filesystems, err := os.Open("/proc/filesystems")
	if err != nil {
		return false, err
	}
	defer filesystems.Close()

	fsio := bufio.NewReader(filesystems)

	// Substring match against each line (lines look like "nodev\tbtrfs").
	fsMatch := []byte(fs)
	for {
		line, _, err := fsio.ReadLine()
		if err != nil {
			if err == io.EOF {
				// Reached the end of the file without a match.
				return false, nil
			}
			return false, err
		}

		if bytes.Contains(line, fsMatch) {
			return true, nil
		}
	}
	// NOTE: the original ended with an unreachable "return false, nil" after
	// the infinite for loop; it has been removed (the loop always returns).
}
|
<gh_stars>1-10
/*
* Copyright 2017 ~ 2025 the original author or authors. <<EMAIL>, <EMAIL>>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.wl4g.devops.dguid.idleaf;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.core.BatchPreparedStatementSetter;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionCallback;
import org.springframework.transaction.support.TransactionTemplate;
import com.wl4g.devops.dguid.leaf.LeafIdSegmentHandler;
/**
* {@link LeafIdSegmentTests}
*
* @author Wangl.sir <<EMAIL>, <EMAIL>>
* @version v1.0 2020年6月15日
* @since
*/
/**
 * Spring-wired drivers for LeafIdSegmentHandler: one endless id-printing
 * loop (synGetId) plus manual batch-insert helpers for id_test.
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = { "file:src/test/resources/idleaf/app-leaf.xml" })
public class LeafIdSegmentTests {

    @Autowired
    private LeafIdSegmentHandler segmentHandler;

    @Autowired
    private JdbcTemplate jdbcTemplate;

    // Hand-off queue between the id producer and the batch-inserting
    // consumer thread in getId(); bounded at 1000 entries.
    private BlockingQueue<Long> queue = new LinkedBlockingQueue<Long>(1000);

    // NOTE(review): loops forever printing generated ids - a manual driver
    // rather than an assertable unit test; it never terminates under JUnit.
    @Test
    public void synGetId() {
        int i = 0;
        while (true) {
            System.out.println(++i + " :" + segmentHandler.getId());
        }
    }

    @Autowired
    private TransactionTemplate transactionTemplate;

    // Inserts the ids 0..999 into id_test in a single transactional JDBC batch.
    public void batchInsert() {
        List<Long> list = new ArrayList<Long>(1000);
        for (Long i = 0L; i < 1000L; i++) {
            list.add(i);
        }
        // Brief pause before writing (presumably to let the context settle).
        try {
            TimeUnit.SECONDS.sleep(3);
        } catch (InterruptedException e1) {
            e1.printStackTrace();
        }
        try {
            final List<Long> insertedList = list;
            transactionTemplate.execute(new TransactionCallback<Integer>() {
                @Override
                public Integer doInTransaction(TransactionStatus status) {
                    jdbcTemplate.batchUpdate("insert into id_test(p_id) values(?)", new BatchPreparedStatementSetter() {
                        @Override
                        public void setValues(PreparedStatement ps, int i) throws SQLException {
                            Long insertedId = insertedList.get(i);
                            ps.setLong(1, insertedId);
                        }
                        @Override
                        public int getBatchSize() {
                            return insertedList.size();
                        }
                    });
                    return insertedList.size();
                }
            });
            System.out.println("oooolk");
        } catch (Exception e) {
            // NOTE(review): failures are silently swallowed here.
        }
    }

    // Producer/consumer driver: the consumer thread drains the queue and
    // flushes ids to id_test in batches of 10000; the main thread produces
    // ids forever, logging progress every 1000.
    public void getId() {
        new Thread(() -> {
            List<Long> list = new ArrayList<Long>(10000);
            while (true) {
                try {
                    Long id = queue.take();
                    // jdbcTemplate.update("insert into id_test(p_id)
                    // values(?)",
                    // l);
                    // System.out.println("id=" + id);
                    if (list.size() == 10000) {
                        final List<Long> insertedList = list;
                        transactionTemplate.execute(new TransactionCallback<Integer>() {
                            @Override
                            public Integer doInTransaction(TransactionStatus status) {
                                jdbcTemplate.batchUpdate("insert into id_test(p_id) values(?)",
                                        new BatchPreparedStatementSetter() {
                                            @Override
                                            public void setValues(PreparedStatement ps, int i) throws SQLException {
                                                Long insertedId = insertedList.get(i);
                                                ps.setLong(1, insertedId);
                                            }
                                            @Override
                                            public int getBatchSize() {
                                                return insertedList.size();
                                            }
                                        });
                                return insertedList.size();
                            }
                        });
                        list.clear();
                    } else {
                        list.add(id);
                    }
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
            }
        }).start();
        int count = 0;
        while (true) {
            // System.out.println(idLeafService.getId());
            try {
                queue.put(segmentHandler.getId());
                count++;
                if (count % 1000 == 0) {
                    System.out.println("current count no is " + count);
                }
            } catch (InterruptedException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
        }
    }
}
#!/usr/bin/env bash
#
# Wrapper script for running problemtools directly from within the
# problemtools repo without installing it on the system. When
# installing problemtools on the system properly, this script should
# not be used.
#
# Fixes: quote the command substitutions and use "$@" so paths/arguments
# containing whitespace survive word splitting, and avoid appending an
# empty component (which would be a trailing ":" meaning the current
# directory) when PYTHONPATH is unset.
export PYTHONPATH="$(readlink -f "$(dirname "$0")/..")${PYTHONPATH:+:$PYTHONPATH}"
exec python2 -m problemtools.problem2pdf "$@"
|
#!/bin/bash
# ================================================
# Function to check and request sudo permission
#
# @usage
# needsSudoPermission
# ================================================
# Ensure the script has root/sudo access: try a stored password first, then
# prompt the user, and abort the script if access cannot be obtained.
# Relies on helpers defined elsewhere: hasSudo, echoOption, stringIsEmptyOrNull,
# activateSudo, showSudoDialog, dumpError, exitScript, dd.
needsSudoPermission() {
    # Nothing to do if we already have sudo access
    if ! hasSudo
    then
        # First, check for an existing password stored in the options
        if [[ $(echoOption 'sudopw') != false ]]
        then
            sudoPw=$(echoOption 'sudopw')
        fi
        # Validate the stored password, if any.
        # Fix: expansions of ${sudoPw} are quoted so passwords containing
        # spaces or glob characters are passed as a single argument.
        sudoChecked=false
        if ! stringIsEmptyOrNull "${sudoPw}"
        then
            # Try to activate sudo access with the stored password
            activateSudo "${sudoPw}"
            # Check if we now have sudo access
            if hasSudo
            then
                sudoChecked=true
            else
                sudoPw=null
            fi
        fi
        # Fall back to prompting when no (working) stored password exists
        if ! ${sudoChecked}
        then
            # Ask the user for a password
            if stringIsEmptyOrNull "${sudoPw}"
            then
                # Show the sudo dialog
                showSudoDialog
            fi
            # Abort when sudo access still cannot be obtained
            if ! hasSudo
            then
                dumpError "Script needs to be run as root user, with sudo permission or the correct sudo password"
                exitScript
            fi
        fi
    fi
    # Dump the status
    dd "User '${USER}' has root permissions"
}
# ================================================
# Function to activate sudo permissions
#
# @usage
# activateSudo 'PASSWORD'
# ================================================
# ================================================
# Function to activate sudo permissions
#
# @usage
#   activateSudo 'PASSWORD'
# ================================================
activateSudo() {
    # Security fix: never write the plaintext password into the debug log.
    dd "Activating sudo with the provided password"
    try
    (
        # Quote "$1" so passwords with spaces/globs are piped intact.
        (echo "$1" | sudo -S ls >/dev/null 2>&1) || throw 100
    )
    catch || {
        return
    }
}
# ================================================
# Function to check if the script has sudo permissions
#
# @usage
# hasSudo
# ================================================
# ================================================
# Function to check if the script has (passwordless) sudo permissions.
# Returns 0 when running as root or when `sudo -n true` succeeds; 1 otherwise.
#
# NOTE(review): `try`/`throw`/`catch` come from a bash exception framework
# defined elsewhere; the parenthesised body runs in a subshell, so `return 0`
# exits that subshell with status 0 rather than this function directly —
# confirm against the framework's definitions.
#
# @usage
#   hasSudo
# ================================================
hasSudo() {
    try
    (
        if [[ ${USER} == 'root' ]]
        then
            dd "Testing for sudo. Result is true [#1]"
            return 0
        # `sudo -n` never prompts: it fails immediately when no cached
        # credentials exist, in which case throw 100 triggers the catch path.
        elif $(sudo -n true 2>/dev/null || throw 100)
        then
            dd "Testing for sudo. Result is true [#2]"
            return 0
        else
            throw 100
        fi
    )
    catch || {
        dd "Testing for sudo. Result is false"
        return 1
    }
}
|
# Print a friendly greeting to standard output.
printf '%s\n' 'hello world!'
|
/*
 * Read one value from the global `memory` table (declared elsewhere) and
 * store it through `output`.
 *
 * NOTE(review): no bounds checking is performed on memoryBank or
 * memoryAddress — assumes the caller passes valid indices; confirm.
 */
void readRegister(int memoryBank, int memoryAddress, int* output) {
    // Perform read operation from the specified memory bank and address
    // and store the data in the output array
    // Pseudocode: output = memory[memoryBank][memoryAddress]
    *output = memory[memoryBank][memoryAddress];
}
/*
 * Write one value into the global `memory` table (declared elsewhere).
 *
 * NOTE(review): no bounds checking is performed on memoryBank or
 * memoryAddress — assumes the caller passes valid indices; confirm.
 */
void writeRegister(int memoryBank, int memoryAddress, int data) {
    // Perform write operation to the specified memory bank and address
    // with the data from the bitData register
    // Pseudocode: memory[memoryBank][memoryAddress] = data
    memory[memoryBank][memoryAddress] = data;
}
#!/bin/bash
# Copyright
# 2018 Johns Hopkins University (Author: Jesus Villalba)
# Apache 2.0.
#
# Extracts x-vectors for PLDA training, adaptation and evaluation.
. ./cmd.sh
. ./path.sh
set -e

# Defaults; may be overridden on the command line via parse_options.sh.
stage=1
config_file=default_config.sh
use_gpu=false
xvec_chunk_length=12800
# ft selects which fine-tuned network to use (0 = base model from the config).
ft=0

. parse_options.sh || exit 1;
. $config_file

# Choose the extraction command: GPU queue when requested, CPU queue otherwise.
# xvec_args stays empty in the CPU case.
if [ "$use_gpu" == "true" ];then
    xvec_args="--use-gpu true --chunk-length $xvec_chunk_length"
    xvec_cmd="$cuda_eval_cmd"
else
    xvec_cmd="$train_cmd"
fi

# Pick the network matching the requested fine-tuning stage; for ft=0 the
# $nnet_name/$nnet values sourced from the config file are kept.
if [ $ft -eq 1 ];then
    nnet_name=$ft_nnet_name
    nnet=$ft_nnet
elif [ $ft -eq 2 ];then
    nnet_name=$ft2_nnet_name
    nnet=$ft2_nnet
elif [ $ft -eq 3 ];then
    nnet_name=$ft3_nnet_name
    nnet=$ft3_nnet
fi

xvector_dir=exp/xvectors/$nnet_name
if [ $stage -le 1 ]; then
    # Extract xvectors for training LDA/PLDA.
    # Utterances are cut to random chunks of 1000-6000 (presumably frames,
    # i.e. roughly 10-60 s at 100 frames/s — confirm) to match evaluation
    # durations.
    for name in sre_tel
    do
	if [ $plda_num_augs -eq 0 ];then
	    steps_xvec/extract_xvectors_from_wav.sh \
		--cmd "$xvec_cmd --mem 12G" --nj 100 ${xvec_args} \
		--random-utt-length true --min-utt-length 1000 --max-utt-length 6000 \
		--feat-config $feat_config \
		$nnet data/${name} \
		$xvector_dir/${name}
	else
	    # With augmentations the script also writes the augmented data dir.
	    steps_xvec/extract_xvectors_from_wav.sh \
		--cmd "$xvec_cmd --mem 12G" --nj 100 ${xvec_args} \
		--random-utt-length true --min-utt-length 1000 --max-utt-length 6000 \
		--feat-config $feat_config --aug-config $plda_aug_config --num-augs $plda_num_augs \
		$nnet data/${name} \
		$xvector_dir/${name}_augx${plda_num_augs} \
		data/${name}_augx${plda_num_augs}
	fi
    done
fi
if [ $stage -le 2 ]; then
    # Extract xvectors for adapting LDA/PLDA (labeled in-domain data).
    # Unlike stage 1, full utterances are used (no --random-utt-length).
    for name in sre18_cmn2_adapt_lab
    do
	if [ $plda_num_augs -eq 0 ];then
	    steps_xvec/extract_xvectors_from_wav.sh \
		--cmd "$xvec_cmd --mem 12G" --nj 30 ${xvec_args} \
		--feat-config $feat_config \
		$nnet data/${name} \
		$xvector_dir/${name}
	else
	    steps_xvec/extract_xvectors_from_wav.sh \
		--cmd "$xvec_cmd --mem 12G" --nj 100 ${xvec_args} \
		--feat-config $feat_config --aug-config $plda_aug_config --num-augs $plda_num_augs \
		$nnet data/${name} \
		$xvector_dir/${name}_augx${plda_num_augs} \
		data/${name}_augx${plda_num_augs}
	fi
    done
fi
if [ $stage -le 3 ]; then
    # Extracts x-vectors for evaluation
    for name in sre18_dev_unlabeled \
	sre18_eval40_enroll_cmn2 sre18_eval40_test_cmn2 \
	sre19_eval_enroll_cmn2 sre19_eval_test_cmn2
    do
	# Use at most one parallel job per speaker, capped at 100
	# (spk2utt has one line per speaker).
	num_spk=$(wc -l data/$name/spk2utt | awk '{ print $1}')
	nj=$(($num_spk < 100 ? $num_spk:100))
	steps_xvec/extract_xvectors_from_wav.sh --cmd "$xvec_cmd --mem 6G" --nj $nj ${xvec_args} \
	    --feat-config $feat_config \
	    $nnet data/$name \
	    $xvector_dir/$name
    done
fi
if [ $stage -le 4 ]; then
    # Pool the enroll+test x-vector lists per evaluation set so the back-end
    # scoring scripts can read a single xvector.scp.
    mkdir -p $xvector_dir/sre18_eval40_cmn2
    cat $xvector_dir/sre18_eval40_{enroll,test}_cmn2/xvector.scp > $xvector_dir/sre18_eval40_cmn2/xvector.scp
    mkdir -p $xvector_dir/sre19_eval_cmn2
    cat $xvector_dir/sre19_eval_{enroll,test}_cmn2/xvector.scp > $xvector_dir/sre19_eval_cmn2/xvector.scp
fi
exit
|
<gh_stars>10-100
from __future__ import print_function
# Compatibility shim for old data files: force pytables to hand back plain
# numpy arrays regardless of the flavor recorded in the file. The "if 1:"
# wrapper only groups these statements and has no runtime effect.
if 1:
    # deal with old files, forcing to numpy
    import tables.flavor

    tables.flavor.restrict_flavors(keep=["numpy"])
import numpy
import sys, os
import flydra_analysis.a2.core_analysis as core_analysis
import argparse
import flydra_analysis.analysis.flydra_analysis_convert_to_mat
import flydra_core.kalman.dynamic_models as dynamic_models
import tables
import flydra_analysis.analysis.result_utils as result_utils
import flydra_analysis.a2.utils as utils
from flydra_analysis.a2.orientation_ekf_fitter import compute_ori_quality
import warnings
def cam_id2hostname(cam_id, h5_context):
    """Return the hostname of the machine that ran camera ``cam_id``.

    New-style files carry an explicit ``hostname`` column in the cam_info
    table; old-style files encode the hostname as the cam_id minus its last
    ``_``-separated component.  Raises ValueError when the cam_info table
    does not contain exactly one row for ``cam_id``.
    """
    cam_info = h5_context.get_pytable_node("cam_info", from_2d_file=True)
    records = cam_info[:]
    if "hostname" not in records.dtype.names:
        # old style: hostname embedded in the cam_id itself
        return "_".join(cam_id.split("_")[:-1])
    # new style: look the hostname up in the table
    matches = records[records["cam_id"] == cam_id]
    if len(matches) != 1:
        raise ValueError("multiple/no hostnames for observation: %s" % (matches,))
    return matches["hostname"][0]
def convert(
    infilename,
    outfilename,
    frames_per_second=None,
    save_timestamps=True,
    file_time_data=None,
    do_nothing=False,  # set to true to test for file existance
    start_obj_id=None,
    stop_obj_id=None,
    obj_only=None,
    dynamic_model_name=None,
    hdf5=False,
    show_progress=False,
    show_progress_json=False,
    **kwargs
):
    """Kalman-smooth the trajectories in ``infilename`` and export them.

    Writes a .mat file (or .h5 when ``hdf5`` is true) via
    ``flydra_analysis.analysis.flydra_analysis_convert_to_mat.do_it``.
    Extra keyword arguments are forwarded to ``h5_context.load_data``.
    """
    # Normalize the obj_id window: None means unbounded on that side.
    if start_obj_id is None:
        start_obj_id = -numpy.inf
    if stop_obj_id is None:
        stop_obj_id = numpy.inf
    smoothed_data_filename = os.path.split(infilename)[1]
    raw_data_filename = smoothed_data_filename
    ca = core_analysis.get_global_CachingAnalyzer()
    with ca.kalman_analysis_context(
        infilename, data2d_fname=file_time_data
    ) as h5_context:
        extra_vars = {}
        tzname = None
        # Bug fix: fps is passed to do_it() unconditionally below, but was
        # previously only assigned inside the save_timestamps branch,
        # raising NameError when save_timestamps=False.
        fps = None
        if save_timestamps:
            print("STAGE 1: finding timestamps")
            table_kobs = h5_context.get_pytable_node("ML_estimates")
            tzname = h5_context.get_tzname0()
            fps = h5_context.get_fps()
            try:
                table_data2d = h5_context.get_pytable_node(
                    "data2d_distorted", from_2d_file=True
                )
            except tables.exceptions.NoSuchNodeError as err:
                print(
                    "No timestamps in file. Either specify not to save timestamps ('--no-timestamps') or specify the original .h5 file with the timestamps ('--time-data=FILE2D')",
                    file=sys.stderr,
                )
                sys.exit(1)
            print("caching raw 2D data...", end=" ")
            sys.stdout.flush()
            table_data2d_frames = table_data2d.read(field="frame")
            # Frame numbers must fit in int64 before the cast below.
            assert numpy.max(table_data2d_frames) < 2 ** 63
            table_data2d_frames = table_data2d_frames.astype(numpy.int64)
            # table_data2d_frames_find = fastsearch.binarysearch.BinarySearcher( table_data2d_frames )
            table_data2d_frames_find = utils.FastFinder(table_data2d_frames)
            table_data2d_camns = table_data2d.read(field="camn")
            table_data2d_timestamps = table_data2d.read(field="timestamp")
            print("done")
            print(
                "(cached index of %d frame values of dtype %s)"
                % (len(table_data2d_frames), str(table_data2d_frames.dtype))
            )
            drift_estimates = h5_context.get_drift_estimates()
            camn2cam_id, cam_id2camns = h5_context.get_caminfo_dicts()
            gain = {}
            offset = {}
            print("hostname time_gain time_offset")
            print("-------- --------- -----------")
            # Fit a linear clock model (remote -> local) per camera host,
            # subsampling the drift estimates 10x to keep the fit cheap.
            for i, hostname in enumerate(drift_estimates.get("hostnames", [])):
                tgain, toffset = result_utils.model_remote_to_local(
                    drift_estimates["remote_timestamp"][hostname][::10],
                    drift_estimates["local_timestamp"][hostname][::10],
                )
                gain[hostname] = tgain
                offset[hostname] = toffset
                print(" ", repr(hostname), tgain, toffset)
            print()
            if do_nothing:
                return
            print("caching Kalman obj_ids...")
            obs_obj_ids = table_kobs.read(field="obj_id")
            fast_obs_obj_ids = utils.FastFinder(obs_obj_ids)
            print("finding unique obj_ids...")
            unique_obj_ids = numpy.unique(obs_obj_ids)
            print("(found %d)" % (len(unique_obj_ids),))
            unique_obj_ids = unique_obj_ids[unique_obj_ids >= start_obj_id]
            unique_obj_ids = unique_obj_ids[unique_obj_ids <= stop_obj_id]
            if obj_only is not None:
                unique_obj_ids = numpy.array(
                    [oid for oid in unique_obj_ids if oid in obj_only]
                )
                print("filtered to obj_only", obj_only)
            print("(will export %d)" % (len(unique_obj_ids),))
            print("finding 2d data for each obj_id...")
            timestamp_time = numpy.zeros(unique_obj_ids.shape, dtype=numpy.float64)
            table_kobs_frame = table_kobs.read(field="frame")
            if len(table_kobs_frame) == 0:
                raise ValueError("no 3D data, cannot convert")
            assert numpy.max(table_kobs_frame) < 2 ** 63
            table_kobs_frame = table_kobs_frame.astype(numpy.int64)
            assert (
                table_kobs_frame.dtype == table_data2d_frames.dtype
            )  # otherwise very slow
            all_idxs = fast_obs_obj_ids.get_idx_of_equal(unique_obj_ids)
            # For each trajectory, find the 2D observation of its first frame
            # and convert that camera's timestamp to mainbrain time.
            for obj_id_enum, obj_id in enumerate(unique_obj_ids):
                idx0 = all_idxs[obj_id_enum]
                framenumber = table_kobs_frame[idx0]
                remote_timestamp = numpy.nan
                this_camn = None
                frame_idxs = table_data2d_frames_find.get_idxs_of_equal(framenumber)
                if len(frame_idxs):
                    frame_idx = frame_idxs[0]
                    this_camn = table_data2d_camns[frame_idx]
                    remote_timestamp = table_data2d_timestamps[frame_idx]
                if this_camn is None:
                    print(
                        "skipping frame %d (obj %d): no data2d_distorted data"
                        % (framenumber, obj_id)
                    )
                    continue
                cam_id = camn2cam_id[this_camn]
                try:
                    remote_hostname = cam_id2hostname(cam_id, h5_context)
                except ValueError as e:
                    # Bug fix: exceptions have no .message attribute on
                    # Python 3; format the exception object itself.
                    print("error getting hostname of cam: %s" % (e,))
                    continue
                if remote_hostname not in gain:
                    warnings.warn(
                        "no host %s in timestamp data. making up "
                        "data." % remote_hostname
                    )
                    gain[remote_hostname] = 1.0
                    offset[remote_hostname] = 0.0
                mainbrain_timestamp = (
                    remote_timestamp * gain[remote_hostname] + offset[remote_hostname]
                )  # find mainbrain timestamp
                timestamp_time[obj_id_enum] = mainbrain_timestamp
            extra_vars["obj_ids"] = unique_obj_ids
            extra_vars["timestamps"] = timestamp_time
        print("STAGE 2: running Kalman smoothing operation")
        # also save the experiment data if present
        uuid = None
        try:
            table_experiment = h5_context.get_pytable_node(
                "experiment_info", from_2d_file=True
            )
        except tables.exceptions.NoSuchNodeError:
            pass
        else:
            try:
                uuid = table_experiment.read(field="uuid")
            except (KeyError, tables.exceptions.HDF5ExtError):
                pass
            else:
                extra_vars["experiment_uuid"] = uuid
        recording_header = h5_context.read_textlog_header_2d()
        recording_flydra_version = recording_header["flydra_version"]
        # -----------------------------------------------
        obj_ids = h5_context.get_unique_obj_ids()
        smoothing_flydra_version = h5_context.get_extra_info()["header"][
            "flydra_version"
        ]
        obj_ids = obj_ids[obj_ids >= start_obj_id]
        obj_ids = obj_ids[obj_ids <= stop_obj_id]
        if obj_only is not None:
            obj_ids = numpy.array(obj_only)
            print("filtered to obj_only", obj_ids)
        if frames_per_second is None:
            frames_per_second = h5_context.get_fps()
        # Bug fix: orig_dynamic_model_name was only assigned when the model
        # name had to be read from the file, leaving it unbound (NameError at
        # the do_it() call) when the caller supplied dynamic_model_name.
        orig_dynamic_model_name = dynamic_model_name
        if dynamic_model_name is None:
            extra = h5_context.get_extra_info()
            orig_dynamic_model_name = extra.get("dynamic_model_name", None)
            dynamic_model_name = orig_dynamic_model_name
            if dynamic_model_name is None:
                dynamic_model_name = dynamic_models.DEFAULT_MODEL
                warnings.warn(
                    'no dynamic model specified, using "%s"' % dynamic_model_name
                )
            else:
                print(
                    'detected file loaded with dynamic model "%s"' % dynamic_model_name
                )
            # Smoothing uses the non-EKF variant of the model.
            if dynamic_model_name.startswith("EKF "):
                dynamic_model_name = dynamic_model_name[4:]
            print(' for smoothing, will use dynamic model "%s"' % dynamic_model_name)
        allrows = []
        allqualrows = []
        failed_quality = False
        if show_progress:
            import progressbar

            class StringWidget(progressbar.Widget):
                # Shows the obj_id currently being processed.
                def set_string(self, ts):
                    self.ts = ts

                def update(self, pbar):
                    if hasattr(self, "ts"):
                        return self.ts
                    else:
                        return ""

            string_widget = StringWidget()
            objs_per_sec_widget = progressbar.FileTransferSpeed(unit="obj_ids ")
            widgets = [
                string_widget,
                objs_per_sec_widget,
                progressbar.Percentage(),
                progressbar.Bar(),
                progressbar.ETA(),
            ]
            pbar = progressbar.ProgressBar(widgets=widgets, maxval=len(obj_ids)).start()
        for i, obj_id in enumerate(obj_ids):
            if obj_id > stop_obj_id:
                break
            if show_progress:
                string_widget.set_string("[obj_id: % 5d]" % obj_id)
                pbar.update(i)
            if show_progress_json and i % 100 == 0:
                rough_percent_done = float(i) / len(obj_ids) * 100.0
                result_utils.do_json_progress(rough_percent_done)
            try:
                rows = h5_context.load_data(
                    obj_id,
                    dynamic_model_name=dynamic_model_name,
                    frames_per_second=frames_per_second,
                    **kwargs
                )
            except core_analysis.DiscontiguousFramesError:
                warnings.warn(
                    "discontiguous frames smoothing obj_id %d, skipping." % (obj_id,)
                )
                continue
            except core_analysis.NotEnoughDataToSmoothError:
                # warnings.warn('not enough data to smooth obj_id %d, skipping.'%(obj_id,))
                continue
            except numpy.linalg.linalg.LinAlgError:
                warnings.warn(
                    "linear algebra error smoothing obj_id %d, skipping." % (obj_id,)
                )
                continue
            except core_analysis.CouldNotCalculateOrientationError:
                warnings.warn(
                    "orientation error smoothing obj_id %d, skipping." % (obj_id,)
                )
                continue
            allrows.append(rows)
            try:
                qualrows = compute_ori_quality(
                    h5_context, rows["frame"], obj_id, smooth_len=0
                )
                allqualrows.append(qualrows)
            except ValueError:
                failed_quality = True
        if show_progress:
            pbar.finish()
        # Hardening: numpy.concatenate([]) raises an opaque error; fail with
        # an actionable message when every obj_id was skipped.
        if not allrows:
            raise ValueError(
                "no smoothed trajectories to export (all obj_ids were skipped)"
            )
        allrows = numpy.concatenate(allrows)
        if not failed_quality:
            allqualrows = numpy.concatenate(allqualrows)
        else:
            allqualrows = None
        recarray = numpy.rec.array(allrows)
        smoothed_source = "kalman_estimates"
        flydra_analysis.analysis.flydra_analysis_convert_to_mat.do_it(
            rows=recarray,
            ignore_observations=True,
            newfilename=outfilename,
            extra_vars=extra_vars,
            orientation_quality=allqualrows,
            hdf5=hdf5,
            tzname=tzname,
            fps=fps,
            smoothed_source=smoothed_source,
            smoothed_data_filename=smoothed_data_filename,
            raw_data_filename=raw_data_filename,
            dynamic_model_name=orig_dynamic_model_name,
            recording_flydra_version=recording_flydra_version,
            smoothing_flydra_version=smoothing_flydra_version,
        )
    if show_progress_json:
        result_utils.do_json_progress(100)
def export_flydra_hdf5():
    # Console-script entry point: same as main() but always writes .h5 output.
    main(hdf5_only=True)
def main(hdf5_only=False):
    """Command-line entry point: parse arguments and run convert().

    When hdf5_only is true the ``--hdf5`` flag is omitted and .h5 output is
    forced (used by the export_flydra_hdf5 console script).
    """
    # hdf5_only is to maintain backwards compatibility...
    parser = argparse.ArgumentParser()
    if hdf5_only:
        dest_help = "filename of output .h5 file"
    else:
        dest_help = "filename of output .mat file"
    parser.add_argument("file", type=str, default=None, help="input file")
    parser.add_argument(
        "--progress",
        dest="show_progress",
        action="store_true",
        default=False,
        help="show progress bar on console",
    )
    parser.add_argument(
        "--progress-json",
        dest="show_progress_json",
        action="store_true",
        default=False,
        help="show JSON progress messages",
    )
    parser.add_argument("--dest-file", type=str, default=None, help=dest_help)
    parser.add_argument(
        "--time-data",
        dest="file2d",
        type=str,
        help="hdf5 file with 2d data FILE2D used to calculate timestamp information and take UUID",
        metavar="FILE2D",
    )
    parser.add_argument(
        "--no-timestamps", action="store_true", dest="no_timestamps", default=False
    )
    if not hdf5_only:
        parser.add_argument(
            "--hdf5",
            action="store_true",
            default=False,
            help="save output as .hdf5 file (not .mat)",
        )
    # Bug fix: the help text was copy-pasted from --stop-obj-id and wrongly
    # said "last obj_id to save".
    parser.add_argument(
        "--start-obj-id", default=None, type=int, help="first obj_id to save"
    )
    parser.add_argument(
        "--stop-obj-id", default=None, type=int, help="last obj_id to save"
    )
    parser.add_argument("--obj-only", type=str)
    parser.add_argument(
        "--stop", default=None, type=int, help="last obj_id to save (DEPRECATED)"
    )
    parser.add_argument("--profile", action="store_true", dest="profile", default=False)
    parser.add_argument(
        "--dynamic-model", type=str, dest="dynamic_model", default=None,
    )
    core_analysis.add_arguments_to_parser(parser)
    options = parser.parse_args()
    if options.stop_obj_id is not None and options.stop is not None:
        raise ValueError("--stop and --stop-obj-id cannot both be set")
    if options.obj_only is not None:
        options.obj_only = core_analysis.parse_seq(options.obj_only)
        if options.start_obj_id is not None or options.stop_obj_id is not None:
            raise ValueError("cannot specify start and stop with --obj-only option")
    if options.stop is not None:
        warnings.warn(
            "DeprecationWarning: --stop will be phased out in favor of --stop-obj-id"
        )
        options.stop_obj_id = options.stop
    if hdf5_only:
        do_hdf5 = True
    else:
        do_hdf5 = options.hdf5
    infilename = options.file
    if options.dest_file is None:
        if do_hdf5:
            # import h5py early so if we don't have it we know sooner rather than later.
            import h5py

            outfilename = os.path.splitext(infilename)[0] + "_smoothed.h5"
        else:
            outfilename = os.path.splitext(infilename)[0] + "_smoothed.mat"
    else:
        outfilename = options.dest_file
    kwargs = core_analysis.get_options_kwargs(options)
    if options.profile:
        import cProfile

        out_stats_filename = outfilename + ".profile"
        print("profiling, stats will be saved to %r" % out_stats_filename)
        cProfile.runctx(
            """convert(infilename,outfilename,
                    file_time_data=options.file2d,
                    save_timestamps = not options.no_timestamps,
                    start_obj_id=options.start_obj_id,
                    stop_obj_id=options.stop_obj_id,
                    obj_only=options.obj_only,
                    dynamic_model_name=options.dynamic_model,
                    return_smoothed_directions = True,
                    hdf5 = do_hdf5,
                    show_progress = options.show_progress,
                    show_progress_json=options.show_progress_json,
                    **kwargs)""",
            globals(),
            locals(),
            out_stats_filename,
        )
    else:
        convert(
            infilename,
            outfilename,
            file_time_data=options.file2d,
            save_timestamps=not options.no_timestamps,
            start_obj_id=options.start_obj_id,
            stop_obj_id=options.stop_obj_id,
            obj_only=options.obj_only,
            dynamic_model_name=options.dynamic_model,
            return_smoothed_directions=True,
            hdf5=do_hdf5,
            show_progress=options.show_progress,
            show_progress_json=options.show_progress_json,
            **kwargs
        )


if __name__ == "__main__":
    main()
|
#!/bin/sh
# Build a bootable Ubuntu SD-card image for a Zynq (Red Pitaya) board.
#
# Usage: <script> /dev/sdX — the target device is repartitioned and WIPED.
#
# Fix: fail fast with a usage message instead of running the destructive
# partitioning commands below with an empty device name.
device=${1:?usage: provide the target block device (its contents will be destroyed)}
boot_dir=`mktemp -d /tmp/BOOT.XXXXXXXXXX`
root_dir=`mktemp -d /tmp/ROOT.XXXXXXXXXX`
linux_dir=tmp/linux-5.10
linux_ver=5.10.46-xilinx
root_tar=ubuntu-base-20.04.2-base-armhf.tar.gz
root_url=http://cdimage.ubuntu.com/ubuntu-base/releases/20.04/release/$root_tar
passwd=escondido
timezone=America/Argentina/Mendoza
# Create partitions: small FAT boot partition + ext4 root on the remainder
parted -s $device mklabel msdos
parted -s $device mkpart primary fat16 4MiB 16MiB
parted -s $device mkpart primary ext4 16MiB 100%
# lsblk lists the device itself first, then its partitions; sed 'N!d' keeps
# row N, so rows 2 and 3 are the first and second partition device names.
boot_dev=/dev/`lsblk -ln -o NAME -x NAME $device | sed '2!d'`
root_dev=/dev/`lsblk -ln -o NAME -x NAME $device | sed '3!d'`
# Create file systems
mkfs.vfat -v $boot_dev
mkfs.ext4 -F -j $root_dev
# Mount file systems
mount $boot_dev $boot_dir
mount $root_dev $root_dir
# Copy files to the boot file system (bootloader, devicetree, kernel, uEnv)
cp boot.bin devicetree.dtb uImage $boot_dir
cp uEnv-ext4.txt $boot_dir/uEnv.txt
# Copy Ubuntu Core to the root file system (download the tarball on first run)
test -f $root_tar || curl -L $root_url -o $root_tar
tar -zxf $root_tar --directory=$root_dir
# Install Linux modules built in $linux_dir into the target's module tree
modules_dir=$root_dir/lib/modules/$linux_ver
mkdir -p $modules_dir/kernel
# Repack only the .ko files (with root ownership) and unpack them in place.
find $linux_dir -name \*.ko -printf '%P\0' | tar --directory=$linux_dir --owner=0 --group=0 --null --files-from=- -zcf - | tar -zxf - --directory=$modules_dir/kernel
cp $linux_dir/modules.order $linux_dir/modules.builtin $modules_dir/
depmod -a -b $root_dir $linux_ver
# Add missing configuration files and packages.
# resolv.conf gives the chroot working DNS; qemu-arm-static lets the build
# host execute the armhf binaries inside the chroot (requires binfmt_misc
# support on the host — TODO confirm).
cp /etc/resolv.conf $root_dir/etc/
cp /usr/bin/qemu-arm-static $root_dir/usr/bin/
# Everything up to EOF_CHROOT runs inside the armhf root via chroot.
chroot $root_dir <<- EOF_CHROOT
export LANG=C
export LC_ALL=C
export PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
cat <<- EOF_CAT > etc/apt/apt.conf.d/99norecommends
APT::Install-Recommends "0";
APT::Install-Suggests "0";
EOF_CAT
cat <<- EOF_CAT > etc/fstab
# /etc/fstab: static file system information.
# <file system> <mount point> <type> <options> <dump> <pass>
/dev/mmcblk0p2 / ext4 errors=remount-ro 0 1
/dev/mmcblk0p1 /boot vfat defaults 0 2
EOF_CAT
cat <<- EOF_CAT >> etc/securetty
# Serial Console for Xilinx Zynq-7000
ttyPS0
EOF_CAT
sed 's/tty1/ttyPS0/g; s/38400/115200/' etc/init/tty1.conf > etc/init/ttyPS0.conf
echo red-pitaya > etc/hostname
sed -i '/^# deb .* universe$/s/^# //' etc/apt/sources.list
sed -i '/### END INIT INFO/aexit 0' /etc/init.d/udev
apt-get update
apt-get -y upgrade
sed -i '/### END INIT INFO/{n;d}' /etc/init.d/udev
apt-get -y install locales
locale-gen en_US.UTF-8
update-locale LANG=en_US.UTF-8
echo $timezone > etc/timezone
dpkg-reconfigure --frontend=noninteractive tzdata
apt-get -y install openssh-server ca-certificates ntp usbutils psmisc lsof \
parted curl less vim man-db iw wpasupplicant linux-firmware ntfs-3g
sed -i 's/^PermitRootLogin.*/PermitRootLogin yes/' etc/ssh/sshd_config
apt-get -y install hostapd isc-dhcp-server iptables
touch etc/udev/rules.d/75-persistent-net-generator.rules
cat <<- EOF_CAT >> etc/network/interfaces.d/eth0
allow-hotplug eth0
iface eth0 inet dhcp
EOF_CAT
cat <<- EOF_CAT > etc/network/interfaces.d/wlan0
allow-hotplug wlan0
iface wlan0 inet static
address 192.168.42.1
netmask 255.255.255.0
post-up service hostapd restart
post-up service isc-dhcp-server restart
post-up iptables-restore < /etc/iptables.ipv4.nat
pre-down iptables-restore < /etc/iptables.ipv4.nonat
pre-down service isc-dhcp-server stop
pre-down service hostapd stop
EOF_CAT
cat <<- EOF_CAT > etc/hostapd/hostapd.conf
interface=wlan0
ssid=RedPitaya
driver=nl80211
hw_mode=g
channel=6
macaddr_acl=0
auth_algs=1
ignore_broadcast_ssid=0
wpa=2
wpa_passphrase=RedPitaya
wpa_key_mgmt=WPA-PSK
wpa_pairwise=CCMP
rsn_pairwise=CCMP
EOF_CAT
cat <<- EOF_CAT > etc/default/hostapd
DAEMON_CONF=/etc/hostapd/hostapd.conf
EOF_CAT
cat <<- EOF_CAT > etc/dhcp/dhcpd.conf
ddns-update-style none;
default-lease-time 600;
max-lease-time 7200;
authoritative;
log-facility local7;
subnet 192.168.42.0 netmask 255.255.255.0 {
range 192.168.42.10 192.168.42.50;
option broadcast-address 192.168.42.255;
option routers 192.168.42.1;
default-lease-time 600;
max-lease-time 7200;
option domain-name "local";
option domain-name-servers 8.8.8.8, 8.8.4.4;
}
EOF_CAT
cat <<- EOF_CAT >> etc/dhcp/dhclient.conf
timeout 20;
lease {
interface "eth0";
fixed-address 192.168.1.100;
option subnet-mask 255.255.255.0;
renew 2 2030/1/1 00:00:01;
rebind 2 2030/1/1 00:00:01;
expire 2 2030/1/1 00:00:01;
}
EOF_CAT
sed -i '/^#net.ipv4.ip_forward=1$/s/^#//' etc/sysctl.conf
cat <<- EOF_CAT > etc/iptables.ipv4.nat
*nat
:PREROUTING ACCEPT [0:0]
:INPUT ACCEPT [0:0]
:OUTPUT ACCEPT [0:0]
:POSTROUTING ACCEPT [0:0]
-A POSTROUTING -o eth0 -j MASQUERADE
COMMIT
*mangle
:PREROUTING ACCEPT [0:0]
:INPUT ACCEPT [0:0]
:FORWARD ACCEPT [0:0]
:OUTPUT ACCEPT [0:0]
:POSTROUTING ACCEPT [0:0]
COMMIT
*filter
:INPUT ACCEPT [0:0]
:FORWARD ACCEPT [0:0]
:OUTPUT ACCEPT [0:0]
-A FORWARD -i eth0 -o wlan0 -m state --state RELATED,ESTABLISHED -j ACCEPT
-A FORWARD -i wlan0 -o eth0 -j ACCEPT
COMMIT
EOF_CAT
cat <<- EOF_CAT > etc/iptables.ipv4.nonat
*nat
:PREROUTING ACCEPT [0:0]
:INPUT ACCEPT [0:0]
:OUTPUT ACCEPT [0:0]
:POSTROUTING ACCEPT [0:0]
COMMIT
*mangle
:PREROUTING ACCEPT [0:0]
:INPUT ACCEPT [0:0]
:FORWARD ACCEPT [0:0]
:OUTPUT ACCEPT [0:0]
:POSTROUTING ACCEPT [0:0]
COMMIT
*filter
:INPUT ACCEPT [0:0]
:FORWARD ACCEPT [0:0]
:OUTPUT ACCEPT [0:0]
COMMIT
EOF_CAT
apt-get clean
echo root:$passwd | chpasswd
service ntp stop
service ssh stop
history -c
sync
EOF_CHROOT
# Remove the host artifacts that were only needed while inside the chroot
rm $root_dir/etc/resolv.conf
rm $root_dir/usr/bin/qemu-arm-static
# Unmount file systems
umount $boot_dir $root_dir
rmdir $boot_dir $root_dir
# Zero unused ext4 blocks so a raw image of the card compresses well
zerofree $root_dev
|
<reponame>SammyVimes/ignite-3
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.raft.client.message.impl;
import org.apache.ignite.raft.client.Command;
import org.apache.ignite.raft.client.message.ActionRequest;
/** */
/**
 * Default {@link ActionRequest} implementation that also acts as its own
 * {@link ActionRequest.Builder}: the builder methods mutate this instance
 * and {@link #build()} returns {@code this}.
 */
class ActionRequestImpl<T> implements ActionRequest, ActionRequest.Builder {
    /** Command to execute on the raft group. */
    private Command cmd;

    /** Id of the raft group this request is addressed to. */
    private String groupId;

    /** {@inheritDoc} */
    @Override public Command command() {
        return cmd;
    }

    /** {@inheritDoc} */
    @Override public String groupId() {
        return groupId;
    }

    /** {@inheritDoc} */
    @Override public Builder groupId(String groupId) {
        this.groupId = groupId;
        return this;
    }

    /** {@inheritDoc} */
    @Override public Builder command(Command cmd) {
        this.cmd = cmd;
        return this;
    }

    /** {@inheritDoc} */
    @Override public ActionRequest build() {
        return this;
    }

    /** {@inheritDoc} */
    @Override public short directType() {
        // Message type id used by the network (de)serialization layer;
        // must stay unique among message implementations.
        return 1000;
    }
}
|
#!/bin/sh
# Deploy and start a voxxr room node on an OVH cloud instance.
#
# Usage: <script> INSTANCE ROOM HOST JVMMS JVMMX
INSTANCE=$1
ROOM=$2
HOST=$3
JVMMS=$4
JVMMX=$5
VOXXR_HOME=~/dev/wkspace/voxxr/voxxr
# IP of the current cluster seed node, written by an earlier deployment step.
SEED_IP=`cat "$VOXXR_HOME/voxxr-room/current-seed-ip"`
# Build dependencies and package the room sources for upload.
cd "$VOXXR_HOME/voxxr-room" && ant deps && cd -
cd "$VOXXR_HOME" && tar czvf out/production/voxxr-room.tgz voxxr-room && cd -
echo "starting instance"
ovhcloud instance startInstance --instanceId "$INSTANCE"
echo "waiting for room node $ROOM to startup..."
# Poll until the instance is 'running' and reports an IP address.
# Fix: quoted expansions throughout, and sleep between polls instead of
# busy-looping against the cloud API.
ROOM_IP=''
while [ "$ROOM_IP" = "" ]; do
    ROOM_IP=`ovhcloud instance getInstances --projectName voxxr | grep "$INSTANCE" | grep 'running' | egrep -o '([0-9]+\.)+[0-9]+'`
    sleep 2
done
echo "login and RSA accept"
ovhcloud instance ssh "$INSTANCE" ls
echo "uploading files..."
ovhcloud instance ssh "$INSTANCE" copy "$VOXXR_HOME/voxxr-room/src/main/scripts/deploy-room-remote.sh" "install.sh"
ovhcloud instance ssh "$INSTANCE" copy "$VOXXR_HOME/voxxr-room/src/main/scripts/voxxr-room.properties" "voxxr-room.properties"
ovhcloud instance ssh "$INSTANCE" copy "$VOXXR_HOME/out/production/voxxr-room.tgz" "voxxr-room.tgz"
echo "executing remote install..."
ovhcloud instance ssh "$INSTANCE" bash "./install.sh" "$ROOM" "$SEED_IP" "$HOST" "$ROOM_IP" "$JVMMS" "$JVMMX"
echo "ROOM $ROOM STARTED AT IP $ROOM_IP"
echo "VOXXR-ROOM::$ROOM::$ROOM_IP"
const HtmlWebpackPlugin = require('html-webpack-plugin');
const paths = require('react-scripts/config/paths');
// Build the two rewire callbacks for a multi-entry create-react-app setup:
// one for the webpack config and one for webpack-dev-server. `entrys` may be
// an array of entry source paths or an already-built { name: src } object.
const rewireEntry = (entrys) => {
    const entry = getEntryObject(entrys);
    return {
        // Rewrites entry points, output filenames and HtmlWebpackPlugin
        // instances so each entry gets its own bundle and html page.
        rewireWebpackEntryConfig: (config, env) => {
            config = rewireEntryConfig(entry, config, env);
            config = rewireOutputConfig(config, env);
            config = rewireHtmlWebpackPlugin(entry, config, env);
            return config;
        },
        // Maps /<name>.html requests to the generated page for each entry.
        // (Note: the "k" in the property name is a long-standing typo that is
        // part of the public interface and must be kept.)
        rewireDevServerkEntryConfig: (config) => {
            const rewrites = [];
            Object.keys(entry).map(name => {
                const reg = new RegExp(`^\\/${name}\\.html`);
                // NOTE(review): paths.appBuild is an absolute filesystem path,
                // so this rewrite target looks suspicious as a URL — confirm
                // the dev server actually resolves it (vs. `/${name}.html`).
                rewrites.push({ from: reg, to: `/${paths.appBuild}/${name}.html` });
                return name;
            });
            config.historyApiFallback.rewrites = rewrites;
            return config;
        },
    };
};
// Normalize the user-supplied entry list: objects are already { name: src }
// maps and pass through; arrays are keyed by a name derived from each path.
const getEntryObject = (entry) => {
    if (!(entry instanceof Array)) {
        return entry;
    }
    const obj = {};
    entry.forEach((entrySrc) => {
        obj[getNameByEntrySrc(entrySrc)] = entrySrc;
    });
    return obj;
};
// Derive a bundle name from an entry source path: the file's base name
// without its extension, or 'index' when the path does not match.
const getNameByEntrySrc = (entrySrc) => {
    const match = entrySrc.match(/^(.*[\/\\])?([^/.]+)\.[^.]+$/);
    return match ? match[2] : 'index';
};
// Build the webpack `entry` map from the resolved { name: src } entries.
// Bug fix: the hot-reload client (react-dev-utils/webpackHotDevClient) was
// prepended for every env EXCEPT development ('development' !== env); it is
// a development-only helper and must only be bundled into dev builds.
const rewireEntryConfig = (entry, config, env) => {
    const entryDependences = [];
    if ('development' === env) {
        entryDependences.push(require.resolve('react-dev-utils/webpackHotDevClient'));
    }
    const entryConfig = {};
    Object.entries(entry).map(([name, entrySrc]) => {
        entryConfig[name] = [...entryDependences, entrySrc];
        return name;
    });
    config.entry = entryConfig;
    return config;
};
// In development, emit one stable (un-hashed) bundle per entry point so the
// dev server can reference them by name; production output is left alone.
const rewireOutputConfig = (config, env) => {
    if (env === 'development') {
        config.output.filename = 'static/js/[name].bundle.js';
    }
    return config;
};
// Replace the single CRA-provided HtmlWebpackPlugin with one instance per
// entry point, each emitting its own <name>.html page.
const rewireHtmlWebpackPlugin = (entry, config, env) => {
    config.plugins = removePlugin(config.plugins, (name) => /HtmlWebpackPlugin/i.test(name));
    for (const name of Object.keys(entry)) {
        config.plugins.unshift(createHtmlWebpackPlugin(name, env));
    }
    return config;
};
// Return a copy of `plugins` without any plugin whose constructor name is
// accepted by `nameMatcher`.
const removePlugin = (plugins, nameMatcher) =>
    plugins.filter((plugin) => {
        const ctor = plugin.constructor;
        return !(ctor && ctor.name && nameMatcher(ctor.name));
    });
// Create an HtmlWebpackPlugin instance for one entry point, emitting
// <entryName>.html from the shared CRA template. Production builds add
// aggressive html minification; development builds do not.
const createHtmlWebpackPlugin = (entryName, env) => {
    const config = {
        inject: true,
        template: paths.appHtml,
        // Include the shared vendor chunk, this entry's runtime chunk, and
        // the entry's own chunk in the generated page.
        chunks: ['vendors', `runtime~${entryName}`, entryName],
        filename: `${entryName}.html`,
    };
    if ('development' === env) {
        return new HtmlWebpackPlugin(config);
    } else {
        return new HtmlWebpackPlugin({
            ...config,
            minify: {
                removeComments: true,
                collapseWhitespace: true,
                removeRedundantAttributes: true,
                useShortDoctype: true,
                removeEmptyAttributes: true,
                removeStyleLinkTypeAttributes: true,
                keepClosingSlash: true,
                minifyJS: true,
                minifyCSS: true,
                minifyURLs: true,
            },
        });
    }
};
module.exports = rewireEntry;
|
<gh_stars>0
/* An STM32 HAL library written for the the MAX30100 pulse oximeter and heart rate sensor. */
#include "max30100_for_stm32_hal.h"
#include "main.h"
#ifdef __cplusplus
extern "C"{
#endif
/* Driver state shared across this translation unit; set by MAX30100_Init(). */
I2C_HandleTypeDef *_max30100_ui2c;   /* I2C bus the sensor is attached to */
UART_HandleTypeDef *_max30100_uuart; /* UART handle (presumably for debug output; only stored here — confirm) */
uint8_t _max30100_it_byte = 0x00;    /* interrupt-status byte (written elsewhere in the driver — confirm) */
uint8_t _max30100_mode;              /* currently configured operating mode */
uint8_t _max30100_mode_prev;         /* previous mode, for temporary mode switches */
/* Decoded FIFO contents: 16 samples per MAX30100_ReadFIFO() call. */
uint16_t _max30100_ir_sample[16];
uint16_t _max30100_red_sample[16];
/* Current and previous LED drive-current settings (see MAX30100_SetLEDCurrent). */
uint8_t _max30100_ir_current;
uint8_t _max30100_red_current;
uint8_t _max30100_ir_current_prev;
uint8_t _max30100_red_current_prev;
float _max30100_temp;                /* last temperature reading (units not established here — confirm) */
/*
 * Bind the driver to an I2C bus and a UART handle, then put the sensor in a
 * known state: stopped (MAX30100_Stop is defined elsewhere in this driver)
 * with an empty FIFO.
 */
void MAX30100_Init(I2C_HandleTypeDef *ui2c, UART_HandleTypeDef *uuart){
    _max30100_ui2c = ui2c;
    _max30100_uuart = uuart;
    MAX30100_Stop();
    MAX30100_ClearFIFO();
}
/*
 * Read one byte from sensor register `regAddr`: write the register address,
 * then receive the register value.
 *
 * Fix: the address must be passed by pointer ("&reg"); the original text
 * contained a mojibake "(R)" character in its place and did not compile.
 */
uint8_t MAX30100_ReadReg(uint8_t regAddr){
    uint8_t reg = regAddr, result;
    HAL_I2C_Master_Transmit(_max30100_ui2c, MAX30100_I2C_ADDR, &reg, 1, MAX30100_TIMEOUT);
    HAL_I2C_Master_Receive(_max30100_ui2c, MAX30100_I2C_ADDR, &result, 1, MAX30100_TIMEOUT);
    return result;
}
/* Write one byte to a sensor register; the I2C payload is {regAddr, byte}. */
void MAX30100_WriteReg(uint8_t regAddr, uint8_t byte){
    uint8_t reg[2] = { regAddr, byte };
    HAL_I2C_Master_Transmit(_max30100_ui2c, MAX30100_I2C_ADDR, reg, 2, MAX30100_TIMEOUT);
}
/*
 * Enable/disable the sensor's interrupt sources. Each argument is treated as
 * a flag (only bit 0 is used) and shifted to its MAX30100_ENB_* bit position
 * in the interrupt-enable register.
 */
void MAX30100_EnableInterrupt(uint8_t a_full, uint8_t tmp_rdy, uint8_t hr_rdy, uint8_t spo2){
    uint8_t itReg = ((a_full & 0x01) << MAX30100_ENB_A_FULL) | ((tmp_rdy & 0x01) << MAX30100_ENB_TMP_RDY) | ((hr_rdy & 0x01) << MAX30100_ENB_HR_RDY) | ((spo2 & 0x01) << MAX30100_ENB_SPO2_RDY);
    MAX30100_WriteReg(MAX30100_INTERRUPT_ENB, itReg);
}
/* Select the sensor operating mode.
 * Replaces the low three bits of the mode-config register with `mode`;
 * additionally sets bit 0x08 in SpO2 mode (presumably TEMP_EN — confirm
 * against the MAX30100 datasheet) and clears it otherwise.
 * Interrupts are then re-armed to match the mode: temperature-ready for
 * SpO2, FIFO-almost-full for HR-only, none otherwise. */
void MAX30100_SetMode(enum MAX30100_Mode mode){
    _max30100_mode = mode;
    uint8_t modeReg = (MAX30100_ReadReg(MAX30100_MODE_CONFIG) & ~(0x07)) | ((mode << MAX30100_MODE) & 0x07);
    if(mode == MAX30100_SPO2_MODE)
        modeReg |= 0x08;
    else
        modeReg &= ~0x08;
    MAX30100_WriteReg(MAX30100_MODE_CONFIG, modeReg);
    if(_max30100_mode == MAX30100_SPO2_MODE)
        MAX30100_EnableInterrupt(0, 1, 0, 0);
    else if(_max30100_mode == MAX30100_HRONLY_MODE)
        MAX30100_EnableInterrupt(1, 0, 0, 0);
    else
        MAX30100_EnableInterrupt(0, 0, 0, 0);
}
/* Set the SpO2 sample-rate field (mask 0x1c) in the SpO2 config register,
 * preserving all other bits. */
void MAX30100_SetSpO2SampleRate(enum MAX30100_SpO2SR sr){
    uint8_t spo2Reg = MAX30100_ReadReg(MAX30100_SPO2_CONFIG);
    spo2Reg = ((sr << MAX30100_SPO2_SR) & 0x1c) | (spo2Reg & ~0x1c);
    MAX30100_WriteReg(MAX30100_SPO2_CONFIG, spo2Reg);
}
/* Set the LED pulse-width field (mask 0x03) in the SpO2 config register,
 * preserving all other bits. */
void MAX30100_SetLEDPulseWidth(enum MAX30100_LEDPulseWidth pw){
    uint8_t spo2Reg = MAX30100_ReadReg(MAX30100_SPO2_CONFIG);
    spo2Reg = ((pw << MAX30100_LED_PW) & 0x03) | (spo2Reg & ~0x03);
    MAX30100_WriteReg(MAX30100_SPO2_CONFIG, spo2Reg);
}
/* Program both LED drive currents in one register write: red is shifted to
 * the MAX30100_LED_RED_PA position, IR occupies the low bits. The chosen
 * values are also cached in the module-level state. */
void MAX30100_SetLEDCurrent(enum MAX30100_LEDCurrent redpa, enum MAX30100_LEDCurrent irpa){
    _max30100_red_current = redpa;
    _max30100_ir_current = irpa;
    MAX30100_WriteReg(MAX30100_LED_CONFIG, (redpa << MAX30100_LED_RED_PA) | irpa);
}
/* Reset the FIFO write pointer, read pointer and overflow counter to zero,
 * discarding any buffered samples. */
void MAX30100_ClearFIFO(void){
    MAX30100_WriteReg(MAX30100_FIFO_WR_PTR, 0x00);
    MAX30100_WriteReg(MAX30100_FIFO_RD_PTR, 0x00);
    MAX30100_WriteReg(MAX30100_OVF_COUNTER, 0x00);
}
/*
 * Drain the sensor FIFO: read 64 bytes (16 samples x 4 bytes) starting at
 * the FIFO data register and unpack them into the module-level
 * _max30100_ir_sample / _max30100_red_sample buffers. Each sample is a
 * big-endian 16-bit IR value followed by a big-endian 16-bit red value.
 *
 * Bug fix: "&reg" had been mangled into the single character "®" by an
 * HTML-entity conversion, which does not compile.
 */
void MAX30100_ReadFIFO(void){
    uint8_t num_sample = 64;
    uint8_t fifo_data[64] = { 0 };
    uint8_t reg = MAX30100_FIFO_DATA;
    HAL_I2C_Master_Transmit(_max30100_ui2c, MAX30100_I2C_ADDR, &reg, 1, MAX30100_TIMEOUT);
    HAL_I2C_Master_Receive(_max30100_ui2c, MAX30100_I2C_ADDR, fifo_data, num_sample, MAX30100_TIMEOUT);
    for(uint8_t i = 0; i < num_sample; i += 4){
        _max30100_ir_sample[i / 4] = (fifo_data[i] << 8) | fifo_data[i + 1];
        _max30100_red_sample[i / 4] = (fifo_data[i + 2] << 8) | fifo_data[i + 3];
    }
}
#ifdef __cplusplus
}
#endif
|
<reponame>MarcosRibas/Projeto100Exercicios<filename>Python/ex074.py
"""Ex074: draw five random numbers (1..10) into a tuple, then print the
numbers drawn together with the largest and smallest among them.
"""
from random import randint

# Five independent draws, collected into an immutable tuple.
s = tuple(randint(1, 10) for _ in range(5))
print(f'Números sorteados: {s}')
print(f'O maior número sorteado foi {max(s)}, e o menor foi {min(s)}')
|
var fs = require('fs');
var fsp = require('path');

// Rewrites @types/node's index.d.ts into a streamline-friendly index.d.ts:
// callback-style signatures are replaced with streamline `_` continuations.
var input = fsp.join(__dirname, 'node_modules/@types/node/index.d.ts');
var output = fsp.join(__dirname, 'index.d.ts');

var openRe = /\{\s*$/;       // line opens a block
var closeRe = /^\s*\}/;      // line closes a block
var importRe = /^\s*import /;
var ns = false;              // inside a `declare namespace` (vs module)
var skippingModule = false;  // currently dropping a conflicting module

var result = fs.readFileSync(input, 'utf8').split('\n').reduce(function(lines, line) {
    var m;
    // Skip modules that generate conflicting type definitions.
    // Bug fix: the closing quote sat inside the alternation —
    // `"(tls|stream|http2")` — so prefixes such as `declare module "streamx"`
    // also matched; the quote now closes the whole module name.
    if (/^declare module "(tls|stream|http2)"/.test(line)) {
        skippingModule = true;
        return lines;
    } else if (skippingModule && /^\}/.test(line)) {
        skippingModule = false;
        return lines;
    } else if (skippingModule) {
        return lines;
    }
    // Match a `(err, ...) => void` callback parameter and its return type.
    if (m = /^\s*(?:\w|\().*?(\w+)\s*\??\s*:\s*(\(err[^)]*\)\s*=>\s*(?:void|any))\s*\)\s*:\s*([^;\)]+)/.exec(line)) {
        if (/\bevent: "(error|lookup)",/.test(line)) return lines;
        var pair = m[2].split(/\)\s*=>\s*/);
        var newTypes = pair[0].split(',').map(arg => arg.split(/:\s*/)[1]);
        var oldType = pair[1];
        if (oldType !== 'void' && oldType !== 'any' && oldType !== newTypes[0]) {
            console.error("type mismatch: " + line + '\n' + oldType + '\n' + newTypes);
        } else {
            // Rename the callback parameter to `_` and swap its type for the
            // streamline continuation type.
            line = line.replace(m[1], '_');
            var magic = ns ? 'Streamline._' : '_';
            lines.push(line.replace(m[2], magic).replace(m[3], newTypes[1] || 'void'));
            if (newTypes.length > 2) {
                // Multi-result callbacks get a tuple-typed overload too.
                lines.push(line.replace(m[2], '[' + magic + ']').replace(m[3], '[' + newTypes.slice(1).join(', ') + ']'));
            }
        }
    } else if (importRe.test(line)) {
        lines.push(line);
    } else if (openRe.test(line)) {
        // keep imports but replace class ... implements ... by interface ...
        line = line.replace(/\bclass (.*) (implements|extends) (.*) \{/, "interface $1 {");
        lines.push(line);
        // insert streamline-runtime import
        m = /^declare\s*(module|namespace)/.exec(line);
        if (m) {
            ns = m[1] === 'namespace';
            if (!ns) lines.push(/^ */.exec(line)[0] + ' import { _ } from "streamline-runtime";');
        }
    } else if (/export function exists\(path.*, callback\?:/.test(line)) {
        // special handling for fs.exists
        lines.push(' export function exists(path: string | Buffer, _: _, __: _): void;');
    } else if (closeRe.test(line)) {
        // backtrack on imports
        var i = lines.length;
        while (--i >= 0 && importRe.test(lines[i])) void 0;
        // if empty or only imports, discard it.
        if (openRe.test(lines[i])) {
            lines.splice(i, lines.length - i);
        } else {
            lines.push(line);
        }
    }
    return lines;
}, process.argv[2] === '--debug' ? [
    '/// <reference path="./typings/index.d.ts" />',
    '/// <reference path="./typings/tsd.d.ts" />'
] : []).join('\n');

fs.writeFileSync(output, result, 'utf8');
-- | Sort a list with quicksort: the head is the pivot; the tail is
-- partitioned into elements at most the pivot and elements greater than it,
-- and each side is sorted recursively.
quickSort :: (Ord a) => [a] -> [a]
quickSort [] = []
quickSort (x:xs) = quickSort lesser ++ [x] ++ quickSort greater
  where
    lesser  = filter (<= x) xs
    greater = filter (> x) xs
#!/bin/sh
# Append nvm (Node Version Manager) initialization to root's .bashrc so that
# interactive root shells pick up the nvm installation under /home/app.
# The quoted heredoc delimiter ('EOF') prevents variable expansion here —
# the lines are written verbatim and expanded when .bashrc is sourced.
cat <<'EOF' >> /root/.bashrc
export NVM_DIR="/home/app/.nvm"
[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" # This loads nvm
EOF
package trace
import (
"io"
"github.com/fighthorse/redisAdmin/component/conf"
"github.com/opentracing/opentracing-go"
)
// Package-level tracer settings. The literals below are defaults only;
// Init() overwrites them from the global configuration.
var (
	traceServiceName  = "app"
	traceFileName     = "/data/logs/trace/trace_redis.log"
	tracesamplingRate = 0.0001
	traceCloser       io.Closer // closer for the currently installed tracer; nil when none
)
// Init loads the tracer settings (service name, log file path, sampling
// rate) from the global config and installs a fresh global tracer.
// It must be called after conf.GConfig has been populated.
func Init() {
	traceServiceName = conf.GConfig.Trace.ServiceName
	traceFileName = conf.GConfig.Trace.FilePath
	tracesamplingRate = conf.GConfig.Trace.Sampling
	setGlobalTrace()
}
// setGlobalTrace closes any previously installed tracer, creates a new
// Jaeger tracer from the package-level settings and registers it as the
// opentracing global tracer.
func setGlobalTrace() {
	if traceCloser != nil {
		traceCloser.Close()
		traceCloser = nil
	}
	// NOTE(review): the error from NewJaegerTracer is discarded — a failed
	// tracer initialization goes unnoticed here; consider logging it.
	tracer, closer, _ := NewJaegerTracer(traceServiceName, traceFileName, tracesamplingRate, nil, 0)
	opentracing.SetGlobalTracer(tracer)
	traceCloser = closer
}
|
<gh_stars>1-10
/** HTTP methods accepted by the request helpers. */
export type Method = 'OPTIONS' | 'GET' | 'HEAD' | 'POST' | 'PUT' | 'DELETE' | 'TRACE' | 'CONNECT';

/** Per-request configuration options. */
export interface Config {
    contentType?: string;
    /** Whether to show a toast with the error message */
    useErrMsg?: boolean;
}
|
#! /bin/sh
#PBS -l nodes=1:ppn=1
#PBS -l walltime=1:00:00
#PBS -j oe

# When running under PBS, load the user's environment and switch to the
# directory the job was submitted from before loading the toolchain.
if [ -n "$PBS_JOBNAME" ]
then
    source "${PBS_O_HOME}/.bash_profile"
    cd "$PBS_O_WORKDIR"
    module load gcc/5.3.0
fi

# Output prefix shared by all paths below. Fix: the script defined `prefix`
# but then repeated the literal path inline; it is now used consistently.
prefix=../../gekko-output/no-data-run-4
ecoevolity --seed 325760832 --prefix "$prefix" --ignore-data --relax-missing-sites --relax-constant-sites --relax-triallelic-sites ../../configs/gekko-conc044-rate005-nopoly-varonly.yml 1>"$prefix"-gekko-conc044-rate005-nopoly-varonly.out 2>&1
<?php
// Autocomplete endpoint: prints book titles matching the "term" query
// parameter as a substring, one per line.
// Guard against a missing parameter (avoids an undefined-index notice).
$term = isset($_GET['term']) ? $_GET['term'] : '';
// Wrap in SQL LIKE wildcards for a substring match.
// NOTE(review): '%' and '_' inside the user's term act as LIKE wildcards
// too — escape them if literal matching is required.
$term = "%" . $term . "%";
$conn = mysqli_connect("localhost", "username", "password", "database");
if (!$conn) {
    die("Connection failed: " . mysqli_connect_error());
}
// Parameterized query: user input never touches the SQL string.
$sql = "SELECT title FROM books WHERE title LIKE ?;";
$stmt = mysqli_prepare($conn, $sql);
mysqli_stmt_bind_param($stmt, 's', $term);
mysqli_stmt_execute($stmt);
$result = mysqli_stmt_get_result($stmt);
if (mysqli_num_rows($result) > 0) {
    while($row = mysqli_fetch_array($result)) {
        echo $row['title'] . "\n";
    }
}
// Release the statement and the connection explicitly.
mysqli_stmt_close($stmt);
mysqli_close($conn);
?>
<gh_stars>0
package py.edu.uca.lp3.service;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import py.edu.uca.lp3.domain.Ong;
import py.edu.uca.lp3.repository.OngRepository;
import py.edu.uca.lp3exceptions.InscripcionException;
@Service
public class OngService {

    @Autowired
    private OngRepository ongRepository;

    /** Looks up a single Ong by id; returns what the repository yields (may be null). */
    public Ong findById(Long id) {
        return ongRepository.findOne(id);
    }

    /** Persists every Ong in the given list, one at a time. */
    public void saveList(List<Ong> ongs) throws InscripcionException {
        for (Ong ong : ongs) {
            save(ong);
        }
    }

    /** Returns all persisted Ongs collected into a list. */
    public List<Ong> findAll() {
        List<Ong> result = new ArrayList<>();
        for (Ong ong : ongRepository.findAll()) {
            result.add(ong);
        }
        return result;
    }

    /** Persists a single Ong. */
    public void save(Ong ong) {
        ongRepository.save(ong);
    }

    /** Removes the Ong with the given id. */
    public void delete(Long id) {
        ongRepository.delete(id);
    }
}
|
/** Rendering modes a column's data can be displayed in. */
public enum DataMode {
    NORMAL,
    HIGHLIGHTED,
    COMPRESSED
}
/** Fluent builder holding the display mode for a column. */
public class ColumnBuilder {

    private DataMode mode;

    /**
     * Sets the column's display mode.
     *
     * @param mode the mode to apply
     * @return this builder, for call chaining
     */
    public ColumnBuilder setMode(DataMode mode) {
        this.mode = mode;
        return this;
    }

    /** @return the currently configured mode (null if never set) */
    public DataMode getMode() {
        return mode;
    }
}
#!/bin/bash
# Print all arguments to stdout in bright red (ANSI escape 91).
function error {
    printf "\e[91m$@\e[0m\n"
}
# Print the supported command-line flags.
function usage {
    echo "###############################################################################"
    echo "# run 'verify.integ.graphql-schema.sh --start' to deploy                      #"
    echo "# run 'verify.integ.graphql-schema.sh --check [APIKEY] [ENDPOINT]' to run check #"
    echo "# run 'verify.integ.graphql-schema.sh --clean' to clean up stack              #"
    echo "###############################################################################"
}
# Dispatch on the first CLI flag: deploy, run the API checks, or tear down.
if [[ "$1" == "--start" ]]; then
    cdk deploy --app "node integ.graphql-schema.js"
elif [[ "$1" == "--check" ]]; then
    if [[ -z $2 || -z $3 ]]; then
        error "Error: --check flag requires [APIKEY] [ENDPOINT]"
        usage
        exit 1
    fi
    echo THIS TEST SHOULD PRODUCE AN EMPTY LIST
    # Bug fix: the query payloads had a stray `" }` appended after the
    # closing brace, making the request body invalid JSON.
    curl -XPOST -H "Content-Type:application/graphql" -H "x-api-key:$2" -d '{ "query": "query { getPlanets { id name } }" }' $3
    echo ""
    echo THIS TEST SHOULD RETURN A PLANET OBJECT
    curl -XPOST -H "Content-Type:application/graphql" -H "x-api-key:$2" -d '{ "query": "mutation ($name: String!) { addPlanet(name: $name) { id name diameter } }", "variables": { "name": "smolPlanet" } }' $3
    echo ""
    echo THIS TEST SHOULD PRODUCE AN LIST WITH A SINGLE PLANET
    curl -XPOST -H "Content-Type:application/graphql" -H "x-api-key:$2" -d '{ "query": "query { getPlanets { id name } }" }' $3
    echo ""
elif [[ "$1" == "--clean" ]];then
    cdk destroy --app "node integ.graphql-schema.js"
else
    error "Error: use flags --start, --check, --clean"
    usage
    exit 1
fi
package drogaria;
import java.util.Scanner;
/** A drugstore purchase of one medicine and one product. */
public class compra {

    public String remedio;
    public int qtdRemedio;
    public float valorRemedio;
    public String produto;
    public int qtdProduto;
    public float valorProduto;

    // Shared scanner for console input.
    Scanner ler = new Scanner(System.in);

    /** Builds a purchase with medicine and product details. */
    public compra(String remedio, int qtdRemedio, float valorRemedio, String produto, int qtdProduto, float valorProduto) {
        this.remedio = remedio;
        this.qtdRemedio = qtdRemedio;
        this.valorRemedio = valorRemedio;
        this.produto = produto;
        this.qtdProduto = qtdProduto;
        this.valorProduto = valorProduto;
    }

    /** Prompts for and reads the desired medicine name from the console. */
    public void escolherCompra(){
        System.out.println("Escolha o remedio desejado: "+remedio);
        remedio = ler.nextLine();
    }

    /**
     * Prints and returns the purchase total.
     *
     * Bug fix: the method was declared to return float but contained no
     * return statement, which does not compile.
     *
     * @param valorRemedio medicine price
     * @param valorProduto product price
     * @return the sum of both prices
     */
    public float compraValorTotal(float valorRemedio , float valorProduto ){
        float total = valorRemedio + valorProduto;
        System.out.println("O valor total da compra é: "+total);
        return total;
    }

    /** No-argument constructor; fields keep their defaults. */
    public compra() {
    }
}
|
<filename>src/imageTools/mouseWheelTool.js<gh_stars>1-10
(function($, cornerstone, cornerstoneTools) {

    'use strict';

    /**
     * Builds the standard enable/disable/activate/deactivate tool interface
     * for a tool driven by the CornerstoneToolsMouseWheel event.
     */
    function mouseWheelTool(mouseWheelCallback) {
        var eventName = 'CornerstoneToolsMouseWheel';

        // Shared teardown: drop the callback binding from the element.
        function detach(element) {
            $(element).off(eventName, mouseWheelCallback);
        }

        return {
            activate: function(element) {
                // Unbind first so repeated activation never double-fires.
                detach(element);
                var eventData = {
                };
                $(element).on(eventName, eventData, mouseWheelCallback);
            },
            disable: detach,
            enable: detach,
            deactivate: detach
        };
    }

    // module exports
    cornerstoneTools.mouseWheelTool = mouseWheelTool;

})($, cornerstone, cornerstoneTools);
|
<gh_stars>0
package com.meterware.httpunit;
/********************************************************************************************************************
* $Id: FrameSelector.java 688 2004-09-29 17:15:27Z russgold $
*
* Copyright (c) 2004, <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
* documentation files (the "Software"), to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
* to permit persons to whom the Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial portions
* of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
* THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
* CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*
*******************************************************************************************************************/
/**
* An immutable class which describes the position of a frame in the window hierarchy.
*
* @author <a href="mailto:<EMAIL>"><NAME></a>
*
* @since 1.6
**/
public class FrameSelector {

    public static FrameSelector TOP_FRAME = new FrameSelector( WebRequest.TOP_FRAME );

    static FrameSelector NEW_FRAME = new FrameSelector( WebRequest.TOP_FRAME );

    private String _name;
    private WebWindow _window;
    private FrameSelector _parent;

    /** Anonymous selector: the default Object string serves as its name. */
    FrameSelector() {
        _name = super.toString();
    }

    /** Selector for a named frame with no parent. */
    FrameSelector( String name ) {
        _name = name;
    }

    /** Selector for a named frame nested inside {@code parent}. */
    FrameSelector( String name, FrameSelector parent ) {
        _name = name;
        _parent = parent;
    }

    private FrameSelector( String name, WebWindow window ) {
        _name = name;
        _window = window;
    }

    /** Creates a top-frame selector bound to a specific window. */
    static FrameSelector newTopFrame( WebWindow window ) {
        return new FrameSelector( WebRequest.TOP_FRAME, window );
    }

    String getName() {
        return _name;
    }

    FrameSelector getParent() {
        return _parent;
    }

    /** The window this frame lives in, inherited from the parent when unset here. */
    WebWindow getWindow() {
        if (_window != null) {
            return _window;
        }
        if (_parent == null) {
            return null;
        }
        return _parent.getWindow();
    }

    public String toString() {
        return "Frame Selector: [ " + getFullName() + " ]";
    }

    /** Name qualified by every ancestor, e.g. "child in parent". */
    private String getFullName() {
        if (_parent == null) {
            return _name;
        }
        return _name + " in " + _parent.getFullName();
    }
}
|
import click
from .cmd_key import cli as revoke
@click.command('revoke', short_help='Revoke API key')
@click.option('--api-key', '-K', required=True, help='API Key or UUID')
@click.pass_context
def cli(ctx, api_key):
    """Revoke the given API key by delegating to the shared key command
    (invoked with its ``delete`` option set to the key)."""
    ctx.invoke(revoke, delete=api_key)
|
<reponame>souzafcharles/Data-Structure
/*
Class title: Data Structure
Lecturer: Prof. Dr. <NAME>
Example adapted by: <NAME>
Date: October 26, 2021
*/
#include<stdio.h>
#include <stdlib.h>
/* Singly-linked adjacency-list node holding one neighbor vertex. */
typedef struct No {
    int v;
    struct No *prox;
} No;

/* Undirected graph: `adjacencia` is an array of n adjacency lists. */
typedef struct {
    No **adjacencia;
    int n;
} Grafo;

/* Public graph API. */
void inicia_grafo(Grafo *g, int n);
void destroi_grafo(Grafo g);
void insere_aresta(Grafo g, int u, int v);
void remove_aresta(Grafo g, int u, int v);
int tem_aresta(Grafo g, int u, int v);
void imprime_arestas(Grafo g);
/* Allocates the adjacency-list array for n vertices and clears every list.
 * NOTE(review): the malloc result is not checked; fails hard on OOM. */
void inicia_grafo(Grafo *g, int n) {
    int i;
    g->n = n;
    g->adjacencia = malloc(n * sizeof(No *));
    for (i = 0; i < n; i++)
        g->adjacencia[i] = NULL;
}
/* Recursively frees every node of an adjacency list (tail first). */
void libera_lista(No *lista) {
    if (lista != NULL) {
        libera_lista(lista->prox);
        free(lista);
    }
}
/* Releases all adjacency lists and then the list array itself. */
void destroi_grafo(Grafo g) {
    int i;
    for (i = 0; i < g.n; i++)
        libera_lista(g.adjacencia[i]);
    free(g.adjacencia);
}
/* Prepends vertex v to the list and returns the new head.
 * NOTE(review): the malloc result is not checked before use. */
No* insere_na_lista(No *lista, int v) {
    No *novo = malloc(sizeof(No));
    novo->v = v;
    novo->prox = lista;
    return novo;
}
/* Inserts the undirected edge {u,v}: each endpoint is added to the other's
 * adjacency list. No duplicate check is performed. */
void insere_aresta(Grafo g, int u, int v) {
    g.adjacencia[v] = insere_na_lista(g.adjacencia[v], u);
    g.adjacencia[u] = insere_na_lista(g.adjacencia[u], v);
}
/* Removes the first node carrying v from the list (recursively) and frees
 * it; returns the (possibly new) head of the list. */
No * remove_da_lista(No *lista, int v) {
    No *proximo;
    if (lista == NULL)
        return NULL;
    else if (lista->v == v) {
        /* Head holds v: unlink and free it. */
        proximo = lista->prox;
        free(lista);
        return proximo;
    } else {
        /* Recurse into the tail and re-link whatever comes back. */
        lista->prox = remove_da_lista(lista->prox, v);
        return lista;
    }
}
/* Removes the undirected edge {u,v} from both adjacency lists. */
void remove_aresta(Grafo g, int u, int v) {
    g.adjacencia[u] = remove_da_lista(g.adjacencia[u], v);
    g.adjacencia[v] = remove_da_lista(g.adjacencia[v], u);
}
/* Returns 1 when the edge {u,v} exists (v found in u's list), else 0. */
int tem_aresta(Grafo g, int u, int v) {
    No *node = g.adjacencia[u];
    while (node != NULL) {
        if (node->v == v)
            return 1;
        node = node->prox;
    }
    return 0;
}
/* Prints every directed adjacency entry as "{u,v}", one per line.
 * Each undirected edge therefore appears twice (once per direction). */
void imprime_arestas(Grafo g) {
    int u;
    No *t;
    for (u = 0; u < g.n; u++) {
        for (t = g.adjacencia[u]; t != NULL; t = t->prox)
            printf("{%d,%d}\n", u, t->v);
    }
}
/*
 * Demo driver: builds a small undirected graph, inserts edges, prints them
 * and releases all memory.
 *
 * Bug fix: the original initialized a Grafo* with the ADDRESS OF THE
 * FUNCTION inicia_grafo (undefined behavior) and then re-initialized the
 * same graph six times, leaking each previous adjacency array. The graph is
 * now a stack object initialized once, with edges inserted instead.
 */
int main(){
    Grafo grafo;
    inicia_grafo(&grafo, 10);
    insere_aresta(grafo, 0, 1);
    insere_aresta(grafo, 1, 4);
    insere_aresta(grafo, 4, 9);
    insere_aresta(grafo, 9, 7);
    insere_aresta(grafo, 7, 2);
    imprime_arestas(grafo);
    destroi_grafo(grafo);
    return 0;
}
|
/**
 * Java class representation of an employee: an immutable identity
 * (name, address, age) plus a mutable salary.
 */
public class Employee {

    private String name;
    private String address;
    private int age;
    private double salary;

    /**
     * Creates an employee record.
     *
     * @param name    the employee's full name
     * @param address the employee's postal address
     * @param age     the employee's age in years
     * @param salary  the employee's starting salary
     */
    public Employee(String name, String address, int age, double salary) {
        this.name = name;
        this.address = address;
        this.age = age;
        this.salary = salary;
    }

    /** @return the employee's full name */
    public String getName() {
        return name;
    }

    /** @return the employee's postal address */
    public String getAddress() {
        return address;
    }

    /** @return the employee's age in years */
    public int getAge() {
        return age;
    }

    /** @return the current salary */
    public double getSalary() {
        return salary;
    }

    /** Updates the salary — the only mutable field. */
    public void setSalary(double salary) {
        this.salary = salary;
    }
}
// Application bootstrap for the GeekFlicks Ext JS app.
Ext.application({
    name: "GeekFlicks",
    appFolder: "app",
    // Controllers instantiated at startup.
    controllers: ['Movies'],
    launch: function () {
        // Full-screen viewport: a single panel hosting the movies editor
        // component (xtype 'movieseditor', registered by the app).
        Ext.create('Ext.container.Viewport', {
            layout: 'fit',
            items: [{
                xtype: 'panel',
                title: 'Top Geek Flicks of All Time',
                items: [{
                    xtype: 'movieseditor'
                }]
            }]
        });
    }
});
|
<gh_stars>1-10
# profile.py
import time
import os
import psutil
import inspect
def elapsed_since(start):
    """Format the wall-clock time elapsed since `start` (a time.time() value)
    using the coarsest fitting unit: ms, s, min or hrs."""
    elapsed = time.time() - start
    if elapsed < 1:
        return "{}ms".format(round(elapsed * 1000, 2))
    if elapsed < 60:
        return "{}s".format(round(elapsed, 2))
    if elapsed < 3600:
        return "{}min".format(round(elapsed / 60, 2))
    return "{}hrs".format(round(elapsed / 3600, 2))
def get_process_memory():
    """Return (rss, vms, shared) byte counts for the current process.

    NOTE(review): the ``shared`` field of ``memory_info()`` is
    platform-dependent (present on Linux) — confirm for the target OS.
    """
    process = psutil.Process(os.getpid())
    mi = process.memory_info()
    return mi.rss, mi.vms, mi.shared
def format_bytes(bytes):
    """Render a byte count (possibly negative) as a short human-readable
    string using decimal units: B, kB, MB or GB."""
    magnitude = abs(bytes)
    if magnitude < 1000:
        return "{}B".format(bytes)
    if magnitude < 1e6:
        return "{}kB".format(round(bytes / 1e3, 2))
    if magnitude < 1e9:
        return "{}MB".format(round(bytes / 1e6, 2))
    return "{}GB".format(round(bytes / 1e9, 2))
def profile(func, *args, **kwargs):
    """Decorator that reports memory deltas (RSS/VMS/shared), elapsed wall
    time and the mean of two CPU%% samples for a single call of `func`.

    Plain functions get a wrapper back; bound methods are executed
    immediately with the supplied args and their result returned.
    """
    def wrapper(*args, **kwargs):
        # Snapshot memory and CPU usage before the call.
        rss_before, vms_before, shared_before = get_process_memory()
        cpu1 = psutil.cpu_percent()
        start = time.time()
        result = func(*args, **kwargs)
        elapsed_time = elapsed_since(start)
        cpu2= psutil.cpu_percent()
        rss_after, vms_after, shared_after = get_process_memory()
        # Report the deltas; CPU% is the average of the before/after samples.
        print("Profiling: {:>20} RSS: {:>8} | VMS: {:>8} | SHR {"
              ":>8} | time: {:>8} | CPU% {:>8}"
              .format("<" + func.__name__ + ">",
                      format_bytes(rss_after - rss_before),
                      format_bytes(vms_after - vms_before),
                      format_bytes(shared_after - shared_before),
                      elapsed_time, (cpu1+cpu2)/2))
        return result
    if inspect.isfunction(func):
        return wrapper
    elif inspect.ismethod(func):
        return wrapper(*args,**kwargs)
    # NOTE(review): any other callable (class, builtin, partial) falls
    # through and returns None — confirm this is intended.
#define _XOPEN_SOURCE 500 // usleep (>= 500)
#include <features.h>
#include <stdio.h>
#include <memory.h>
#include "defs.h"
#include <unistd.h> // ssize_t
#include <sys/select.h> // fd_set
#include "fakeconn.h"
/* Capacity of the in-memory VT100 output ring buffer. */
#define VT100BUF_MAX 40

typedef struct vt100out vt100out_s;

/* Byte ring buffer standing in for a real connection.
 * `begin` is the read index, `end` the write index; the buffer is treated
 * as empty when they are equal. */
struct vt100out {
    char buf[VT100BUF_MAX];
    size_t begin;
    size_t end;
};
/* conn_i.write: copy `len` bytes into the ring buffer, wrapping at the end.
 * NOTE(review): there is no fullness check — writing more than the reader
 * has consumed advances `end` past `begin` and silently overwrites unread
 * bytes (and makes the buffer look empty again). */
static ssize_t vt100out_fifo_write (void *this_, const void *buf_, size_t len)
{
    vt100out_s *m_;
    m_ = (vt100out_s *)this_;
    const char *buf;
    buf = (const char *)buf_;
    size_t i;
    for (i = 0; i < len; ++i) {
        m_->buf[m_->end++] = *buf++;
        if (! (m_->end < VT100BUF_MAX))
            m_->end = 0; /* wrap around */
    }
    return (ssize_t)len;
}
/* conn_i.read: block until at least one byte is buffered (polling with a
 * 16 ms sleep), then drain up to `len` bytes into `buf_`.
 * Returns the number of bytes actually copied (>= 1). */
static ssize_t vt100out_fifo_read (void *this_, void *buf_, size_t len)
{
    vt100out_s *m_;
    m_ = (vt100out_s *)this_;
    char *buf;
    buf = (char *)buf_;
    /* Busy-wait (with sleep) for data from the writer side. */
    while (m_->end == m_->begin)
        usleep (16 * 1000);
    for (; 0 < len && !(m_->end == m_->begin); --len) {
        *buf++ = m_->buf[m_->begin++];
        if (! (m_->begin < VT100BUF_MAX))
            m_->begin = 0; /* wrap around */
    }
    return buf - (const char *)buf_;
}
/* conn_i.connect: nothing to establish for the in-memory fifo. */
static bool vt100out_fifo_connect (void *this_, uint16_t ws_col, uint16_t ws_row) { return true; }

/* conn_i.dtor: scrub the fifo state back to all-zeros. */
static void vt100out_fifo_dtor (void *this_)
{
    vt100out_s *m_;
    m_ = (vt100out_s *)this_;
    memset (m_, 0, sizeof(vt100out_s));
}
/* Forward declaration of the vtable defined at the bottom of this file. */
static struct conn_i vt100out;

/* Placement-style constructor: zero-initializes the fifo state inside the
 * caller-provided storage (`this_`, `cb` bytes) and returns the shared
 * vtable. Returns NULL when the storage is too small for a vt100out_s. */
struct conn_i *vt100out_fifo_ctor (void *this_, size_t cb)
{
    if (cb < sizeof(vt100out_s))
        return NULL;
    vt100out_s *m_;
    m_ = (vt100out_s *)this_;
    memset (m_, 0, sizeof(vt100out_s));
    return &vt100out;
}

/* vtable wiring the fifo implementation into the generic conn_i interface. */
static struct conn_i vt100out = {
    vt100out_fifo_dtor,
    vt100out_fifo_connect,
    vt100out_fifo_write,
    vt100out_fifo_read,
};
|
#!/bin/bash
# Fetch the tutorial notebooks from pyiron_continuum into $HOME, then remove
# the cloned repository and leftover build files so only the notebooks remain.
git clone https://github.com/pyiron/pyiron_continuum "$HOME"/pyiron_continuum/
cp "$HOME"/pyiron_continuum/notebooks/fenics_tutorial.ipynb "$HOME"/
cp "$HOME"/pyiron_continuum/notebooks/damask_tutorial.ipynb "$HOME"/
rm -r "$HOME"/pyiron_continuum
# Clean up environment/config files left in the home directory.
rm "$HOME"/*.yml
rm "$HOME"/Dockerfile
rm "$HOME"/*.sh
|
# Install zip and start a virtual frame buffer.
if [ "$DRONE" = "true" ]; then
  sudo apt-get -y -q install zip
  sudo apt-get -y -q install libappindicator1
  # Chrome is needed for the dart2js test run below.
  curl -O https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb
  sudo dpkg -i google-chrome-stable_current_amd64.deb
  # NOTE(review): `start` is the legacy upstart command — a systemd-based CI
  # image needs a different way to launch xvfb.
  sudo start xvfb
  export HAS_DARTIUM=true
fi

set -o errexit

# Display installed versions.
dart --version
/usr/bin/google-chrome --version

# Get our packages.
pub get

# Build the archive.
# Nightly releases only for the upstream repo, or when explicitly forced.
if test x$DRONE_REPO_SLUG = xgithub.com/dart-lang/spark -o x$FORCE_NIGHTLY = xyes ; then
  # Retrieve configuration from the master branch
  curl -o tool/release-config.json \
    https://raw.githubusercontent.com/dart-lang/chromedeveditor/master/ide/tool/release-config.json
  ./grind release-nightly
else
  ./grind deploy
fi

./grind mode-test

# Turn on fast fail for the bash script.
set -e

# Run tests on the Dart version of the app.
if [ "$HAS_DARTIUM" = "true" ]; then
  dart tool/test_runner.dart --dartium
fi

# Run tests on the dart2js version of the app.
if [ "$DRONE" = "true" ]; then
  dart tool/test_runner.dart --chrome-dev
else
  dart tool/test_runner.dart --chrome
fi
|
<gh_stars>0
# Read trip destinations until the sentinel "End" is entered.
destination = input()
while destination != "End":
    # Amount needed for this destination.
    need_money = int(input())
    money = 0
    # Accumulate savings entries until the target is reached.
    while money < need_money:
        saved_money = int(input())
        money += saved_money
    else:
        # while/else: the else-block runs when the loop condition becomes
        # false. There is no `break` above, so this always fires once enough
        # money has been saved.
        print(f"Going to {destination}")
    destination = input()
#!/bin/bash
# Copyright 2021 CYBERCRYPT
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Run all unit tests. Usage:
#   ./scripts/unit_tests.sh [coverage]
set -euo pipefail

source ./scripts/build-env
source ./scripts/dev-env

# Optional "coverage" argument switches on a coverage profile output file.
COVERAGE=""
if [[ ${1-} == "coverage" ]]; then
    COVERAGE="-coverprofile coverage-unit.out"
fi

# Every package except the repository root and the integration-test folders.
export TEST_FOLDERS=$(go list ./... | grep -vE 'encryption-service$|tests')
echo '[*] testfolders: '
echo $TEST_FOLDERS
echo '[*] running unit tests'
# -count=1 disables Go's test-result caching so tests always re-run.
go test -count=1 ${COVERAGE} -v $TEST_FOLDERS
|
import Foundation

/// Names of the character sets the config file may ask us to count.
enum CharacterSetName: String, Decodable {
    case letters, uppercaseLetters, lowercaseLetters, alphaNumerics, decimalDigits, whitespacesAndNewlines, whitespaces, newlines, backslash
}

/// One terminal entry from config.json: a display name plus the raw string.
struct Terminal: Decodable {
    let name: String
    let string: String
}
/// Counts, for each requested character set, how many characters of
/// `terminalString` belong to it.
///
/// Fix: the original switch handled only `.letters`, `.decimalDigits` and
/// `.whitespaces`, silently reporting 0 for the six other declared cases.
/// Every case of `CharacterSetName` is now mapped to its character set.
///
/// - Parameters:
///   - terminalString: The string to inspect.
///   - characterSets: The named character sets to count.
/// - Returns: A dictionary from each requested set name to its count.
func countCharacterSets(in terminalString: String, using characterSets: [CharacterSetName]) -> [CharacterSetName: Int] {
    var characterSetCounts: [CharacterSetName: Int] = [:]
    for characterSet in characterSets {
        let count: Int
        switch characterSet {
        case .letters:
            count = terminalString.countCharacters(in: .letters)
        case .uppercaseLetters:
            count = terminalString.countCharacters(in: .uppercaseLetters)
        case .lowercaseLetters:
            count = terminalString.countCharacters(in: .lowercaseLetters)
        case .alphaNumerics:
            count = terminalString.countCharacters(in: .alphanumerics)
        case .decimalDigits:
            count = terminalString.countCharacters(in: .decimalDigits)
        case .whitespacesAndNewlines:
            count = terminalString.countCharacters(in: .whitespacesAndNewlines)
        case .whitespaces:
            count = terminalString.countCharacters(in: .whitespaces)
        case .newlines:
            count = terminalString.countCharacters(in: .newlines)
        case .backslash:
            // No builtin set for a single character; build one on the fly.
            count = terminalString.countCharacters(in: CharacterSet(charactersIn: "\\"))
        }
        characterSetCounts[characterSet] = count
    }
    return characterSetCounts
}
extension String {
    /// Number of unicode scalars of this string contained in `characterSet`.
    func countCharacters(in characterSet: CharacterSet) -> Int {
        return self.unicodeScalars.reduce(0) { total, scalar in
            characterSet.contains(scalar) ? total + 1 : total
        }
    }
}
/// Loads config.json from the main bundle and prints, for every terminal
/// entry, its length and per-character-set counts.
///
/// Fix: the original decoded the file as `[String: [Terminal]]` and then
/// called `CharacterSetName(rawValue: $0)` on `Terminal` values — a type
/// mismatch that cannot compile (and force-unwrapping the result would
/// crash on any unknown name). The expected file shape is now modeled
/// explicitly and decoded in one step.
func processTerminalJSON() {
    /// Shape of config.json: terminal entries plus the set names to count.
    struct ConfigFile: Decodable {
        let terminals: [Terminal]
        let characterSets: [CharacterSetName]
    }
    guard let url = Bundle.main.url(forResource: "config", withExtension: "json") else {
        print("Error: JSON file not found")
        return
    }
    do {
        let data = try Data(contentsOf: url)
        let config = try JSONDecoder().decode(ConfigFile.self, from: data)
        for terminal in config.terminals {
            print("Terminal: \(terminal.name)")
            print("Length: \(terminal.string.count)")
            print("Character Set Counts:")
            let counts = countCharacterSets(in: terminal.string, using: config.characterSets)
            for (characterSet, count) in counts {
                print("- \(characterSet.rawValue): \(count)")
            }
            print()
        }
    } catch {
        // Covers both I/O failures and malformed/mistyped JSON.
        print("Error: \(error.localizedDescription)")
    }
}

processTerminalJSON()
def print_primes(n):
    """Print every prime in [2, n] separated (and terminated) by a single
    space, matching the original output format.

    Improvement: the original tested divisibility by every integer below
    `num` (O(n^2) overall); divisors are now checked only up to sqrt(num),
    skipping even candidates.
    """
    for num in range(2, n + 1):
        if _is_prime(num):
            print(num, end=' ')


def _is_prime(num):
    """Return True when `num` is prime (False for num < 2)."""
    if num < 2:
        return False
    if num % 2 == 0:
        # 2 is the only even prime.
        return num == 2
    for i in range(3, math.isqrt(num) + 1, 2):
        if num % i == 0:
            return False
    return True
<reponame>dvinubius/meta-multisig
import { oneRemInPx } from '~~/styles/styles';

/** Convert a length in CSS rem units to pixels using the app-wide rem size. */
export const remToPx = (v: number): number => oneRemInPx * v;
|
#!/usr/bin/env bash
### ===================
# Prepare the app package, including
#
# - IPA file
# - ".dsym.zip" file
### ===================
echo "iOS_BUILD = $iOS_BUILD"

# Both flags must be true; otherwise this step is a no-op.
if [ "$BUILD_APP" != true -o "$iOS_BUILD" != true ]; then
  echo "Info: Can only run for iOS build. Skip~~~"
  exit 0
fi

OUTPUTDIR="$PWD/platforms/ios/build/device/"

# Generate `ipa` file.
# NOTE(review): `xcrun PackageApplication` was removed in newer Xcode
# releases — confirm the CI toolchain still provides it.
xcrun -log -sdk iphoneos \
  PackageApplication -v "$OUTPUTDIR/$APP_NAME.app" \
  -o "$OUTPUTDIR/$APP_NAME.ipa"

# Generate `.dsym.zip` file
/usr/bin/zip --verbose --recurse-paths "$OUTPUTDIR/$APP_NAME.dsym.zip" "$OUTPUTDIR/$APP_NAME.app.dsym"
|
'use strict'
/**
* adonis-websocket
*
* (c) <NAME> <<EMAIL>>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
const cluster = require('cluster')
const debug = require('debug')('adonis:websocket')
const receiver = require('./receiver')
const sender = require('./sender')
module.exports = {
/**
* Bind listener to listen for process message
*
* @method init
*
* @return {void}
*/
init () {
if (cluster.isWorker) {
debug('adding listener from worker to receive node message')
process.on('message', receiver)
}
},
/**
* Sends a message out from the process. The cluster should bind
* listener for listening messages.
*
* @method send
*
* @param {String} handle
* @param {String} topic
* @param {Object} payload
*
* @return {void}
*/
send (handle, topic, payload) {
if (cluster.isWorker) {
sender(handle, topic, payload)
}
},
/**
* Clear up event listeners
*
* @method destroy
*
* @return {void}
*/
destroy () {
debug('cleaning up cluster listeners')
process.removeListener('message', receiver)
}
}
|
class Leaderboard:
    """Tracks one score per player and reports the top scorers."""

    def __init__(self):
        # (name, score) pairs in first-seen order; at most one per player.
        self.player_scores = []

    def add_score(self, player_name, score):
        """Record `score` for `player_name`, replacing any previous entry
        while keeping the player's original position in the list."""
        for index in range(len(self.player_scores)):
            if self.player_scores[index][0] == player_name:
                self.player_scores[index] = (player_name, score)
                return
        self.player_scores.append((player_name, score))

    def top_n_scores(self, n):
        """Return up to `n` (name, score) pairs, highest score first.
        Ties keep insertion order (stable sort)."""
        ranked = sorted(self.player_scores, key=lambda entry: entry[1], reverse=True)
        return ranked[:n]
# Append the L1 dense-layer regularizer flag to the hyperparameter string
# (comma-separated key=value pairs consumed downstream).
HPARAMS_STR+="l1_dense_regularizer=true,"
# Authentication: $AMADEUS_CLIENT_ID & $AMADEUS_CLIENT_SECRET can be defined
# in your environmental variables or directly in your script
# Fetch an OAuth2 token (client-credentials grant) and scrape the
# "access_token" field out of the JSON reply with grep/sed.
# NOTE(review): parsing JSON with grep/sed is brittle — any formatting
# change in the API response breaks the extraction; consider jq.
ACCESS_TOKEN=$(curl -H "Content-Type: application/x-www-form-urlencoded" \
https://test.api.amadeus.com/v1/security/oauth2/token \
-d "grant_type=client_credentials&client_id=$AMADEUS_CLIENT_ID&client_secret=$AMADEUS_CLIENT_SECRET" \
| grep access_token | sed 's/"access_token": "\(.*\)"\,/\1/' | tr -d '[:space:]')
# Query traveled air-traffic statistics for Madrid, January 2017.
# NOTE(review): -k disables TLS certificate verification — remove it unless
# a broken local trust store genuinely requires it.
curl -X GET "https://test.api.amadeus.com/v1/travel/analytics/air-traffic/traveled?originCityCode=MAD&period=2017-01" -H "Authorization: Bearer $ACCESS_TOKEN" -k
<filename>example37-three-to-handle-goroutine/Context/main.go
package main
import (
"context"
"fmt"
"time"
)
// foo ("A") spawns bar on the same context and then does work every second
// until ctx is cancelled, at which point it prints an exit message and
// returns. Cancellation therefore stops both goroutines.
func foo(ctx context.Context, name string) {
	go bar(ctx, name) // A calls B
	for {
		select {
		case <-ctx.Done():
			fmt.Println(name, "A Exit")
			return
		case <-time.After(1 * time.Second):
			fmt.Println(name, "A do something")
		}
	}
}
// bar ("B") does work every two seconds until ctx is cancelled, then prints
// an exit message and returns.
func bar(ctx context.Context, name string) {
	for {
		select {
		case <-ctx.Done():
			fmt.Println(name, "B Exit")
			return
		case <-time.After(2 * time.Second):
			fmt.Println(name, "B do something")
		}
	}
}
// main demonstrates context cancellation fanning out to both goroutines:
// after five seconds of work, cancel() makes A and B exit; the final sleep
// gives them time to print their exit messages before the process ends.
func main() {
	ctx, cancel := context.WithCancel(context.Background())
	go foo(ctx, "FooBar")
	fmt.Println("client release connection, need to notify A, B exit")
	time.Sleep(5 * time.Second)
	cancel() //mock client exit, and pass the signal, ctx.Done() gets the signal time.Sleep(3 * time.Second)
	time.Sleep(3 * time.Second)
}
|
<html>
<head>
  <script src="https://ajax.googleapis.com/ajax/libs/angularjs/1.6.9/angular.min.js"></script>
</head>
<body>
  <!-- Simple keyword search: the controller fetches matching items and
       ng-repeat renders their titles. -->
  <div ng-app="myApp" ng-controller="myCtrl">
    <p>Enter search keyword:<input type="text" ng-model="keyword" /></p>
    <button ng-click="search()">Search</button>
    <ul>
      <li ng-repeat="item in items">
        {{item.title}}
      </li>
    </ul>
  </div>
  <script>
    var app = angular.module('myApp', []);
    app.controller('myCtrl', function($scope, $http) {
      $scope.search = function() {
        // Fix: the raw keyword was concatenated into the URL, so spaces,
        // '&', '#' etc. corrupted the query string; it is now URL-encoded.
        $http.get("https://example.api/items?keyword=" + encodeURIComponent($scope.keyword))
          .then(function(response) {
            $scope.items = response.data;
          });
      }
    });
  </script>
</body>
</html>
<reponame>RobertStivanson/CPP-Sorts
#include <iostream>
#include <iomanip>
#include <cstdlib>
#include <time.h>
#include <string>
#include <cmath>
#include <algorithm>
using namespace std;
// Global Variables
// These are counters that are used when the sorting happens
int NUMBER_OF_COMPARISONS, NUMBER_OF_SWAPS;
// Value range used when filling arrays with random numbers.
const int MAX_UPPER_BOUND = 100, MAX_LOWER_BOUND = -100;

// This is a value that holds all sorts
// it is used to cycle through every sort
enum Sort {
	BOGO_SORT = 0,
	BUBBLE_SORT,
	SELECTION_SORT,
	SHELL_SORT,
	MERGE_SORT,
	QUICK_SORT,
	NONE // sentinel: one past the last sort
};

// Function prototypes
// Sorting algorithms under test.
void BogoSort(int * array, const int & size);
void BubbleSort(int * array, const int & size);
void SelectionSort(int * array, const int & size);
void SelectionSortReverse(int * array, const int & size);
void ShellSort(int * array, const int & size);
void MergeSort(int * array, int * second, const int & left, const int & right);
void Merge(int * array, int * second, const int & left, const int & mid, const int & right);
void QuickSort(int * array, const int & low, const int & high);
int Partition(int * array, const int & low, const int & high);
// Shared helpers: swapping, setup, reporting and verification.
void Swap(int * array, const int & first, const int & second);
string SortToString(Sort sort);
void ResetCounters();
void FillArray(int * array, const int & size, const int & upperBound, const int & lowerBound);
int CalculateSpan(const int & upperBound, const int & lowerBound);
void PrintArray(int * array, const int & size);
void PrintCounters();
void TestSortedArray(int * array, const int & size, int * testArray);
void CopyArray(int * one, const int & size, int * two);
// main
// This is the programs main routine: for array sizes 5, 25, ..., 15625
// (powers of 5) it runs every sort on a reverse-sorted array and reports
// comparison/swap statistics, verifying each result against std::sort.
int main() {
	// Fix: the original declared an outer `int size;` that was never used —
	// it was immediately shadowed by the for-loop's own `size`.
	int * array = NULL,
	    * second = NULL,
	    * test = NULL;
	bool printAndTest;

	// Seed the random number generator
	srand(time(NULL));

	for (int size = 5; size <= 15625; size *= 5) {
		// Create the arrays
		array = new int[size];
		second = new int[size]; // scratch space used by merge sort
		test = new int[size];   // unsorted copy used to verify the result

		// For every type of sort
		for (int i = BOGO_SORT; i < NONE; i++) {
			printAndTest = true;
			cout << "// ********" << SortToString(Sort(i)) << "******** //" << endl;

			// Fill the array with random values
			FillArray(array, size, MAX_UPPER_BOUND, MAX_LOWER_BOUND);

			// Reverse sort the list (worst case for ascending sorts)
			SelectionSortReverse(array, size);

			// Reset the sort counters
			ResetCounters();

			// Copy the unsorted array into a second array for the builtin sort
			CopyArray(array, size, test);

			// Perform the sort
			switch (Sort(i)) {
				case BOGO_SORT:
					// Bogo sort is factorial-time; only run it on tiny inputs.
					if (size > 5) {
						cout << "Bogo sort skipped due to the time requirement of the algorithm." << endl;
						printAndTest = false;
					} else {
						BogoSort(array, size);
					}
					break;
				case BUBBLE_SORT:
					BubbleSort(array, size);
					break;
				case SELECTION_SORT:
					SelectionSort(array, size);
					break;
				case SHELL_SORT:
					ShellSort(array, size);
					break;
				case MERGE_SORT:
					MergeSort(array, second, 0, size - 1);
					break;
				case QUICK_SORT:
					QuickSort(array, 0, size - 1);
					break;
			}

			if (printAndTest) {
				// Print the array statistics
				PrintCounters();
				TestSortedArray(array, size, test);
			}

			cout << endl;
		}

		// free the dynamic memory
		delete [] array;
		delete [] second;
		delete [] test;
	}

	return 0;
}
// BogoSort
// params:
//   array: This is the array in which the elements are stored
//   size: This is the size of the array in which is being sorted
// Performs the Bogo sort algorithm: repeatedly shuffle the whole array,
// then check whether it happens to be sorted. Expected running time is
// factorial in size, which is why main() only runs it for size == 5.
// Side effects: increments the global NUMBER_OF_COMPARISONS counter here
// and (via Swap) NUMBER_OF_SWAPS.
void BogoSort(int * array, const int & size) {
    bool sorted = false;
    while (!sorted) {
        // Shuffle: swap each position with a uniformly random position
        for (int i = 0; i < size; ++i) {
            Swap(array, i, rand() % size);
        }
        // Assume the array is sorted
        sorted = true;
        // Scan adjacent pairs; stop at the first inversion found
        for (int i = 0; i < (size - 1) && sorted; ++i) {
            ++NUMBER_OF_COMPARISONS;
            if (array[i] > array[i + 1]) {
                sorted = false;
            }
        }
    }
}
// BubbleSort
// params:
//   array: the array holding the elements to sort
//   size: the number of elements in the array
// Classic bubble sort: larger values bubble rightward one neighbour swap
// at a time. Every adjacent comparison increments the global
// NUMBER_OF_COMPARISONS counter; swaps are tallied inside Swap().
void BubbleSort(int * array, const int & size) {
    // Make (size - 1) full passes over the array
    for (int pass = 0; pass < size - 1; ++pass) {
        // Walk every adjacent pair in this pass
        for (int idx = 0; idx + 1 < size; ++idx) {
            ++NUMBER_OF_COMPARISONS;
            // Out-of-order neighbours get exchanged
            if (array[idx] > array[idx + 1]) {
                Swap(array, idx, idx + 1);
            }
        }
    }
}
// SelectionSort
// params:
//   array: the array holding the elements to sort
//   size: the number of elements in the array
// Selection sort: grows a sorted prefix by repeatedly selecting the
// smallest remaining element. Each element-to-element comparison bumps
// the global NUMBER_OF_COMPARISONS counter.
void SelectionSort(int * array, const int & size) {
    // Extend the sorted prefix one slot at a time
    for (int front = 0; front < size - 1; ++front) {
        // Locate the smallest element in the unsorted suffix
        int smallest = front;
        for (int scan = front + 1; scan < size; ++scan) {
            ++NUMBER_OF_COMPARISONS;
            if (array[scan] < array[smallest]) {
                smallest = scan;
            }
        }
        // Move it into place at the boundary of the sorted prefix
        Swap(array, front, smallest);
    }
}
// SelectionSortReverse
// params:
//   array: This is the array in which the elements are stored
//   size: This is the size of the array in which is being sorted
// Performs the Selection sort algorithm on the array into descending
// order. main() uses this so every benchmark run starts from the same
// worst-case (reverse-ordered) input.
// Fix: the local was named "minIndex" and the comments spoke of the
// "minimum" even though the code selects the MAXIMUM remaining value;
// the name and comments now match the behavior (code logic unchanged).
void SelectionSortReverse(int * array, const int & size) {
    int maxIndex;
    // For every element in the array minus the last one
    for (int i = 0; i < size - 1; ++i) {
        // Assume the current slot holds the largest remaining value
        maxIndex = i;
        for (int j = (i + 1); j < size; ++j) {
            // If this value is greater than the current maximum
            if (array[j] > array[maxIndex]) {
                // Then this is the new maximum value
                maxIndex = j;
            }
            // Increment the number of comparisons
            ++NUMBER_OF_COMPARISONS;
        }
        // Swap the maximum value into index i
        Swap(array, i, maxIndex);
    }
    return;
}
// MergeSort
// params:
//   array: This is the array in which the elements are stored
//   second: scratch buffer the values are merged into
//   left: left (inclusive) starting index in the array
//   right: right (inclusive) ending index in the array
// Recursive top-down merge sort of array[left..right].
// Fix: the midpoint is computed as left + (right - left) / 2 instead of
// (left + right) / 2, so the addition cannot overflow int for very large
// indices; the value is identical for all valid non-negative indices.
void MergeSort(int * array, int * second, const int & left, const int & right) {
    // A range of fewer than two elements is already sorted
    if (left < right) {
        // Overflow-safe midpoint of [left, right]
        int mid = left + (right - left) / 2;
        // Recursively sort each half
        MergeSort(array, second, left, mid);
        MergeSort(array, second, mid + 1, right);
        // Merge the two sorted halves back together
        Merge(array, second, left, mid, right);
    }
    return;
}
// Merge
// params:
//   array: This is the array in which the elements are stored
//   second: scratch buffer the merged run is first written into (index 0 up)
//   left: left (inclusive) starting index of the run in array
//   mid: last index of the left half; the right half starts at mid + 1
//   right: right (inclusive) ending index of the run in array
// Merges the sorted halves array[left..mid] and array[mid+1..right] into
// second[0..right-left], then copies the result back into
// array[left..right]. Each element-vs-element decision increments
// NUMBER_OF_COMPARISONS.
// Note: on ties the right-half element is taken first (the comparison is
// strict <), so this merge is not stable.
void Merge(int * array, int * second, const int & left, const int & mid, const int & right) {
    int i = left,      // read cursor into the left half
        j = mid + 1,   // read cursor into the right half
        k = 0;         // write cursor into the scratch buffer
    // While both halves still have unmerged elements
    while(i <= mid && j <= right) {
        // Take the smaller of the two head elements
        if(array[i] < array[j]) {
            second[k++] = array[i++];
        // Right head is smaller or equal
        } else {
            second[k++] = array[j++];
        }
        // One comparison was needed to pick the winner
        ++NUMBER_OF_COMPARISONS;
    }
    // Drain any remainder of the left half
    while(i <= mid) {
        second[k++] = array[i++];
    }
    // Drain any remainder of the right half
    while(j <= right) {
        second[k++] = array[j++];
    }
    // Copy back: k now equals the merged count (right - left + 1), so
    // walking array from right down to left while pre-decrementing k
    // realigns second[0..k-1] with array[left..right]
    for(i = right; i >= left; i--) {
        array[i] = second[--k];
    }
    return;
}
// ShellSort
// params:
//   array: This is the array in which the elements are stored
//   size: This is the size of the array in which is being sorted
// Performs Shellsort with the gap sequence size/2, size/4, ..., 1.
// BUG FIX: the original compared array[j] with array[j + gap] but then
// called Swap(array, j, j + 1); for any gap > 1 that swaps the wrong
// pair and leaves the array unsorted. The swap now uses the compared
// pair (j, j + gap). The unused local "tmp" has also been removed.
void ShellSort(int * array, const int & size) {
    // Halve the gap each round until it reaches zero
    for (int gap = size / 2; gap > 0; gap /= 2) {
        // Gapped insertion: place each element within its gap-chain
        for (int i = gap; i < size; ++i) {
            // Walk back through this element's chain, fixing inversions
            for (int j = i - gap; j >= 0; j -= gap) {
                // If the earlier chain element is bigger than the later one
                if (array[j] > array[j + gap]) {
                    // Swap the compared pair (was (j, j + 1) — wrong for gap > 1)
                    Swap(array, j, j + gap);
                }
                // Increment the number of comparisons
                ++NUMBER_OF_COMPARISONS;
            }
        }
    }
    return;
}
// Quicksort
// params:
// array: This is the array in which the elements are stored
// size: This is the size of the array in which is being sorted
// Performs the Quicksort algorithm on the array
void QuickSort(int * array, const int & low, const int & high) {
// If the gap is greater than one
if (low < high) {
// Create a partition point
int partition = Partition(array, low, high);
// Sort the sub arrays
QuickSort(array, low, (partition - 1));
QuickSort(array, (partition + 1), high);
}
return;
}
// Partition
// params:
//   array: the array holding the elements to partition
//   low: lower (inclusive) bound of the range to partition
//   high: higher (inclusive) bound of the range to partition
// Lomuto partition scheme with array[high] as the pivot. On return the
// pivot sits at the returned index, with pivot-or-smaller values packed
// to its left. Every scan comparison increments NUMBER_OF_COMPARISONS.
int Partition(int * array, const int & low, const int & high) {
    // The last element of the range is the pivot
    const int pivot = array[high];
    int store = low;
    // Sweep the range, packing pivot-or-smaller values leftward
    for (int scan = low; scan < high; ++scan) {
        ++NUMBER_OF_COMPARISONS;
        if (array[scan] <= pivot) {
            Swap(array, store, scan);
            ++store;
        }
    }
    // Drop the pivot just past the packed values; that slot is final
    Swap(array, store, high);
    return store;
}
// Swap
// params:
//   array: the array holding the elements
//   first: index of the first element to exchange
//   second: index of the second element to exchange
// Exchanges two elements and tallies the exchange in the global
// NUMBER_OF_SWAPS counter used by the performance report.
void Swap(int * array, const int & first, const int & second) {
    // Three-step exchange through a temporary
    const int held = array[first];
    array[first] = array[second];
    array[second] = held;
    // Every exchange counts toward the benchmark statistics
    ++NUMBER_OF_SWAPS;
}
// ResetCounters
// Zeroes both global performance tallies so the next benchmark run
// starts counting from a clean slate.
void ResetCounters() {
    NUMBER_OF_COMPARISONS = NUMBER_OF_SWAPS = 0;
}
// SortToString
// params:
//   sort: the Sort enum value to convert
// Maps a Sort value to its display name; NONE (the loop sentinel) and
// any out-of-range value map to the empty string, matching the old
// switch's fall-through behavior.
string SortToString(Sort sort) {
    // Display names indexed by the enum's ordinals (BOGO_SORT == 0, ...)
    static const char * const kNames[] = {
        "Bogo Sort",
        "Bubble Sort",
        "Selection Sort",
        "Shell Sort",
        "Merge Sort",
        "Quick Sort",
    };
    if (sort >= BOGO_SORT && sort < NONE) {
        return kNames[sort];
    }
    return "";
}
// FillArray
// params:
//   array: destination array to fill
//   size: number of slots to fill
//   upperBound: largest value we want to possibly appear in the array
//   lowerBound: smallest value we want to possibly appear in the array
// Fills the array with random values drawn from the span computed by
// CalculateSpan(), shifted up by lowerBound. Prints diagnostics for an
// invalid size or an unusable span instead of filling.
void FillArray(int * array, const int & size, const int & upperBound, const int & lowerBound) {
    int span = CalculateSpan(upperBound, lowerBound);
    // A non-positive size cannot be filled
    if (size <= 0) {
        cout << "The size is invalid" << endl;
        return;
    }
    // A non-positive span means the bounds were unusable
    if (span <= 0) {
        cout << " -- Unable to fill the array. Invalid span: " << span << endl;
        return;
    }
    // Announce the fill parameters
    cout << "Filling " << size << " random values between ";
    cout << lowerBound << " and " << upperBound << "... ";
    // Draw one value per slot, shifted into the requested range
    for (int i = 0; i < size; ++i) {
        array[i] = rand() % span + lowerBound;
    }
    cout << "Done!" << endl;
    return;
}
// CalculateSpan
// params:
//   upperBound: the largest value that may appear in the array
//   lowerBound: the smallest value that may appear in the array
// Returns the count of distinct integers in [lowerBound, upperBound],
// i.e. upperBound - lowerBound + 1, so FillArray's
// "rand() % span + lowerBound" can produce every value in the range
// INCLUDING upperBound. Prints a diagnostic and returns 0 when
// upperBound <= lowerBound.
// BUG FIX: the original added the inclusive +1 only when lowerBound was
// negative; for non-negative bounds (and for two negative bounds) the
// span was upperBound - lowerBound, so upperBound itself could never be
// generated. The width is now inclusive in every valid case.
int CalculateSpan(const int & upperBound, const int & lowerBound) {
    int span = 0;
    // Only a strictly increasing pair of bounds yields a usable range
    if (upperBound > lowerBound) {
        // Inclusive width of [lowerBound, upperBound]
        span = upperBound - lowerBound + 1;
    } else {
        cout << " -- Cannot calculate a value above " << lowerBound << " and below " << upperBound << "!" << endl;
    }
    return span;
}
// PrintArray
// params:
//   array: the array whose contents should be printed
//   size: the number of elements in the array
// Prints every element followed by a space, then a trailing newline.
void PrintArray(int * array, const int & size) {
    // Emit each element with a single trailing space
    for (int idx = 0; idx < size; ++idx) {
        cout << array[idx] << " ";
    }
    // Terminate the line
    cout << endl;
}
// PrintCounters
// Reports the comparison and swap tallies accumulated (in the global
// counters) by the most recent sort run.
void PrintCounters() {
    cout << "Performance results:" << endl
         << "Comparisons: " << NUMBER_OF_COMPARISONS << endl
         << "Swaps: " << NUMBER_OF_SWAPS << endl;
}
// TestSortedArray
// params:
//   array: the array to check for correct ordering
//   size: the size of both passed arrays
//   testArray: a copy of the original data; sorted here with std::sort
// Sorts testArray with the standard library and compares it slot by slot
// against array, then prints whether the custom sort produced the same
// ordering.
void TestSortedArray(int * array, const int & size, int * testArray) {
    // Build the reference ordering with the standard library's sort
    sort(testArray, testArray + size);
    // Compare every slot against the reference
    bool matches = true;
    for (int idx = 0; idx < size; ++idx) {
        if (array[idx] != testArray[idx]) {
            matches = false;
        }
    }
    // Announce the verdict
    cout << (matches ? "The array was sorted properly!" : "The array was NOT sorted properly!") << endl;
}
// CopyArray
// params:
//   one: source array whose values are copied from
//   size: number of elements to copy
//   two: destination array the values are copied into
// Duplicates the first array's contents into the second.
void CopyArray(int * one, const int & size, int * two) {
    // Element-by-element copy (direction is irrelevant for disjoint arrays)
    for (int idx = size - 1; idx >= 0; --idx) {
        two[idx] = one[idx];
    }
}
|
<reponame>coveo/tgf-images
import argparse
import logging
import os
import re
import subprocess
from contextlib import contextmanager
from pathlib import Path
from subprocess import CompletedProcess
from typing import Generator, List
# Registry/repository that every built image is tagged for and pushed to.
TARGET_REGISTRY = "ghcr.io/coveooss/tgf"
def _run_command(command: List[str], capture_output: bool = False) -> CompletedProcess:
command_line = " ".join(command)
logging.info(f"Running command: {command_line}")
return subprocess.run(command, check=True, capture_output=capture_output)
@contextmanager
def docker_buildx_builder() -> Generator[str, None, None]:
    """Yield the name of a freshly created docker buildx builder.

    The builder is removed again when the context exits, even on error.
    """
    logging.info("Creating a docker buildx builder")
    created = _run_command(["docker", "buildx", "create"], capture_output=True)
    # `docker buildx create` prints the generated builder name on stdout
    name = created.stdout.decode("utf-8").strip()
    logging.info(f"Created docker buildx builder named {name}")
    try:
        yield name
    finally:
        logging.info(f"Removing docker buildx builder named {name}")
        _run_command(["docker", "buildx", "rm", name])
def build_and_push_dockerfile(
    builder: str,
    dockerfile: Path,
    platform: str,
    version: str,
    beta: bool = False,
) -> None:
    """Build *dockerfile* for *platform* with buildx and push the tags.

    Two tags are pushed: ``{TARGET_REGISTRY}:{version}[-beta][-variant]``
    and the same with only the major.minor portion of the version. The
    variant suffix comes from the file name, which must look like
    ``Dockerfile.<n>[.<variant>]``.

    Raises:
        ValueError: if the dockerfile name does not match that pattern.
        subprocess.CalledProcessError: if the docker build/push fails.
    """
    name = dockerfile.name
    logging.info(f"Processing file {name}")
    # Raw string for the regex (the original non-raw "\." relied on the
    # escape surviving), and an explicit error instead of the original
    # AttributeError on a None match.
    match = re.match(r"Dockerfile\.\d+(\.(.+))?", name)
    if match is None:
        raise ValueError(f"Unexpected dockerfile name: {name}")
    tag = (match.group(2) or "").lower()
    tag_suffix = f"-{tag}" if tag else ""
    version = version.lstrip("v")
    version_maj_min = ".".join(version.split(".")[:2])
    beta_part = "-beta" if beta else ""
    target_tag = f"{TARGET_REGISTRY}:{version}{beta_part}{tag_suffix}"
    target_tag_major_min = f"{TARGET_REGISTRY}:{version_maj_min}{beta_part}{tag_suffix}"
    # Substitute the version placeholders into the dockerfile contents
    dockerfile_content = dockerfile.read_text(encoding="utf-8")
    dockerfile_content = dockerfile_content.replace(
        "${GIT_TAG}", f"{version}{beta_part}"
    )
    dockerfile_content = dockerfile_content.replace(
        "TGF_IMAGE_MAJ_MIN=", f"TGF_IMAGE_MAJ_MIN={version_maj_min}"
    )
    temp_dockerfile = Path("dockerfile.temp")
    # write_text closes the file; the original leaked an open handle from
    # Path.open(...).write(...).
    temp_dockerfile.write_text(dockerfile_content, encoding="utf-8")
    logging.info(f"Building {target_tag} from {dockerfile.relative_to(Path.cwd())}")
    _run_command(
        [
            "docker",
            "buildx",
            "build",
            "--builder",
            builder,
            "-f",
            temp_dockerfile.name,
            "-t",
            target_tag,
            "-t",
            target_tag_major_min,
            "--platform",
            platform,
            # We always push because docker buildx does not look at the local
            # images, it always pull from repos.
            "--push",
            ".",
        ]
    )
    temp_dockerfile.unlink()
def main(version: str, platform: str, beta: bool = False) -> None:
    """Validate *version*, then build and push every Dockerfile* in cwd."""
    if not re.match(r"^v[^.]+\.[^.]+\.[^.]+$", version):
        raise ValueError(
            f"Expected version ({version}) be in vX.Y.Z format. It is not."
        )
    # Collect the Dockerfile variants in deterministic (sorted) order
    dockerfiles = sorted(
        candidate
        for candidate in Path.cwd().glob("Dockerfile*")
        if candidate.is_file()
    )
    relative_dockerfiles = [
        str(dockerfile.relative_to(Path.cwd())) for dockerfile in dockerfiles
    ]
    logging.info(
        f"Will build the following dockerfiles in order: {relative_dockerfiles}"
    )
    # One throwaway builder serves every build, then gets torn down
    with docker_buildx_builder() as builder:
        for dockerfile in dockerfiles:
            build_and_push_dockerfile(builder, dockerfile, platform, version, beta)
if __name__ == "__main__":
    # Log level comes from the environment (default "info"); upper-cased
    # to match the level names the logging module expects (INFO, DEBUG, ...).
    logging.getLogger().setLevel(os.getenv("BUILD_IMAGE_LOG_LEVEL", "info").upper())
    parser = argparse.ArgumentParser(description="Build and push TGF images")
    parser.add_argument("--beta", action="store_true", help="Add -beta to tags")
    parser.add_argument(
        "--platform",
        type=str,
        default="linux/arm64/v8,linux/amd64",
        help=(
            "What platform to build for. "
            "Matches docker buildx build --platform. "
            "Multiple values can be separated by commas."
        ),
    )
    parser.add_argument(
        "--version",
        required=True,
        help="What image version to tag the resulting docker images with. Should be in vX.Y.Z format.",
    )
    args = parser.parse_args()
    main(args.version, args.platform, args.beta)
|
<reponame>zakiahmad857/Advanced-Video<gh_stars>100-1000
#pragma once
// CAssistantBox dialog
#include "AGButton.h"
#include "AgoraCameraManager.h"
#include "AgoraAudInputManager.h"
#include "AgoraPlayoutManager.h"
#include "CHookPlayerInstance.h"
// CAssistantBox
// Settings/assistant dialog: exposes combo boxes for selecting the
// camera, microphone, playout device and a hooked player, plus edit
// fields for channel/uid/inviter, and renders a local video test into
// the window set via setWnd().
class CAssistantBox : public CDialogEx
{
    DECLARE_DYNAMIC(CAssistantBox)
public:
    CAssistantBox(CWnd* pParent = NULL);   // standard constructor
    virtual ~CAssistantBox();
    // Returns whether the dialog is currently shown (tracked in m_bIsShow)
    BOOL isShow(){ return m_bIsShow; }
    // Sets the window handle the local video test renders into
    void setWnd(HWND wnd){ m_hWndVideoTest = wnd; }
    virtual void OnFinalRelease();
// Dialog Data
    enum { IDD = IDD_DIALOG_AssistantBox };
protected:
    virtual void DoDataExchange(CDataExchange* pDX);    // DDX/DDV support
    DECLARE_MESSAGE_MAP()
    DECLARE_DISPATCH_MAP()
    DECLARE_INTERFACE_MAP()
    virtual BOOL OnInitDialog();
    // Mouse, non-client and visibility message handlers
    afx_msg void OnLButtonDown(UINT nFlags, CPoint point);
    afx_msg void OnNcLButtonDown(UINT nHitTest, CPoint point);
    afx_msg void OnShowWindow(BOOL bShow, UINT nStatus);
    afx_msg void OnLButtonUp(UINT nFlags, CPoint point);
    afx_msg void OnNcPaint();
    // Title-bar button click handlers
    void onButtonCloseClicked();
    void onButtonMinClicked();
public:
    // Control event handlers: invite button and device-selection combos
    afx_msg void OnBnClickedButtonInvite();
    afx_msg void OnCbnSelchangeComboCamera();
    afx_msg void OnCbnSelchangeComboMic();
    afx_msg void OnCbnSelchangeComboPlayout();
    afx_msg void OnCbnSelchangeComboPlayer();
protected:
    // Control/resource setup and teardown helpers
    inline void initCtrl();
    inline void uninitCtrl();
    inline void initResource();
    inline void uninitResource();
private:
    CAGButton m_AgBtnMin;                   // custom minimize button
    CAGButton m_AgBtnClose;                 // custom close button
    BOOL m_bIsShow;                         // current visibility flag
    CAgoraObject* m_lpAgoraObject;          // Agora SDK wrapper object
    IRtcEngine* m_lpRtcEngine;              // RTC engine interface pointer
    CAgoraCameraManager m_AgoraCameraManager;       // camera device manager
    CAgoraPlayoutManager m_AgoraPlayoutManager;     // playout device manager
    CAgoraAudInputManager m_AgoraAudioInputManager; // audio input device manager
    HWND m_hWndVideoTest;                   // target window for the video test
    CComboBox m_ComCameraList;              // camera selection combo
    CComboBox m_ComAudioInputList;          // microphone selection combo
    CComboBox m_ComPlayOutList;             // playout selection combo
    CEdit m_EditChannel;                    // channel name input
    CEdit m_EditLoginUid;                   // login uid input
    CEdit m_EditInviter;                    // inviter input
    CButton m_CheckBoxHook;                 // enable player-hook checkbox
    CComboBox m_ComHookPlayerList;          // hooked player selection combo
    CHookPlayerInstance* m_lpHookPlayerInstance;    // player hook instance
};
|
#!/bin/bash
# STIG module scaffold: validates (and, once implemented, remediates) one
# STIG rule, then emits a report. All helper functions and the ${restore},
# ${change}, ${verbose}, ${log} and ${stigid} variables come from the
# sourced bootstrap.
# Fix: path expansions (${bootstrap}, ${log}) are now quoted so paths
# containing whitespace cannot word-split the commands.

# Module specific variables go here
# Files: file=/path/to/file
# Arrays: declare -a array_name
# Strings: foo="bar"
# Integers: x=9

###############################################
# Bootstrapping environment setup
###############################################

# Get our working directory
cwd="$(pwd)"

# Define our bootstrapper location
bootstrap="${cwd}/tools/bootstrap.sh"

# Bail if it cannot be found
if [ ! -f "${bootstrap}" ]; then
  echo "Unable to locate bootstrap; ${bootstrap}" && exit 1
fi

# Load our bootstrap
source "${bootstrap}"

###############################################
# Metrics start
###############################################

# Get EPOCH
s_epoch="$(gen_epoch)"

# Create a timestamp
timestamp="$(gen_date)"

# Who is calling? 0 = singular, 1 = as part of the stigadm group
caller=$(ps $PPID | grep -c stigadm)

###############################################
# Perform restoration
###############################################

# If ${restore} = 1 go to restoration mode
if [ ${restore} -eq 1 ]; then
  report "Not yet implemented" && exit 1
fi

###############################################
# STIG validation/remediation
###############################################

# Module specific validation code should go here
#  Errors should go in ${errors[@]} array (which on remediation get handled)
#  All inspected items should go in ${inspected[@]} array
errors=("${stigid}")

# If ${change} = 1
#if [ ${change} -eq 1 ]; then
  # Create the backup env
  #backup_setup_env "${backup_path}"
  # Create a backup (configuration output, file/folder permissions output etc.)
  #bu_configuration "${backup_path}" "${author}" "${stigid}" "$(echo "${array_values[@]}" | tr ' ' '\n')"
  #bu_file "${backup_path}" "${author}" "${stigid}" "${file}"
  #if [ $? -ne 0 ]; then
    # Stop, we require a backup
    #report "Unable to create backup" && exit 1
  #fi
  # Iterate ${errors[@]}
  #for error in ${errors[@]}; do
    # Work to remediate ${error} should go here
  #done
#fi

# Remove dupes
#inspected=( $(remove_duplicates "${inspected[@]}") )

###############################################
# Results for printable report
###############################################

# If ${#errors[@]} > 0
if [ ${#errors[@]} -gt 0 ]; then
  # Set ${results} error message
  #results="Failed validation" UNCOMMENT ONCE WORK COMPLETE!
  results="Not yet implemented!"
fi

# Set ${results} passed message
[ ${#errors[@]} -eq 0 ] && results="Passed validation"

###############################################
# Report generation specifics
###############################################

# Apply some values expected for report footer
[ ${#errors[@]} -eq 0 ] && passed=1 || passed=0
[ ${#errors[@]} -gt 0 ] && failed=1 || failed=0

# Calculate a percentage from applied modules & errors incurred
percentage=$(percent ${passed} ${failed})

# If the caller was only independent
if [ ${caller} -eq 0 ]; then
  # Show failures
  [ ${#errors[@]} -gt 0 ] && print_array "${log}" "errors" "${errors[@]}"

  # Provide detailed results to ${log}
  if [ ${verbose} -eq 1 ]; then
    # Print array of failed & validated items
    [ ${#inspected[@]} -gt 0 ] && print_array "${log}" "validated" "${inspected[@]}"
  fi

  # Generate the report
  report "${results}"

  # Display the report
  cat "${log}"
else
  # Since we were called from stigadm
  module_header "${results}"

  # Show failures
  [ ${#errors[@]} -gt 0 ] && print_array "${log}" "errors" "${errors[@]}"

  # Provide detailed results to ${log}
  if [ ${verbose} -eq 1 ]; then
    # Print array of failed & validated items
    [ ${#inspected[@]} -gt 0 ] && print_array "${log}" "validated" "${inspected[@]}"
  fi

  # Finish up the module specific report
  module_footer
fi

###############################################
# Return code for larger report
###############################################

# Return an error/success code (0/1)
exit ${#errors[@]}

# Date: 2018-09-27
#
# Severity: CAT-II
# Classification: UNCLASSIFIED
# STIG_ID: V0077505
# STIG_Version: SV-92201r1
# Rule_ID: SLES-12-030450
#
# OS: SLES
# Version: 12
# Architecture:
#
# Title: The SUSE operating system wireless network adapters must be disabled unless approved and documented.
# Description: SLES
|
import sys
import os
import logging
from db.db_handler import *
from utils.crypto_utils import sign_senz
from config.config import *
# Module logger: INFO and above go to logs/stock_exchange.logs.
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
# NOTE(review): FileHandler raises if the logs/ directory does not exist —
# confirm the deployment creates it before this module is imported.
filehandler = logging.FileHandler('logs/stock_exchange.logs')
filehandler.setLevel(logging.INFO)
# create a logging format
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
filehandler.setFormatter(formatter)
# add the handlers to the logger
logger.addHandler(filehandler)
class SenzHandler():
    """
    Handle incoming senz messages. We deal with the following senz types:

    1. GET
    2. PUT
    3. SHARE
    4. DATA
    5. DELETE
    6. UNSHARE

    A different operation is carried out for each senz type.

    Fixes: identity comparisons now use ``is None`` / ``is not None``
    instead of ``== None``, and handleSenz no longer overwrites its own
    ``senz`` parameter when building the outgoing reply.
    """

    def __init__(self, transport):
        """
        Initialize the udp transport. The transport is used to send
        messages to the udp socket.

        Args:
            transport - twisted transport instance
        """
        self.transport = transport

    def handleSenz(self, senz):
        """
        Handle the different types of senz. This function is called
        asynchronously by twisted (thread safe mode via the twisted
        library) whenever a senz message is received.
        """
        logger.info('senz received %s' % senz.type)
        dbh = db_handler()
        # temporary seeding hook: a senz with no receiver triggers test data
        if senz.receiver is None:
            dbh.testData()
        if senz.type == "DATA" and senz.receiver is not None:
            flag = senz.attributes["#f"]
            if flag == "ct":
                logger.info('Doing p2p Transaction ::%s' % senz)
                dbh.addCoinWiseTransaction(senz.attributes)
            else:
                # Both flags currently persist the same way; kept as-is
                dbh.addCoinWiseTransaction(senz.attributes)
        elif senz.type == "SHARE":
            flag = senz.attributes["#f"]
            if flag == "cv":
                # Reply with the current coin value, signed, via the transport
                senze = 'PUT #COIN_VALUE %s ' % (dbh.calulateCoinsValue())
                outgoing = str(senze) + "@%s ^%s" % (senz.sender, clientname)
                signed_senz = sign_senz(outgoing)
                logger.info('Auto Excute: %s' % signed_senz)
                self.transport.write(signed_senz)
            if flag == "ctr":
                logger.info('Request Massage Transaction :: %s' % senz)
        elif senz.type == "DELETE":
            coin = senz.attributes["#COIN"]
            # NOTE(review): db_handler's API spells this 'delectCoinDetail';
            # kept as-is since renaming is a db_handler change.
            dbh.delectCoinDetail(coin)
        elif senz.type == "UNSHARE":
            pass

    def postHandle(self, arg):
        """
        Called after a senz message has been handled (callback function).
        """
        logger.info("Post Handled")
|
#!/bin/bash
# Download the pretrained GASP saliency-prediction checkpoint into last_model.pt
model_url="https://www2.informatik.uni-hamburg.de/WTM/corpora/GASP/gazenet/models/saliency_prediction/gasp/checkpoints/pretrained_sequencegaspdamencgmualstmconv/SequenceGASPDAMEncGMUALSTMConv/53ea3d5639d647fc86e3974d6e1d1719/last_model.pt"
wget -O last_model.pt "${model_url}"
|
package main;
import java.util.Scanner;
public class WindChillTemperature
{
    /**
     * Computes the NWS wind chill index in degrees Fahrenheit.
     *
     * Extracted from main() so the formula is testable; the duplicated
     * Math.pow(windSpeed, 0.16) call is now evaluated once.
     *
     * @param temperature ambient temperature in degrees Fahrenheit
     * @param windSpeed   wind speed in miles per hour
     * @return the wind chill index in degrees Fahrenheit
     */
    public static double computeWindChill(double temperature, double windSpeed)
    {
        double windFactor = Math.pow(windSpeed, 0.16);
        return 35.74 + (0.6215 * temperature) - (35.75 * windFactor)
                + (0.4275 * temperature * windFactor);
    }

    /**
     * Reads a temperature and wind speed from stdin and prints the
     * resulting wind chill index.
     */
    public static void main(String[] args)
    {
        Scanner input = new Scanner(System.in);
        System.out.print("Enter the temperature in Fahrenheit: ");
        double temperature = input.nextDouble();
        System.out.print("Enter the wind speed miles per hour: ");
        double windSpeed = input.nextDouble();
        // Fixed local-name typo: was "willChillIndex"
        double windChillIndex = computeWindChill(temperature, windSpeed);
        System.out.print("The wind chill index is " + windChillIndex);
        input.close();
    }
}
package com.wmedya.javatools.test;
import org.junit.Before;
import org.junit.Test;
import com.wmedya.javatools.numbertoword.lang.EnglishNumberToWord;
import junit.framework.Assert;
public class EnglishNumberToWordTests {
private EnglishNumberToWord toWords;
@Before
public void setUp() {
toWords = new EnglishNumberToWord();
}
@Test
public void oneDigitNumberTest() {
String number = "6";
String output = toWords.readDigits(number);
Assert.assertEquals("six", output);
}
@Test
public void twoDigitsBelowTwentyNumberTest() {
String number = "15";
String output = toWords.readDigits(number);
Assert.assertEquals("fifteen", output);
}
@Test
public void twoDigitsLastOneZeroNumberTest() {
String number = "70";
String output = toWords.readDigits(number);
Assert.assertEquals("seventy", output);
}
@Test
public void twoDigitsAboveTwentyNumberTest() {
String number = "46";
String output = toWords.readDigits(number);
Assert.assertEquals("fourty six", output);
}
@Test
public void threeDigitsBelowTwoHundredNumberTest() {
String number = "156";
String output = toWords.readDigits(number);
Assert.assertEquals("one hundred fifty six", output);
}
@Test
public void threeDigitsAboveTwoHundredNumberTest() {
String number = "785";
String output = toWords.readDigits(number);
Assert.assertEquals("seven hundred eighty five", output);
}
@Test
public void fourDigitsBelowTwoThousandNumberTest() {
String number = "1785";
String output = toWords.readDigits(number);
Assert.assertEquals("one thousand seven hundred eighty five", output);
}
@Test
public void fourDigitsAboveTwoThousandNumberTest() {
String number = "2643";
String output = toWords.readDigits(number);
Assert.assertEquals("two thousand six hundred fourty three", output);
}
@Test
public void fiveDigitsNumberTest() {
String number = "32643";
String output = toWords.readDigits(number);
Assert.assertEquals("thirty two thousand six hundred fourty three", output);
}
@Test
public void sixDigitsNumberTest() {
String number = "132643";
String output = toWords.readDigits(number);
Assert.assertEquals("one hundred thirty two thousand six hundred fourty three", output);
}
@Test
public void millionsNumberTest() {
String number = "1954785";
String output = toWords.readDigits(number);
Assert.assertEquals("one million nine hundred fifty four thousand seven hundred eighty five", output);
number = "21954785";
output = toWords.readDigits(number);
Assert.assertEquals("twenty one million nine hundred fifty four thousand seven hundred eighty five", output);
number = "321954785";
output = toWords.readDigits(number);
Assert.assertEquals("three hundred twenty one million nine hundred fifty four thousand seven hundred eighty five", output);
}
@Test
public void billionsNumberTest() {
String number = "5321954785";
String output = toWords.readDigits(number);
Assert.assertEquals("five billion three hundred twenty one million nine hundred fifty four thousand seven hundred eighty five", output);
number = "45321954785";
output = toWords.readDigits(number);
Assert.assertEquals("fourty five billion three hundred twenty one million nine hundred fifty four thousand seven hundred eighty five",
output);
number = "983321954785";
output = toWords.readDigits(number);
Assert.assertEquals(
"nine hundred eighty three billion three hundred twenty one million nine hundred fifty four thousand seven hundred eighty five",
output);
}
@Test
public void reallyBigNumberTest() {
String number = "983321954785983321954785";
String output = toWords.readDigits(number);
Assert.assertEquals(
"nine hundred eighty three sextillion three hundred twenty one quintillion nine hundred fifty four quadrillion seven hundred eighty five trillion nine hundred eighty three billion three hundred twenty one million nine hundred fifty four thousand seven hundred eighty five",
output);
}
@Test
public void extremePointsTest() {
String number = "70";
String output = toWords.readDigits(number);
Assert.assertEquals("seventy", output);
number = "700";
output = toWords.readDigits(number);
Assert.assertEquals("seven hundred", output);
number = "701";
output = toWords.readDigits(number);
Assert.assertEquals("seven hundred one", output);
number = "7000";
output = toWords.readDigits(number);
Assert.assertEquals("seven thousand", output);
number = "7001";
output = toWords.readDigits(number);
Assert.assertEquals("seven thousand one", output);
number = "70000";
output = toWords.readDigits(number);
Assert.assertEquals("seventy thousand", output);
number = "70001";
output = toWords.readDigits(number);
Assert.assertEquals("seventy thousand one", output);
number = "7100010001";
output = toWords.readDigits(number);
Assert.assertEquals("seven billion one hundred million ten thousand one", output);
number = "7000000001";
output = toWords.readDigits(number);
Assert.assertEquals("seven billion one", output);
}
@Test
public void specialCaseZerosNumberTest() {
String number = "0";
String output = toWords.readDigitsWithLeadingZeros(number);
Assert.assertEquals("oh", output);
number = "07";
output = toWords.readDigitsWithLeadingZeros(number);
Assert.assertEquals("oh seven", output);
number = "007";
output = toWords.readDigitsWithLeadingZeros(number);
Assert.assertEquals("double oh seven", output);
number = "0070";
output = toWords.readDigitsWithLeadingZeros(number);
Assert.assertEquals("double oh seventy", output);
number = "00700";
output = toWords.readDigitsWithLeadingZeros(number);
Assert.assertEquals("double oh seven hundred", output);
number = "00777";
output = toWords.readDigitsWithLeadingZeros(number);
Assert.assertEquals("double oh seven hundred seventy seven", output);
number = "0003";
output = toWords.readDigitsWithLeadingZeros(number);
Assert.assertEquals("triple oh three", output);
number = "000000003";
output = toWords.readDigitsWithLeadingZeros(number);
Assert.assertEquals("octuple oh three", output);
}
}
|
<reponame>dk123sw/hybrid-Development
package com.example.jingbin.webviewstudy.audio_record;
import android.Manifest;
import android.app.Activity;
import android.app.AppOpsManager;
import android.content.Context;
import android.content.pm.PackageManager;
import android.os.Build;
import android.support.v4.app.ActivityCompat;
import android.util.Log;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
/**
 * Static helpers for Android runtime permissions (API 23+) and for probing
 * whether notifications are enabled via AppOpsManager reflection.
 *
 * NOTE(review): every request* method casts the supplied Context to an
 * Activity -- callers must pass an Activity context or a ClassCastException
 * is thrown at runtime.
 */
public class PermissionUtil {
    private static final String TAG = "PermissionUtil";

    /**
     * Requests every permission in {@code permissions} that is not yet granted.
     *
     * @param context     an Activity context (cast to Activity for the request)
     * @param permissions the permissions to check and, if needed, request
     * @param request     request code forwarded to onRequestPermissionsResult
     * @return the number of permissions actually requested; 0 when everything
     *         was already granted or the device runs below API 23
     */
    public static int requestPermissions(Context context, String[] permissions, int request) {
        if (Build.VERSION.SDK_INT >= 23) {
            List<String> missing = new ArrayList<>();
            for (String permission : permissions) {
                if (ActivityCompat.checkSelfPermission(context, permission) != PackageManager.PERMISSION_GRANTED) {
                    missing.add(permission);
                }
            }
            if (!missing.isEmpty()) {
                ActivityCompat.requestPermissions((Activity) context,
                        missing.toArray(new String[missing.size()]), request);
                return missing.size();
            }
        }
        return 0;
    }

    /**
     * NOTE(review): on API 23+ SYSTEM_ALERT_WINDOW is a "special" permission;
     * checkSelfPermission() does not reflect the user's overlay choice --
     * Settings.canDrawOverlays(context) is the documented check. Confirm
     * before relying on this result.
     */
    public static boolean isHasSystemAlertWindow(Context context) {
        return ActivityCompat.checkSelfPermission(context, Manifest.permission.SYSTEM_ALERT_WINDOW) == PackageManager.PERMISSION_GRANTED;
    }

    /**
     * Requests external-storage write permission if it is not yet granted.
     *
     * @param context an Activity context
     */
    public static void requestSDCardWrite(Context context) {
        if (Build.VERSION.SDK_INT >= 23) {
            if (!isHasSDCardWritePermission(context)) {
                ActivityCompat.requestPermissions((Activity) context, PermissionManager.PERMISSION_SD_WRITE, PermissionManager.REQUEST_SD_WRITE);
            }
        }
    }

    /**
     * @param context any Context
     * @return whether WRITE_EXTERNAL_STORAGE is granted
     */
    public static boolean isHasSDCardWritePermission(Context context) {
        return ActivityCompat.checkSelfPermission(context, Manifest.permission.WRITE_EXTERNAL_STORAGE) == PackageManager.PERMISSION_GRANTED;
    }

    /**
     * Requests coarse-location permission if it is not yet granted.
     *
     * @param context an Activity context
     */
    public static void requestLocationPermission(Context context) {
        if (Build.VERSION.SDK_INT >= 23) {
            if (!isHasLocationPermission(context)) {
                ActivityCompat.requestPermissions((Activity) context, PermissionManager.PERMISSION_LOCATION, PermissionManager.REQUEST_LOCATION);
            }
        }
    }

    /**
     * @param context any Context
     * @return whether ACCESS_COARSE_LOCATION is granted
     */
    public static boolean isHasLocationPermission(Context context) {
        return ActivityCompat.checkSelfPermission(context, Manifest.permission.ACCESS_COARSE_LOCATION) == PackageManager.PERMISSION_GRANTED;
    }

    /**
     * @param context any Context
     * @return whether CAMERA is granted
     */
    public static boolean isHasCameraPermission(Context context) {
        return ActivityCompat.checkSelfPermission(context, Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED;
    }

    /** Requests camera permission if it is not yet granted. */
    public static void requestCameraPermission(Context context) {
        if (Build.VERSION.SDK_INT >= 23) {
            if (!isHasCameraPermission(context)) {
                Log.d(TAG, "requestCameraPermission: ----->" + true);
                ActivityCompat.requestPermissions((Activity) context, PermissionManager.PERMISSION_CAMERA, PermissionManager.REQUEST_CAMERA);
            }
        }
    }

    /**
     * Requests audio-record permission if it is not yet granted.
     *
     * @param context an Activity context
     */
    public static void requestRecordPermission(Context context) {
        if (Build.VERSION.SDK_INT >= 23) {
            if (!isHasRecordPermission(context)) {
                ActivityCompat.requestPermissions((Activity) context, PermissionManager.PERMISSION_RECORD, PermissionManager.REQUEST_RECORD);
            }
        }
    }

    /**
     * @param context any Context
     * @return whether RECORD_AUDIO is granted
     */
    public static boolean isHasRecordPermission(Context context) {
        return ActivityCompat.checkSelfPermission(context, Manifest.permission.RECORD_AUDIO) == PackageManager.PERMISSION_GRANTED;
    }

    /**
     * @param context any Context
     * @return whether READ_PHONE_STATE is granted
     */
    public static boolean isHasReadPhonePermission(Context context) {
        return ActivityCompat.checkSelfPermission(context, Manifest.permission.READ_PHONE_STATE) == PackageManager.PERMISSION_GRANTED;
    }

    /** Requests READ_PHONE_STATE permission if it is not yet granted. */
    public static void requestReadPhonePermission(Context context) {
        if (Build.VERSION.SDK_INT >= 23) {
            if (!isHasReadPhonePermission(context)) {
                ActivityCompat.requestPermissions((Activity) context, PermissionManager.PERMISSION_READ_PHONE, PermissionManager.REQUEST_READ_PHONE);
            }
        }
    }

    /**
     * Checks whether the app may post notifications by reflecting on
     * AppOpsManager's hidden OP_POST_NOTIFICATION op (checkOpNoThrow has no
     * public equivalent on these API levels).
     *
     * @return true when allowed, on pre-KitKat devices, or when reflection fails
     */
    public static boolean isNotificationEnabled(Context context) {
        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
            // checkOpNoThrow only exists from KitKat on; assume enabled.
            return true;
        }
        try {
            AppOpsManager appOps = (AppOpsManager) context.getSystemService(Context.APP_OPS_SERVICE);
            Class<?> appOpsClass = Class.forName(AppOpsManager.class.getName());
            Field opPostNotificationValue = appOpsClass.getDeclaredField("OP_POST_NOTIFICATION");
            // Static field: Field.get ignores its receiver argument, so pass null.
            int value = (int) opPostNotificationValue.get(null);
            // App uid.
            int uid = context.getApplicationInfo().uid;
            // App package name.
            String pkg = context.getApplicationContext().getPackageName();
            // Query the op for this uid/package.
            Method checkOpNoThrowMethod = appOpsClass.getMethod("checkOpNoThrow", Integer.TYPE, Integer.TYPE, String.class);
            return ((int) checkOpNoThrowMethod.invoke(appOps, value, uid, pkg) == AppOpsManager.MODE_ALLOWED);
        } catch (Exception e) {
            e.printStackTrace();
        }
        return true;
    }
}
|
package com.github.peacetrue.beans.create;
import com.github.peacetrue.beans.properties.createtime.CreatedTime;
import com.github.peacetrue.beans.properties.creatorid.CreatorId;
/**
 * Aggregate creation contract: combines the create capability and awareness
 * interfaces with the creator-id and created-time property interfaces, so a
 * bean can implement all four by declaring this single interface.
 *
 * @param <T> type of the creator id (used by {@link CreatorId})
 * @param <S> type of the creation timestamp (used by {@link CreatedTime})
 * @author peace
 * @since 1.0
 **/
public interface Create<T, S> extends
CreateCapable<T, S>, CreateAware<T, S>,
CreatorId<T>, CreatedTime<S> {
}
|
<gh_stars>1-10
/**
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at the
* <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache homepage</a>
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Initial code contributed and copyrighted by<br>
* frentix GmbH, http://www.frentix.com
* <p>
*/
package org.olat.course.assessment.ui.tool;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import org.olat.core.gui.UserRequest;
import org.olat.core.gui.components.form.flexible.FormItem;
import org.olat.core.gui.components.form.flexible.FormItemContainer;
import org.olat.core.gui.components.form.flexible.elements.FlexiTableElement;
import org.olat.core.gui.components.form.flexible.elements.FormLink;
import org.olat.core.gui.components.form.flexible.impl.FormBasicController;
import org.olat.core.gui.components.form.flexible.impl.FormEvent;
import org.olat.core.gui.components.form.flexible.impl.elements.table.DefaultFlexiColumnModel;
import org.olat.core.gui.components.form.flexible.impl.elements.table.FlexiTableColumnModel;
import org.olat.core.gui.components.form.flexible.impl.elements.table.FlexiTableDataModelFactory;
import org.olat.core.gui.components.form.flexible.impl.elements.table.SelectionEvent;
import org.olat.core.gui.components.link.Link;
import org.olat.core.gui.components.tree.TreeModel;
import org.olat.core.gui.components.tree.TreeNode;
import org.olat.core.gui.control.Controller;
import org.olat.core.gui.control.Event;
import org.olat.core.gui.control.WindowControl;
import org.olat.core.gui.control.generic.closablewrapper.CalloutSettings;
import org.olat.core.gui.control.generic.closablewrapper.CloseableCalloutWindowController;
import org.olat.core.id.Identity;
import org.olat.core.util.Util;
import org.olat.core.util.tree.TreeVisitor;
import org.olat.course.CourseFactory;
import org.olat.course.ICourse;
import org.olat.course.assessment.AssessmentHelper;
import org.olat.course.assessment.AssessmentModule;
import org.olat.course.assessment.IndentedNodeRenderer;
import org.olat.course.assessment.ui.tool.CourseNodeToReviewTableModel.ToReviewCols;
import org.olat.course.assessment.ui.tool.event.CourseNodeIdentityEvent;
import org.olat.course.nodes.CourseNode;
import org.olat.modules.assessment.AssessmentEntry;
import org.olat.modules.assessment.ui.AssessedIdentityListState;
import org.olat.modules.assessment.ui.AssessmentToolSecurityCallback;
import org.olat.repository.RepositoryEntry;
import org.olat.user.UserManager;
import org.springframework.beans.factory.annotation.Autowired;
/**
 * Abstract "small" overview controller listing course nodes that have
 * assessment entries waiting for review. Each table row shows a course node
 * and either a direct link to the single participant concerned or a
 * "n to review" link that opens a callout with an identity chooser.
 * Selecting a participant fires a CourseNodeIdentityEvent for the parent
 * controller to handle. Subclasses supply the data (loadNodeIdentToEntries),
 * the icon, the title / empty-table i18n keys and the identity filter state.
 *
 * Initial date: 26 Nov 2021<br>
 * @author uhensler, http://www.frentix.com
 *
 */
public abstract class CourseNodeToReviewAbstractSmallController extends FormBasicController {
// Table command: the course-node cell was clicked (select node, no identity).
private static final String CMD_IDENTITIES = "identities";
// Form-link command: a participant link in a row was clicked.
private static final String CMD_IDENTITY = "identity";
private FlexiTableElement tableEl;
private CourseNodeToReviewTableModel usersTableModel;
// Callout window hosting the identity chooser for rows with several users.
private CloseableCalloutWindowController ccwc;
private Controller identitySelectionCtrl;
protected final RepositoryEntry courseEntry;
protected final AssessmentToolSecurityCallback assessmentCallback;
// Course node ident -> course node, built once from the assessment tree.
protected final Map<String, CourseNode> nodeIdentToCourseNode;
// Monotonic suffix keeping generated form-link component names unique.
private int counter = 0;
@Autowired
private UserManager userManager;
/** CSS icon class shown next to the title and in the empty-table message. */
protected abstract String getIconCssClass();
/** i18n key of the title used when there is nothing to review. */
protected abstract String getTitleI18nKey();
/** i18n key of the title taking the number of reviews as its argument. */
protected abstract String getTitleNumberI18nKey();
/** i18n key of the message shown when the table is empty. */
protected abstract String getTableEmptyI18nKey();
/** @return assessment entries to review, grouped by course node ident */
protected abstract Map<String, List<AssessmentEntry>> loadNodeIdentToEntries();
/** @return filter state passed along when an identity is selected */
protected abstract Supplier<AssessedIdentityListState> getIdentityFilter();
protected CourseNodeToReviewAbstractSmallController(UserRequest ureq, WindowControl wControl,
RepositoryEntry courseEntry, AssessmentToolSecurityCallback assessmentCallback) {
super(ureq, wControl, "overview_to_review");
setTranslator(Util.createPackageTranslator(AssessmentModule.class, getLocale(), getTranslator()));
setTranslator(userManager.getPropertyHandlerTranslator(getTranslator()));
this.courseEntry = courseEntry;
this.assessmentCallback = assessmentCallback;
nodeIdentToCourseNode = new HashMap<>();
// Walk the assessment tree once and index every course node by its ident.
ICourse course = CourseFactory.loadCourse(courseEntry);
TreeModel tm = AssessmentHelper.assessmentTreeModel(course);
new TreeVisitor(node -> {
if(node instanceof TreeNode) {
Object uobject = ((TreeNode)node).getUserObject();
if(uobject instanceof CourseNode) {
CourseNode tNode = (CourseNode)uobject;
nodeIdentToCourseNode.put(tNode.getIdent(), tNode);
}
}
}, tm.getRootNode(), false).visitAll();
initForm(ureq);
loadModel();
}
@Override
protected void initForm(FormItemContainer formLayout, Controller listener, UserRequest ureq) {
flc.contextPut("iconCssClass", getIconCssClass());
FlexiTableColumnModel columnsModel = FlexiTableDataModelFactory.createFlexiTableColumnModel();
// Render the course node with its type icon but without tree indentation.
IndentedNodeRenderer intendedNodeRenderer = new IndentedNodeRenderer();
intendedNodeRenderer.setIndentationEnabled(false);
DefaultFlexiColumnModel nodeModel = new DefaultFlexiColumnModel(ToReviewCols.courseNode, CMD_IDENTITIES, intendedNodeRenderer);
columnsModel.addFlexiColumnModel(nodeModel);
columnsModel.addFlexiColumnModel(new DefaultFlexiColumnModel(ToReviewCols.participant));
usersTableModel = new CourseNodeToReviewTableModel(columnsModel);
tableEl = uifactory.addTableElement(getWindowControl(), "table", usersTableModel, 20, false, getTranslator(), formLayout);
tableEl.setNumOfRowsEnabled(false);
tableEl.setExportEnabled(false);
tableEl.setCustomizeColumns(false);
tableEl.setEmptyTableSettings(getTableEmptyI18nKey(), null, getIconCssClass());
}
/**
 * (Re)builds the table rows from the subclass-provided entries and updates
 * the box title with the total number of identities to review.
 */
private void loadModel() {
Map<String, List<AssessmentEntry>> nodeIdentToEntries = loadNodeIdentToEntries();
List<CourseNodeToReviewRow> rows = new ArrayList<>(nodeIdentToEntries.size());
for (Map.Entry<String, List<AssessmentEntry>> entry : nodeIdentToEntries.entrySet()) {
String nodeIdent = entry.getKey();
// Ignore entries whose node is not (or no longer) part of the course tree.
if (nodeIdentToCourseNode.containsKey(nodeIdent)) {
List<Identity> identities = entry.getValue().stream()
.map(AssessmentEntry::getIdentity)
.distinct()
.collect(Collectors.toList());
if (!identities.isEmpty()) {
CourseNodeToReviewRow row = new CourseNodeToReviewRow(nodeIdentToCourseNode.get(nodeIdent), identities);
// Single participant: show the name directly; several: show a count link.
String identityLabel = identities.size() == 1
? userManager.getUserDisplayName(identities.get(0))
: translate("participants.to.review", Integer.toString(identities.size())) + " <i class='o_icon o_icon_info'> </i>";
FormLink identityLink = uifactory.addFormLink("o_user_" + counter++, CMD_IDENTITY, identityLabel, null, null, Link.NONTRANSLATED);
identityLink.setUserObject(row);
row.setIdentityLink(identityLink);
rows.add(row);
}
}
}
int numReviews = rows.stream().mapToInt(row -> row.getIdentities().size()).sum();
String title = numReviews > 0
? translate(getTitleNumberI18nKey(), Integer.toString(numReviews))
: translate(getTitleI18nKey());
flc.contextPut("title", title);
usersTableModel.setObjects(rows);
tableEl.reset();
}
@Override
protected void formInnerEvent(UserRequest ureq, FormItem source, FormEvent event) {
if(source == tableEl) {
if(event instanceof SelectionEvent) {
SelectionEvent se = (SelectionEvent)event;
if(CMD_IDENTITIES.equals(se.getCommand())) {
// Course-node cell clicked: select the node without a specific identity.
int index = se.getIndex();
CourseNodeToReviewRow row = usersTableModel.getObject(index);
doSelectIdentity(ureq, row.getCourseNodeIdent(), null);
}
}
} else if (source instanceof FormLink) {
FormLink link = (FormLink)source;
if (CMD_IDENTITY.equals(link.getCmd())) {
// Participant link clicked: select directly or open the chooser callout.
CourseNodeToReviewRow row = (CourseNodeToReviewRow)link.getUserObject();
doSelectIdentity(ureq, row, link);
}
}
super.formInnerEvent(ureq, source, event);
}
@Override
protected void event(UserRequest ureq, Controller source, Event event) {
if (source == identitySelectionCtrl) {
// Identity chosen in the callout: close it and bubble the event up.
ccwc.deactivate();
cleanUp();
fireEvent(ureq, event);
}
super.event(ureq, source, event);
}
// Dispose the callout and its content controller after use.
private void cleanUp() {
removeAsListenerAndDispose(identitySelectionCtrl);
removeAsListenerAndDispose(ccwc);
identitySelectionCtrl = null;
ccwc = null;
}
@Override
protected void formOK(UserRequest ureq) {
//
}
/**
 * Handles a click on a row's participant link: with zero or one identity the
 * selection is fired immediately, otherwise a callout chooser is opened.
 */
private void doSelectIdentity(UserRequest ureq, CourseNodeToReviewRow row, FormLink link) {
removeAsListenerAndDispose(ccwc);
removeAsListenerAndDispose(identitySelectionCtrl);
if (row.getIdentities().size() <=1) {
Identity identity = row.getIdentities().size() == 1? row.getIdentities().get(0): null;
doSelectIdentity(ureq, row.getCourseNodeIdent(), identity);
} else {
identitySelectionCtrl = new IdentitySelectionController(ureq, getWindowControl(), row.getCourseNodeIdent(),
row.getIdentities(), getIdentityFilter());
listenTo(identitySelectionCtrl);
CalloutSettings settings = new CalloutSettings();
ccwc = new CloseableCalloutWindowController(ureq, getWindowControl(), identitySelectionCtrl.getInitialComponent(),
link.getFormDispatchId(), "", true, "", settings);
listenTo(ccwc);
ccwc.activate();
}
}
// Notify the parent controller of the node/identity selection (identity may be null).
private void doSelectIdentity(UserRequest ureq, String courseNodeIdent, Identity identity) {
fireEvent(ureq, new CourseNodeIdentityEvent(courseNodeIdent, identity, getIdentityFilter()));
}
}
/**
 * Simple immutable-after-construction record of a student's identity and
 * the marks obtained in three subjects.
 */
public class Student {
    private String name;
    private int rollNo;
    private int marks1;
    private int marks2;
    private int marks3;

    /**
     * @param name   the student's display name
     * @param rollNo the student's roll number
     * @param marks1 marks in the first subject
     * @param marks2 marks in the second subject
     * @param marks3 marks in the third subject
     */
    public Student(String name, int rollNo, int marks1, int marks2, int marks3) {
        this.name = name;
        this.rollNo = rollNo;
        this.marks1 = marks1;
        this.marks2 = marks2;
        this.marks3 = marks3;
    }

    /** @return the student's name */
    public String getName() {
        return name;
    }

    /** @return the student's roll number */
    public int getRollNo() {
        return rollNo;
    }

    /** @return marks in the first subject */
    public int getMarks1() {
        return marks1;
    }

    /** @return marks in the second subject */
    public int getMarks2() {
        return marks2;
    }

    /** @return marks in the third subject */
    public int getMarks3() {
        return marks3;
    }
}
#!/usr/bin/env bash
# Goal of the script :
# 1) Deploy Launcher mission control template using the parameters passed to authenticate the user,
# 2) Setup the Github identity (account & token) &
# 3) Patch jenkins to use admin as role
#
# Command to be used
# ./deploy_launcher_minishift.sh -p projectName -i username:password -g myGithubUser:myGithubToken OR
# ./deploy_launcher_minishift.sh -p projectName -t myOpenShiftToken -g myGithubUser:myGithubToken OR
# ./deploy_launcher_minishift.sh -p projectName -i username:password -g myGithubUser:myGithubToken -v v3

# Set default values.
PROJECTNAME="devex"
id="admin:admin"
VERSION="v15"
CATALOG="https://github.com/fabric8-launcher/launcher-booster-catalog.git"

while getopts p:g:t:i:v:c: option; do
    case "${option}" in
        p) PROJECTNAME=${OPTARG};;
        g) github=${OPTARG};;
        i) id=${OPTARG};;
        t) TOKEN=${OPTARG};;
        v) VERSION=${OPTARG};;
        c) CATALOG=${OPTARG};;
    esac
done

CONSOLE_URL=$(minishift console --url)

# Split the "user:secret" pairs into arrays; -r keeps backslashes literal.
IFS=':' read -r -a IDENTITY <<< "$id"
IFS=':' read -r -a GITHUB_IDENTITY <<< "$github"

# Extract the bare IPv4 address from the console URL.
HOSTNAMEORIP=$(echo "$CONSOLE_URL" | grep -E -o "([0-9]{1,3}[\.]){3}[0-9]{1,3}")

echo "-----------------Parameters -------------------------"
echo "Host: $HOSTNAMEORIP"
echo "Project: $PROJECTNAME"
echo "Console: $CONSOLE_URL"
echo "Github user: ${GITHUB_IDENTITY[0]}"
echo "Github token: ${GITHUB_IDENTITY[1]}"
echo "Identity: ${IDENTITY[0]}, ${IDENTITY[1]}"
echo "Version: $VERSION"
echo "Catalog: $CATALOG"
echo "------------------------------------------"

echo "------------------- Log on to OpenShift Platform -----------------------"
if [ -n "$TOKEN" ]; then
    oc login "$CONSOLE_URL" --token="$TOKEN"
else
    echo "oc login $CONSOLE_URL -u ${IDENTITY[0]} -p ${IDENTITY[1]}"
    oc login "$CONSOLE_URL" -u "${IDENTITY[0]}" -p "${IDENTITY[1]}"
fi
echo "------------------------------------------"

# Create Project where launcher-mission control will be deployed
echo "------------------ Create New Project ----------------------"
oc new-project "$PROJECTNAME"
echo "------------------------------------------"

# Install the launchpad-missioncontrol template
echo "----------------- Install Launchpad template --------------------"
oc create -n "$PROJECTNAME" -f "https://raw.githubusercontent.com/fabric8-launcher/launcher-openshift-templates/$VERSION/openshift/launcher-template.yaml"
echo "------------------------------------------"

echo "------------------ Create launch pad mission application ---------------------"
oc new-app fabric8-launcher -n "$PROJECTNAME" \
  -p LAUNCHER_BOOSTER_CATALOG_REF="master" \
  -p LAUNCHER_BOOSTER_CATALOG_REPOSITORY="$CATALOG" \
  -p LAUNCHER_MISSIONCONTROL_GITHUB_USERNAME="${GITHUB_IDENTITY[0]}" \
  -p LAUNCHER_MISSIONCONTROL_GITHUB_TOKEN="${GITHUB_IDENTITY[1]}" \
  -p LAUNCHER_MISSIONCONTROL_OPENSHIFT_CONSOLE_URL="$CONSOLE_URL" \
  -p LAUNCHER_MISSIONCONTROL_OPENSHIFT_API_URL="$CONSOLE_URL" \
  -p LAUNCHER_MISSIONCONTROL_OPENSHIFT_USERNAME="${IDENTITY[0]}" \
  -p LAUNCHER_MISSIONCONTROL_OPENSHIFT_PASSWORD="${IDENTITY[1]}" \
  -p LAUNCHER_KEYCLOAK_URL= \
  -p LAUNCHER_KEYCLOAK_REALM=
echo "------------------------------------------"
<gh_stars>1-10
# -*- coding: utf-8 -*-
# API REST:
from rest_framework import serializers
# Modelos:
from django.contrib.auth.models import User
from .models import Profile
class UserSerializer(serializers.HyperlinkedModelSerializer):
    """Hyperlinked serializer for Django auth ``User`` records.

    Adds a read-only ``full_name`` field computed from the user's
    first and last names.
    """

    full_name = serializers.SerializerMethodField()

    class Meta:
        model = User
        fields = (
            'pk',
            'url',
            'username',
            'first_name',
            'last_name',
            'full_name',
            'email',
            'is_active',
        )

    def get_full_name(self, obj):
        """Return ``obj.get_full_name()``, or 0 when it cannot be computed.

        NOTE(review): 0 looks like an accidental fallback for a text field;
        confirm whether "" was intended before changing it.
        """
        try:
            return obj.get_full_name()
        except Exception:
            # Narrowed from bare ``except:`` so SystemExit/KeyboardInterrupt
            # are not swallowed.
            return 0
class ProfileSerializer(serializers.HyperlinkedModelSerializer):
    """Hyperlinked serializer for ``Profile`` records.

    Adds a read-only ``username`` field taken from the related user.
    """

    username = serializers.SerializerMethodField()

    class Meta:
        model = Profile
        fields = (
            'pk',
            'url',
            'username',
            'puesto',
            'clave',
            'fecha_nacimiento',
            'imagen',
            'firma',
            'costo',
            'comentarios',
        )

    def get_username(self, obj):
        """Return the related user's username, or "" when unavailable."""
        try:
            return obj.user.username
        except Exception:
            # Narrowed from bare ``except:`` so SystemExit/KeyboardInterrupt
            # are not swallowed.
            return ""
class ProfileExcelSerializer(serializers.HyperlinkedModelSerializer):
    """Serializer for ``Profile`` records used in Excel exports.

    Flattens the related user's username, full name and email into the
    profile representation.
    """

    username = serializers.SerializerMethodField()
    full_name = serializers.SerializerMethodField()
    email = serializers.SerializerMethodField()

    class Meta:
        model = Profile
        fields = (
            'pk',
            'url',
            'username',
            'full_name',
            'puesto',
            'clave',
            'email',
            'fecha_nacimiento',
            'imagen',
            'costo',
            'comentarios',
        )

    def get_username(self, obj):
        """Return the related user's username, or "" when unavailable."""
        try:
            return obj.user.username
        except Exception:
            # Narrowed from bare ``except:`` so SystemExit/KeyboardInterrupt
            # are not swallowed.
            return ""

    def get_full_name(self, obj):
        """Return the related user's full name, or 0 when unavailable.

        NOTE(review): 0 looks like an accidental fallback for a text field
        (``get_username`` above returns ""); confirm before changing it.
        """
        try:
            return obj.user.get_full_name()
        except Exception:
            return 0

    def get_email(self, obj):
        """Return the related user's email, or 0 when unavailable.

        NOTE(review): same questionable 0 fallback as ``get_full_name``.
        """
        try:
            return obj.user.email
        except Exception:
            return 0
|
/**
 * Game lifecycle helper. Relies on globals that must already exist in the
 * page: `database` (a Firebase Database reference root), `Player`, `Form`,
 * and the variables `player` and `form` that `start()` assigns to.
 */
class Game {
constructor() {}
// Reads the 'gameState' node from Firebase.
// NOTE(review): the value from the async "value" callback is assigned to the
// local variable `gameState` and then discarded -- callers cannot observe it.
// Confirm whether this should write to a shared/global state or return a value.
getState(){
var gameState = database.ref('gameState')
gameState.on("value",function(data){
gameState = data.val()
})
}
// Creates the Player and Form objects and shows the registration form.
// NOTE(review): `player` and `form` are assigned without declaration, so they
// become (or overwrite) globals -- presumably intentional in this codebase.
start() {
player = new Player();
player.GetCount()
form = new Form();
form.display();
}
}
|
/*
Copyright 1991, 1998 The Open Group
Permission to use, copy, modify, distribute, and sell this software and its
documentation for any purpose is hereby granted without fee, provided that
the above copyright notice appear in all copies and that both that
copyright notice and this permission notice appear in supporting
documentation.
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE OPEN GROUP BE LIABLE FOR ANY CLAIM, DAMAGES OR
OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
Except as contained in this notice, the name of The Open Group shall
not be used in advertising or otherwise to promote the sale, use or
other dealings in this Software without prior written authorization
from The Open Group.
*/
/*
* Copyright 1991 by the Open Software Foundation
*
* Permission to use, copy, modify, distribute, and sell this software and its
* documentation for any purpose is hereby granted without fee, provided that
* the above copyright notice appear in all copies and that both that
* copyright notice and this permission notice appear in supporting
* documentation, and that the name of Open Software Foundation
* not be used in advertising or publicity pertaining to distribution of the
* software without specific, written prior permission. Open Software
* Foundation makes no representations about the suitability of this
* software for any purpose. It is provided "as is" without express or
* implied warranty.
*
* OPEN SOFTWARE FOUNDATION DISCLAIMS ALL WARRANTIES WITH REGARD TO
* THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS, IN NO EVENT SHALL OPEN SOFTWARE FOUNDATIONN BE
* LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*
* <NAME> OSF
*/
/*
* Copyright 2000 by <NAME>
*
* Permission to use, copy, modify, distribute, and sell this software
* and its documentation for any purpose is hereby granted without fee,
* provided that the above copyright notice appear in all copies and
* that both that copyright notice and this permission notice appear
* in supporting documentation, and that the name of Bruno Haible not
* be used in advertising or publicity pertaining to distribution of the
* software without specific, written prior permission. Bruno Haible
* makes no representations about the suitability of this software for
* any purpose. It is provided "as is" without express or implied
* warranty.
*
* Bruno Haible DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
* INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN
* NO EVENT SHALL Bruno Haible BE LIABLE FOR ANY SPECIAL, INDIRECT OR
* CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
* OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
* OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE
* OR PERFORMANCE OF THIS SOFTWARE.
*/
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif
#include "Xlibint.h"
#include "Xlcint.h"
/*
 * Draw a sequence of UTF-8 text items, each optionally switching to a new
 * fontset and applying a horizontal delta before it is drawn. The x
 * position advances by each item's delta plus the drawn escapement.
 */
void
Xutf8DrawText(
Display *dpy,
Drawable d,
GC gc,
int x,
int y,
XmbTextItem *text_items,
int nitems)
{
register XFontSet fs = NULL;
register XmbTextItem *p = text_items;
register int i = nitems;
register int esc;
/* ignore leading items with no fontset */
while (i && !p->font_set) {
i--;
p++;
}
/* fs carries over from the last item that supplied a fontset; the loop
* above guarantees the first processed item sets it. */
for (; --i >= 0; p++) {
if (p->font_set)
fs = p->font_set;
x += p->delta;
esc = (*fs->methods->utf8_draw_string) (dpy, d, fs, gc, x, y,
p->chars, p->nchars);
/* Some locale methods return 0 from draw; fall back to measuring. */
if (!esc)
esc = fs->methods->utf8_escapement (fs, p->chars, p->nchars);
x += esc;
}
}
/*
 * Draw a UTF-8 string at (x, y) by dispatching to the fontset's
 * locale-specific method table; the returned escapement is discarded.
 */
void
Xutf8DrawString(
Display *dpy,
Drawable d,
XFontSet font_set,
GC gc,
int x,
int y,
_Xconst char *text,
int text_len)
{
(void)(*font_set->methods->utf8_draw_string) (dpy, d, font_set, gc, x, y,
(char *)text, text_len);
}
/*
 * Draw a UTF-8 string at (x, y) filling the background rectangle as well
 * (image variant), via the fontset's locale-specific method table.
 */
void
Xutf8DrawImageString(
Display *dpy,
Drawable d,
XFontSet font_set,
GC gc,
int x,
int y,
_Xconst char *text,
int text_len)
{
(*font_set->methods->utf8_draw_image_string) (dpy, d, font_set, gc, x, y,
(char *)text, text_len);
}
/*
 * Return the escapement in pixels of a UTF-8 string, i.e. how far the
 * drawing origin would advance, via the fontset's method table.
 */
int
Xutf8TextEscapement(
XFontSet font_set,
_Xconst char *text,
int text_len)
{
return (*font_set->methods->utf8_escapement) (font_set,
(char *)text, text_len);
}
/*
 * Compute overall ink and logical extents of a UTF-8 string via the
 * fontset's method table; returns the escapement in pixels.
 */
int
Xutf8TextExtents(
XFontSet font_set,
_Xconst char *text,
int text_len,
XRectangle *overall_ink_extents,
XRectangle *overall_logical_extents)
{
return (*font_set->methods->utf8_extents) (font_set,
(char *)text, text_len,
overall_ink_extents,
overall_logical_extents);
}
/*
 * Compute per-character ink and logical extents of a UTF-8 string into
 * caller-supplied buffers of size buffer_size, plus the maximum extents,
 * via the fontset's method table. num_chars receives the character count.
 */
Status
Xutf8TextPerCharExtents(
XFontSet font_set,
_Xconst char *text,
int text_len,
XRectangle *ink_extents_buffer,
XRectangle *logical_extents_buffer,
int buffer_size,
int *num_chars,
XRectangle *max_ink_extents,
XRectangle *max_logical_extents)
{
return (*font_set->methods->utf8_extents_per_char)
(font_set, (char *)text, text_len,
ink_extents_buffer, logical_extents_buffer,
buffer_size, num_chars, max_ink_extents, max_logical_extents);
}
|
package de.unibi.agbi.biodwh2.reactome.entities;
import org.neo4j.ogm.annotation.NodeEntity;
/**
 * Graph node for a publication that is referenced by a uniform resource
 * locator, mapped to Neo4j nodes with the label "URL".
 *
 * Created by manuel on 12.12.19.
 */
@NodeEntity(label = "URL")
public class URL extends Publication {
// The publication's URL; public because the OGM populates it directly.
public String uniformResourceLoader;
public URL() {
}
/** @return the publication's URL string (may be null if not set) */
public String getUniformResourceLoader() {
return uniformResourceLoader;
}
}
|
<filename>OLD/js/config.template.js
// TO MAKE THE MAP APPEAR YOU MUST
// ADD YOUR ACCESS TOKEN FROM
// https://account.mapbox.com
// NOTE: this is a template -- presumably copied to config.js with the
// placeholder below replaced by a real public (pk.*) token; confirm the
// project's setup instructions.
const mapBoxToken = '<your access token here>';
|
<gh_stars>100-1000
/*
Package parsec provides a library of parser-combinators. The basic
idea behind parsec module is that, it allows programmers to compose
basic set of terminal parsers, a.k.a tokenizers and compose them
together as a tree of parsers, using combinators like: And,
OrdChoice, Kleene, Many, Maybe.
To begin with there are four basic Types that needs to be kept in
mind while creating and composing parsers,
Types
Scanner, an interface type that encapsulates the input text. A built
in scanner called SimpleScanner is supplied along with this package.
Developers can also implement their own scanner types. Following
example create a new instance of SimpleScanner, using an input
text:
var exprText = []byte(`4 + 123 + 23 + 67 +89 + 87 *78`)
s := parsec.NewScanner(exprText)
Nodify, callback function is supplied while combining parser
functions. If the underlying parsing logic matches with i/p text,
then callback will be dispatched with list of matching ParsecNode.
Value returned by callback function will further be used as
ParsecNode item in higher-level list of ParsecNodes.
Parser, simple parsers are functions that matches i/p text for
specific patterns. Simple parsers can be combined using one of the
supplied combinators to construct a higher level parser. A parser
function takes a Scanner object and applies the underlying parsing
logic, if underlying logic succeeds Nodify callback is dispatched
and a ParsecNode and a new Scanner object (with its cursor moved
forward) is returned. If parser fails to match, it shall return
the input scanner object as it is, along with nil ParsecNode.
ParsecNode, an interface type encapsulates one or more tokens from
i/p text, as terminal node or non-terminal node.
Combinators
If input text is going to be a single token like `10` or `true` or
`"some string"`, then all we need is a single Parser function that
can tokenize the i/p text into a terminal node. But our applications
are seldom that simple. Almost all the time we need to parse the i/p
text for more than one tokens and most of the time we need to
compose them into a tree of terminal and non-terminal nodes.
This is where combinators are useful. Package provides a set of
combinators to help combine terminal parsers into higher level
parsers. They are,
* And, to combine a sequence of terminals and non-terminal parsers.
* OrdChoice, to choose between specified list of parsers.
* Kleene, to repeat the parser zero or more times.
* Many, to repeat the parser one or more times.
* ManyUntil, to repeat the parser until a specified end matcher.
* Maybe, to apply the parser once or none.
All the above mentioned combinators accept one or more parser function
as arguments, either by value or by reference. The reason for allowing
parser argument by reference is to be able to define recursive
parsing logic, like parsing nested arrays:
var Y Parser
var value Parser // circular rats
var opensqrt = Atom("[", "OPENSQRT")
var closesqrt = Atom("]", "CLOSESQRT")
var values = Kleene(nil, &value, Atom(",", "COMMA"))
var array = And(nil, opensqrt, values, closesqrt)
func init() {
value = parsec.OrdChoice(nil, Int(), Bool(), String(), array)
Y = parsec.OrdChoice(nil, value)
}
Terminal parsers
Parsers for standard set of tokens are supplied along with this package.
Most of these parsers return Terminal type as ParsecNode.
* Char, match a single character skipping leading whitespace.
* Float, match a float literal skipping leading whitespace.
* Hex, match a hexadecimal literal skipping leading whitespace.
* Int, match a decimal number literal skipping leading whitespace.
* Oct, match a octal number literal skipping leading whitespace.
* String, match a string literal skipping leading whitespace.
* Ident, match a identifier token skipping leading whitespace.
* Atom, match a single atom skipping leading whitespace.
* AtomExact, match a single atom without skipping leading whitespace.
* Token, match a single token skipping leading whitespace.
* TokenExact, match a single token without skipping leading whitespace.
* OrdToken, match a single token with specified list of alternatives.
* End, match end of text.
* NoEnd, match not an end of text.
All of the terminal parsers, except End and NoEnd return Terminal type
as ParsecNode. While End and NoEnd return a boolean type as ParsecNode.
AST and Queryable
This is an experimental feature to use CSS like selectors for quering
an Abstract Syntax Tree (AST). Types, APIs and methods associated with
AST and Queryable are unstable, and are expected to change in future.
While Scanner, Parser, ParsecNode types are re-used in AST and Queryable,
combinator functions are re-implemented as AST methods. Similarly type
ASTNodify is to be used instead of Nodify type. Otherwise all the
parsec techniques mentioned above are equally applicable on AST.
Additionally, following points are worth noting while using AST,
* Combinator methods supplied via AST can be named.
* All combinators from AST object will create and return NonTerminal
as the Queryable type.
* ASTNodify function can interpret its Queryable argument and return
a different type implementing Queryable interface.
*/
package parsec
|
require 'test_helper'

# Unit tests for the String#present_tense core extension.
class StringExtensionTest < ActiveSupport::TestCase
  test 'string#present_tense should exist' do
    assert "".respond_to?(:present_tense)
  end

  test 'string#present_tense converts our words' do
    conversions = {
      'opened'     => 'open',
      'reopened'   => 'reopen',
      'closed'     => 'close',
      'labeled'    => 'label',
      'unlabeled'  => 'unlabel',
      'assigned'   => 'assign',
      'unassigned' => 'unassign',
      'edited'     => 'edit'
    }
    conversions.each do |past, present|
      # assert_equal reports both values on failure, unlike a bare assert
      # on an == expression.
      assert_equal present, past.present_tense
    end
  end
end
|
import argparse
from typing import Optional
def add_parser(subparsers: Optional[argparse._SubParsersAction] = None):
    """Register the "filesystem" subcommand on ``subparsers``.

    Args:
        subparsers: the sub-parsers action of the top-level CLI parser.

    Raises:
        ValueError: if ``subparsers`` is None.
    """
    if subparsers is None:
        raise ValueError("subparsers cannot be None")
    help_text = "ファイル操作関係(Web APIにアクセスしない)のサブコマンド"
    # The description shown on the subcommand's own --help is the same text.
    subparsers.add_parser(
        "filesystem",
        help=help_text,
        description="ファイル操作関係(Web APIにアクセスしない)のサブコマンド",
    )
import { ITableDataDescription } from './ITable';
/**
* Created by <NAME> on 04.08.2014.
*/
/**
 * Static helper(s) for building table data descriptions.
 */
export declare class TableUtils {
    /**
     * Builds a default {@link ITableDataDescription}.
     * NOTE(review): declaration only — the implementation is not visible here,
     * so confirm which default values are populated against the source module.
     */
    static createDefaultTableDesc(): ITableDataDescription;
}
|
import ParliamentAPIUtils from 'api/ParliamentAPIUtils';
import { MemberOfParliament } from 'api/ParliamentTypes';
/**
* Politician Model
*/
export default class Politician {
private static readonly BASE_URL = 'MemberProfile';
private readonly _parliamentPolitician: MemberOfParliament;
constructor(parliamentPolitician: MemberOfParliament) {
this._parliamentPolitician = parliamentPolitician;
}
/**
* Get the unique identifier for this politician
* */
public getId(): number {
return this._parliamentPolitician.id;
}
/**
* Get link to the view URL for this politician
* @returns The full link to their profile
*/
public getLinkToProfile(): string {
return ParliamentAPIUtils.getViewURL(`${Politician.BASE_URL}/${this.getId()}`);
}
/**
* Gets the english party name of this politician
*/
public getPartyName(): string {
return this._parliamentPolitician.caucusShortName;
}
// #region Contact Info
/**
* Get the politicans phone number
* @returns
*/
public getPhoneNumber(): string {
// todo implement
return 'TODO: Get phone numbers';
}
/**
* Get politicians twitter url, if known
*/
public getTwitterUrl(): string | undefined {
return 'TODO get twitterurl';
}
/**
* Get politicians facebook url, if known
*/
public getFacebookUrl(): string | undefined {
// todo get this value
return 'TODO get facebook';
}
// #endregion Contact Info
/**
* Get the photo url of this politician
*/
public getPhotoURL(): string | undefined {
return undefined;
// return ParliamentAPIUtils.getAPIURL(this._parliamentPolitician.image);
}
/**
* Get this politicians full name
*/
public getFullName(): string {
return `${this._parliamentPolitician.personOfficialFirstName} ${this._parliamentPolitician.personOfficialLastName}`;
}
/**
* Get the riding this politician represents
*/
public getRiding(): string {
return `${this._parliamentPolitician.constituencyName} (${this._parliamentPolitician.constituencyProvinceTerritoryName})`;
}
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.