text
stringlengths 1
1.05M
|
|---|
PYTHON=/zfsauton3/home/bpatra/miniconda3/bin/python3.6
${PYTHON} -m ccm_model.codl_main --config_file training_configs/ccm_crf_features_bert_codl/ner_type_attr.jsonnet --base_dir ./trained_model_outputs/ccm_crf_features_bert_codl --devices 0 --start_index 35 --end_index 70
|
# Termux build recipe for msmtp, a lightweight SMTP client.
# Variables follow the standard termux-packages build.sh conventions.
TERMUX_PKG_HOMEPAGE=https://marlam.de/msmtp/
TERMUX_PKG_DESCRIPTION="Lightweight SMTP client"
TERMUX_PKG_LICENSE="GPL-3.0"
TERMUX_PKG_VERSION=1.8.10
TERMUX_PKG_REVISION=1
TERMUX_PKG_SRCURL=https://marlam.de/msmtp/releases/msmtp-$TERMUX_PKG_VERSION.tar.xz
# Checksum of the source tarball above; must be updated together with VERSION.
TERMUX_PKG_SHA256=caba7f39d19df7a31782fe7336dd640c61ea33b92f987bd5423bca9683482f10
TERMUX_PKG_DEPENDS="openssl, libidn2"
TERMUX_PKG_EXTRA_CONFIGURE_ARGS="--with-tls=openssl"
# Regenerate the autotools build system before configure runs.
termux_step_pre_configure() {
	autoreconf -if
}
|
package controllers
import (
"crud-rest-api-golang/common"
"crud-rest-api-golang/models"
"crud-rest-api-golang/serializers"
"crud-rest-api-golang/validator"
"errors"
"fmt"
"net/http"
"strconv"
"github.com/gin-gonic/gin"
)
// ArticleCreate handles requests that create a new article. It binds and
// validates the payload, persists the article, and responds 201 with the
// serialized article; validation and database failures yield 422.
func ArticleCreate(c *gin.Context) {
	v := validator.NewArticleModelValidator()
	if err := v.Bind(c); err != nil {
		c.JSON(http.StatusUnprocessableEntity, common.NewValidatorError(err))
		return
	}
	// Debug print of the author kept from the original implementation.
	fmt.Println(v.ArticleModel.Author.UserModel)
	if err := models.SaveOne(&v.ArticleModel); err != nil {
		c.JSON(http.StatusUnprocessableEntity, common.NewError("database", err))
		return
	}
	serializer := serializers.ArticleSerializer{c, v.ArticleModel}
	c.JSON(http.StatusCreated, gin.H{"article": serializer.Response()})
}
// ArticleUpdate modifies an existing article looked up by its slug path
// parameter. Responds 404 for an unknown slug, 422 on validation or
// database failure, and 200 with the serialized article on success.
func ArticleUpdate(c *gin.Context) {
	slug := c.Param("slug")
	articleModel, err := models.FindOneArticle(slug)
	if err != nil {
		c.JSON(http.StatusNotFound, common.NewError("articles", errors.New("Invalid slug")))
		return
	}
	v := validator.NewArticleModelValidatorFillWith(articleModel)
	if err := v.Bind(c); err != nil {
		c.JSON(http.StatusUnprocessableEntity, common.NewValidatorError(err))
		return
	}
	// Preserve the primary key so Update targets the existing row.
	v.ArticleModel.ID = articleModel.ID
	if err := articleModel.Update(&v.ArticleModel); err != nil {
		c.JSON(http.StatusUnprocessableEntity, common.NewError("database", err))
		return
	}
	serializer := serializers.ArticleSerializer{c, v.ArticleModel}
	c.JSON(http.StatusOK, gin.H{"article": serializer.Response()})
}
// ArticleDelete removes the article identified by the slug path parameter.
// Responds 404 for an unknown slug and 422 if the deletion itself fails.
func ArticleDelete(c *gin.Context) {
	article, err := models.FindOneArticle(c.Param("slug"))
	if err != nil {
		c.JSON(http.StatusNotFound, common.NewError("articles", errors.New("Invalid slug")))
		return
	}
	if err := models.DeleteArticleModel(article); err != nil {
		c.JSON(http.StatusUnprocessableEntity, common.NewError("database", err))
		return
	}
	c.JSON(http.StatusOK, gin.H{"article": "Delete article success"})
}
// ArticleFavorite marks the article (by slug) as favorited by the
// authenticated user stored in the context under "my_user_model".
// Note: c.MustGet panics if the auth middleware did not populate it.
func ArticleFavorite(c *gin.Context) {
	article, err := models.FindOneArticle(c.Param("slug"))
	if err != nil {
		c.JSON(http.StatusNotFound, common.NewError("articles", errors.New("Invalid slug")))
		return
	}
	me := c.MustGet("my_user_model").(models.UserModel)
	if err := article.FavoriteBy(models.GetArticleUserModel(me)); err != nil {
		c.JSON(http.StatusUnprocessableEntity, common.NewError("database", err))
		return
	}
	serializer := serializers.ArticleSerializer{c, article}
	c.JSON(http.StatusOK, gin.H{"article": serializer.Response()})
}
// ArticleUnfavorite removes the authenticated user's favorite mark from
// the article identified by slug. Mirrors ArticleFavorite.
func ArticleUnfavorite(c *gin.Context) {
	article, err := models.FindOneArticle(c.Param("slug"))
	if err != nil {
		c.JSON(http.StatusNotFound, common.NewError("articles", errors.New("Invalid slug")))
		return
	}
	me := c.MustGet("my_user_model").(models.UserModel)
	if err := article.UnFavoriteBy(models.GetArticleUserModel(me)); err != nil {
		c.JSON(http.StatusUnprocessableEntity, common.NewError("database", err))
		return
	}
	serializer := serializers.ArticleSerializer{c, article}
	c.JSON(http.StatusOK, gin.H{"article": serializer.Response()})
}
// ArticleCommentCreate adds a comment to the article identified by slug.
// Responds 404 for an unknown slug, 422 on validation/database failure,
// and 201 with the serialized comment on success.
func ArticleCommentCreate(c *gin.Context) {
	article, err := models.FindOneArticle(c.Param("slug"))
	if err != nil {
		c.JSON(http.StatusNotFound, common.NewError("comment", errors.New("Invalid slug")))
		return
	}
	v := validator.NewCommentModelValidator()
	if err := v.Bind(c); err != nil {
		c.JSON(http.StatusUnprocessableEntity, common.NewValidatorError(err))
		return
	}
	// Attach the parent article before persisting the comment.
	v.CommentModel.Article = article
	if err := models.SaveOne(&v.CommentModel); err != nil {
		c.JSON(http.StatusUnprocessableEntity, common.NewError("database", err))
		return
	}
	serializer := serializers.CommentSerializer{c, v.CommentModel}
	c.JSON(http.StatusCreated, gin.H{"comment": serializer.Response()})
}
// ArticleCommentDelete deletes a single comment by its numeric id path
// parameter. Both a malformed id and a failed delete are reported as
// 404 "Invalid id" (matching the original behavior).
func ArticleCommentDelete(c *gin.Context) {
	id64, err := strconv.ParseUint(c.Param("id"), 10, 32)
	if err != nil {
		c.JSON(http.StatusNotFound, common.NewError("comment", errors.New("Invalid id")))
		return
	}
	if err := models.DeleteCommentModel([]uint{uint(id64)}); err != nil {
		c.JSON(http.StatusNotFound, common.NewError("comment", errors.New("Invalid id")))
		return
	}
	c.JSON(http.StatusOK, gin.H{"comment": "Delete comment success"})
}
// ArticleList returns a filtered, paginated list of articles plus the
// total count. Filters (tag, author, favorited) and pagination (limit,
// offset) all come from query-string parameters.
func ArticleList(c *gin.Context) {
	articles, count, err := models.FindManyArticle(
		c.Query("tag"),
		c.Query("author"),
		c.Query("limit"),
		c.Query("offset"),
		c.Query("favorited"),
	)
	if err != nil {
		c.JSON(http.StatusNotFound, common.NewError("articles", errors.New("Invalid param")))
		return
	}
	serializer := serializers.ArticlesSerializer{c, articles}
	c.JSON(http.StatusOK, gin.H{"articles": serializer.Response(), "articlesCount": count})
}
// ArticleRetrieve returns a single article by slug. The reserved slug
// "feed" is routed to ArticleFeed instead of a database lookup.
func ArticleRetrieve(c *gin.Context) {
	slug := c.Param("slug")
	if slug == "feed" {
		ArticleFeed(c)
		return
	}
	article, err := models.FindOneArticle(slug)
	if err != nil {
		c.JSON(http.StatusNotFound, common.NewError("articles", errors.New("Invalid slug")))
		return
	}
	serializer := serializers.ArticleSerializer{c, article}
	c.JSON(http.StatusOK, gin.H{"article": serializer.Response()})
}
// ArticleCommentList returns all comments attached to the article
// identified by the slug path parameter.
func ArticleCommentList(c *gin.Context) {
	article, err := models.FindOneArticle(c.Param("slug"))
	if err != nil {
		c.JSON(http.StatusNotFound, common.NewError("comments", errors.New("Invalid slug")))
		return
	}
	if err := article.GetComments(); err != nil {
		c.JSON(http.StatusNotFound, common.NewError("comments", errors.New("Database error")))
		return
	}
	serializer := serializers.CommentsSerializer{c, article.Comments}
	c.JSON(http.StatusOK, gin.H{"comments": serializer.Response()})
}
// ArticleFeed returns the paginated feed of articles for the authenticated
// user stored in the context under "my_user_model". A user ID of 0 is
// treated as anonymous and rejected with 401.
// Note: c.MustGet panics if the auth middleware did not populate the key.
func ArticleFeed(c *gin.Context) {
	me := c.MustGet("my_user_model").(models.UserModel)
	if me.ID == 0 {
		c.AbortWithError(http.StatusUnauthorized, errors.New("{error : \"Require auth!\"}"))
		return
	}
	feedOwner := models.GetArticleUserModel(me)
	articles, count, err := feedOwner.GetArticleFeed(c.Query("limit"), c.Query("offset"))
	if err != nil {
		c.JSON(http.StatusNotFound, common.NewError("articles", errors.New("Invalid param")))
		return
	}
	serializer := serializers.ArticlesSerializer{c, articles}
	c.JSON(http.StatusOK, gin.H{"articles": serializer.Response(), "articlesCount": count})
}
// TagList returns every tag known to the system.
func TagList(c *gin.Context) {
	tags, err := models.GetAllTags()
	if err != nil {
		c.JSON(http.StatusNotFound, common.NewError("articles", errors.New("Invalid param")))
		return
	}
	serializer := serializers.TagsSerializer{c, tags}
	c.JSON(http.StatusOK, gin.H{"tags": serializer.Response()})
}
|
<gh_stars>0
/*
* MIT License
*
* Copyright (c) 2018 <NAME> (Falkreon) and contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.elytradev.marsenal.gui;
import java.util.ArrayList;
import java.util.List;
import com.elytradev.concrete.inventory.gui.ConcreteContainer;
import com.elytradev.concrete.inventory.gui.widget.WImage;
import com.elytradev.concrete.inventory.gui.widget.WPanel;
import com.elytradev.concrete.inventory.gui.widget.WPlainPanel;
import com.elytradev.marsenal.block.ArsenalBlocks;
import com.elytradev.marsenal.gui.widget.WButton;
import com.elytradev.marsenal.gui.widget.WItemDisplay;
import com.elytradev.marsenal.gui.widget.WSwappableImage;
import com.elytradev.marsenal.gui.widget.WTextArea;
import com.elytradev.marsenal.item.ArsenalItems;
import com.elytradev.marsenal.item.EnumSpellFocus;
import net.minecraft.inventory.IInventory;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.ResourceLocation;
import net.minecraft.util.text.ITextComponent;
import net.minecraft.util.text.TextComponentString;
import net.minecraft.util.text.TextComponentTranslation;
/**
 * GUI container for the in-game codex "book". Renders the current
 * {@link CodexPage} (header image, left text, item spotlight, feature
 * image, right text) over a 256x128 background, with previous/next
 * navigation buttons.
 */
public class ContainerCodex extends ConcreteContainer {
	// Shared page list. The inline page content in initCodex is commented
	// out, so this list is empty unless populated elsewhere.
	public static List<CodexPage> CODEX_PAGES = new ArrayList<>();

	public static void initCodex() {
		/*
		CODEX_PAGES = new CodexPage[] {
			new CodexPage(
					new ResourceLocation("magicarsenal:textures/guis/codex/intro.png"),
					new TextComponentTranslation("codex.magicarsenal.page.1.left"),
					null,
					null,
					new TextComponentTranslation("codex.magicarsenal.page.1.right")
					),
			new CodexPage(
					null,
					new TextComponentTranslation("codex.magicarsenal.page.2.left"),
					new ItemStack(ArsenalItems.ROOT_WOLFSBANE),
					new ResourceLocation("magicarsenal:textures/guis/codex/feature.poison.png"),
					new TextComponentTranslation("codex.magicarsenal.page.2.right")
					),
			new CodexPage(
					null,
					new TextComponentTranslation("codex.magicarsenal.page.3.left"),
					new ItemStack(ArsenalBlocks.RUNESTONE1),
					null,
					new TextComponentTranslation("codex.magicarsenal.page.3.right")
					),
			new CodexPage(
					null,
					new TextComponentTranslation("codex.magicarsenal.page.4.left"),
					new ItemStack(ArsenalBlocks.STELE_UNCARVED),
					null,
					new TextComponentTranslation("codex.magicarsenal.page.4.right")
					),
			new CodexPage(
					null,
					new TextComponentTranslation("codex.magicarsenal.page.5.left"),
					null,
					new ResourceLocation("magicarsenal:textures/guis/codex/feature.runes.png"),
					null
					),
			new CodexPage(
					new ResourceLocation("magicarsenal:textures/guis/codex/header.kenaz.png"),
					new TextComponentTranslation("codex.magicarsenal.page.6.left"),
					new ItemStack(ArsenalBlocks.STELE_KENAZ),
					new ResourceLocation("magicarsenal:textures/guis/codex/feature.kenaz.png"),
					new TextComponentTranslation("codex.magicarsenal.page.6.right")
					),
			new CodexPage(
					null,
					new TextComponentTranslation("codex.magicarsenal.page.7.left"),
					new ItemStack(ArsenalBlocks.RUNIC_ALTAR),
					new ResourceLocation("magicarsenal:textures/guis/codex/feature.altar.png"),
					new TextComponentTranslation("codex.magicarsenal.page.7.right")
					),
			new CodexPage(
					null,
					new TextComponentTranslation("codex.magicarsenal.page.8.left"),
					null,
					null,
					new TextComponentTranslation("codex.magicarsenal.page.8.right")
					),
		};*/
	}

	private int pages = 1;              // total page count reported by getpageCount()
	private WSwappableImage header;     // banner across the top of the left page (placed 100x24)
	private WTextArea leftPage;         // left page body text
	private WItemDisplay spotlight;     // item shown top-left of the right page (18x18)
	private WSwappableImage feature;    // large image behind the right page (100x100)
	private WTextArea rightPage;        // right page body text
	private WButton prevPage = new WButton();
	private WButton nextPage = new WButton();
	private int curPage = 0;            // zero-based index into CODEX_PAGES

	// TileEntity-flavored constructor; container and te are ignored and the
	// codex always opens with a single page.
	public ContainerCodex(IInventory player, IInventory container, TileEntity te) {
		this(player, 1);
	}

	public ContainerCodex(IInventory player, int numPages) {
		super(player, null);
		pages = numPages;
		//P A G E   L A Y O U T
		this.setColor(0xFFcbbf90);
		WPlainPanel root = new WPlainPanel();
		this.setRootPanel(root);
		this.setDrawPanel(false);
		// Full-size book background.
		root.add(new WImage(new ResourceLocation("magicarsenal","textures/guis/codex/bg.png")), 0, 0, 256, 128);

		header = new WSwappableImage();
		root.add(header, 10, 10, 100, 24);
		leftPage = new WTextArea();
		root.add(leftPage, 10, 10 + 3 + 24, 100, (100-(3+24)));

		feature = new WSwappableImage();
		root.add(feature, 136, 13, 100, 100); //Added here because of intended Z-order
		spotlight = new WItemDisplay();
		// Placeholder stack shown until setCurPage applies a real page.
		spotlight.setItemStack(new ItemStack(ArsenalItems.SPELL_FOCUS, 1, EnumSpellFocus.RECOVERY.ordinal()));
		root.add(spotlight, 136, 13, 18, 18);
		rightPage = new WTextArea();
		root.add(rightPage, 136, 13+18+2, 100, 100-(13+18-2));

		prevPage.setImage(new ResourceLocation("magicarsenal:textures/guis/codex/previous.png"));
		prevPage.setOnClick(this::previous);
		root.add(prevPage, 8, 103);
		nextPage.setImage(new ResourceLocation("magicarsenal:textures/guis/codex/next.png"));
		nextPage.setOnClick(this::next);
		root.add(nextPage, 231, 103);

		curPage = 0;
		setCurPage();
	}

	public int getpageCount() {
		return pages;
	}

	public String getLocalizedName() {
		return "";
	}

	/** Clamps curPage into range and applies the corresponding page to the widgets. */
	public void setCurPage() {
		if (CODEX_PAGES.size()<1) return; //Just in case I do something REALLY dumb
		if (curPage>=CODEX_PAGES.size()) curPage = CODEX_PAGES.size()-1;
		if (curPage<0) curPage=0;
		// NOTE(review): redundant with the size()<1 guard above.
		if (CODEX_PAGES.isEmpty()) return;
		setToPage(CODEX_PAGES.get(curPage));
		checkButtons();
	}

	/** Pushes one page's content into the widgets, repositioning text when
	 *  the header or spotlight is absent. */
	private void setToPage(CodexPage page) {
		header.setImage(page.header);
		leftPage.setText(page.leftPage);
		if (page.header==null) {
			leftPage.setLocation(leftPage.getX(), 13);
		} else {
			leftPage.setLocation(leftPage.getX(), 10 + 3 + 24);
		}

		spotlight.setItemStack(page.getSpotlight());
		rightPage.setText(page.rightPage);
		// NOTE(review): this checks the raw spotlight field, not getSpotlight(),
		// so a page whose stack is lazily resolved from spotlightLoc will not
		// shift the right-page text down — confirm intended.
		if (page.spotlight==null || page.spotlight.isEmpty()) {
			rightPage.setLocation(rightPage.getX(), 13);
		} else {
			rightPage.setLocation(rightPage.getX(), 13+18+2);
		}

		feature.setImage(page.feature);
	}

	/** Enables/disables the nav buttons for the current position.
	 *  NOTE(review): nextPage stays enabled while curPage<pages; since pages
	 *  is a count and curPage a zero-based index, this looks like it allows
	 *  one click past the last page (setCurPage then clamps) — confirm
	 *  whether curPage<pages-1 was intended. */
	public void checkButtons() {
		nextPage.setEnabled(curPage<pages);
		prevPage.setEnabled(curPage>0);
	}

	public void previous() {
		curPage--;
		setCurPage();
	}

	public void next() {
		curPage++;
		setCurPage();
	}

	/** One spread of the codex: header + left text + spotlight item +
	 *  feature image + right text. Any part may be null/absent. */
	public static class CodexPage {
		/** A 50x16 image to be displayed across the top of the left page */
		private ResourceLocation header = null;
		/** Text to be displayed on the left page */
		private ITextComponent leftPage;
		/** A 50x50 image to be displayed on the full right page */
		private ItemStack spotlight = null;
		private ResourceLocation feature = null;
		// Deferred spotlight: item id + metadata, resolved on first getSpotlight().
		private ResourceLocation spotlightLoc = null;
		private int spotlightData = 0;
		/** Text to be displayed on the right page. Bottom-aligned if there's a spotlight. */
		private ITextComponent rightPage;

		public CodexPage(ITextComponent left, ITextComponent right) {
			this(null, left, null, null, right);
		}

		public CodexPage(ResourceLocation header, ITextComponent left, ItemStack spotlight, ResourceLocation feature, ITextComponent right) {
			this.header = header;
			this.leftPage = left;
			this.spotlight = spotlight;
			this.feature = feature;
			this.rightPage = right;
		}

		public CodexPage() {
		}

		public void setLeftPage(String leftPage) {
			this.leftPage = new TextComponentString(leftPage);
		}

		public void setRightPage(String rightPage) {
			this.rightPage = new TextComponentString(rightPage);
		}

		public void setHeader(String header) {
			this.header = new ResourceLocation(header);
		}

		/** Records a deferred spotlight; the actual ItemStack is built lazily
		 *  in getSpotlight() so registry lookup happens after item init. */
		public void setSpotlight(String item, int meta) {
			//Item lookup = Item.getByNameOrId(item);
			//if (lookup==null) return;
			//this.spotlight = new ItemStack(lookup, 1, meta);
			this.spotlightLoc = new ResourceLocation(item);
			this.spotlightData = meta;
		}

		/** Resolves and caches the spotlight stack; returns null when neither
		 *  an explicit stack nor a resolvable spotlightLoc exists. */
		public ItemStack getSpotlight() {
			if (spotlight==null) {
				if (spotlightLoc!=null) {
					Item lookup = Item.REGISTRY.getObject(spotlightLoc);
					if (lookup==null) return null;
					spotlight = new ItemStack(lookup, 1, spotlightData);
				} else {
					return null;
				}
			}
			return spotlight;
		}

		public void setFeature(String feature) {
			this.feature = new ResourceLocation(feature);
		}
	}
}
|
import logging
import os
import boto3
import requests
# Set up logging for botocore library to debug level so AWS SDK
# request/response details are visible when diagnosing uploads.
logging.getLogger('botocore').setLevel(logging.DEBUG)

# Constants
SITE = 'http://www.python.org/'       # page whose content is sampled
CW_NAMESPACE = 'ProfilerPythonDemo'   # CloudWatch namespace (not referenced in the visible code)
S3_BUCKET = os.environ['S3_BUCKET']   # destination bucket; raises KeyError if unset
# Function to collect performance data from the website
# Function to collect performance data from the website
def collect_performance_data(site_url):
    """Fetch the page at ``site_url`` and return its processed content.

    Returns the result of ``process_data`` applied to the response body.
    Raises ``requests.HTTPError`` for non-2xx responses and
    ``requests.Timeout`` if the server does not answer in time.
    """
    # Bound the request so a hung server cannot stall the script forever.
    response = requests.get(site_url, timeout=10)
    # Fail loudly on HTTP errors instead of silently processing an error page.
    response.raise_for_status()
    # Process the data (example: calculate response time, extract specific content, etc.)
    processed_data = process_data(response.text)
    return processed_data
# Function to process the collected data
# Function to process the collected data
def process_data(raw_data):
    """Reduce the raw page text to the sample that gets uploaded.

    Currently keeps only the first 100 characters; inputs shorter than
    that are returned unchanged.
    """
    sample_length = 100
    return raw_data[:sample_length]
# Function to upload processed data to S3 bucket
# Function to upload processed data to S3 bucket
def upload_to_s3(data, bucket_name):
    """Write ``data`` to s3://<bucket_name>/processed_data.txt."""
    client = boto3.client('s3')
    client.put_object(Bucket=bucket_name, Key='processed_data.txt', Body=data)
# Main script
if __name__ == "__main__":
    # Fetch and trim the page content, then persist it to the bucket.
    performance_data = collect_performance_data(SITE)
    upload_to_s3(performance_data, S3_BUCKET)
|
import random
import os
import tensorflow.compat.v1 as tf
import tempfile
import twremat
def splice_op(op, input_map, control_inputs=None):
    """Clone ``op`` into its own graph, remapping dependencies.

    The clone keeps the original NodeDef but gets a fresh unique name
    (``<name>_copy``). Tensor inputs and control inputs present in
    ``input_map`` are replaced by their mapped values; any extra
    ``control_inputs`` (Nones skipped) are appended.

    Returns the newly constructed tf.Operation.
    """
    g = op.graph
    node_def = tf.NodeDef()
    node_def.CopyFrom(op.node_def)
    node_def.name = g.unique_name(op.name + '_copy')
    inputs = [input_map.get(x, x) for x in op.inputs]
    new_control_inputs = [input_map.get(x, x) for x in op.control_inputs]
    if control_inputs:
        new_control_inputs.extend([x for x in control_inputs if x is not None])
    # new_control_inputs = control_inputs
    output_types = [o.dtype for o in op.outputs]
    op_def = op.op_def
    return tf.Operation(node_def, g, inputs=inputs, output_types=output_types, op_def=op_def, control_inputs=new_control_inputs)
def splice_tensor(ten, new_op):
    """Return the output of ``new_op`` at the same position that ``ten``
    occupies among its own op's outputs."""
    position = ten.op.outputs.index(ten)
    return new_op.outputs[position]
def splice(obj, input_map, control_inputs=None):
    """Clone ``obj`` (Operation, Tensor, or IndexedSlices), remapping its
    dependencies through ``input_map``.

    Tensors are redirected to the (possibly remapped) clone of their
    producing op; ``control_inputs`` only apply to Operations.

    Raises AssertionError for unsupported types.
    """
    if type(obj) is tf.Operation:
        return splice_op(obj, input_map, control_inputs=control_inputs)
    elif type(obj) is tf.Tensor:
        return splice_tensor(obj, input_map.get(obj.op, obj.op))
    elif type(obj) is tf.IndexedSlices:
        return tf.IndexedSlices(values=input_map.get(obj.values, obj.values),
                                indices=input_map.get(obj.indices, obj.indices),
                                dense_shape=input_map.get(obj.dense_shape, obj.dense_shape))
    else:
        # Fixed message: it previously said "Could not get deps" (copy-paste
        # from get_deps) and was missing the space after "splice".
        raise AssertionError(f'Could not splice {repr(type(obj))} {repr(obj)}')
def product(xs):
    """Return the product of all elements of ``xs`` (1 for empty input)."""
    result = 1
    for value in xs:
        result *= value
    return result
def shape_size(shape):
    """Estimate the element count of a TensorShape.

    Unknown rank is treated as a flat 16 elements. Unknown dimensions are
    guessed: the leading (batch) dim counts as 1, any other unknown dim
    as 1024.
    """
    if shape.rank is None:
        return 16
    guessed = []
    for index, dim in enumerate(shape.as_list()):
        if dim is None:
            guessed.append(1 if index == 0 else 1024)
        else:
            guessed.append(dim)
    return product(guessed)
def graph_from_dfs(deps, starts):
    """Explore the dependency graph reachable from ``starts`` and return a
    ``{node: [dependencies]}`` adjacency mapping.

    ``deps`` maps a node to an iterable of its direct dependencies.
    Note: ``starts`` is consumed in place (used as the work stack).
    """
    seen = set()
    stack = starts
    while stack:
        node = stack.pop()
        if node in seen:
            continue
        seen.add(node)
        stack.extend(list(deps(node)))
    return {node: list(deps(node)) for node in seen}
def get_deps(obj):
    """Return the direct dependencies of a graph object.

    Operations depend on their tensor inputs plus control inputs; a
    Tensor depends on its producing op; IndexedSlices depend on their
    component tensors. Raises AssertionError for unsupported types.
    """
    if type(obj) is tf.Operation:
        return list(obj.inputs) + list(obj.control_inputs)
    elif type(obj) is tf.Tensor:
        return [obj.op]
    elif type(obj) is tf.IndexedSlices:
        return [obj.indices, obj.values, obj.dense_shape]
    else:
        # Fixed missing space after "from" in the error message.
        raise AssertionError(f'Could not get deps from {repr(type(obj))} {repr(obj)}')
def tensor_graph(compute):
    """Build the full dependency graph reachable from ``compute`` targets."""
    roots = list(compute)
    return graph_from_dfs(get_deps, roots)
def blacklist(obj):
    """Return True for graph objects that must never be cloned: assigns,
    variables, and placeholders (and tensors produced by such ops)."""
    if type(obj) is tf.Tensor:
        return blacklist(obj.op)
    if type(obj) is tf.Operation:
        # TODO: Should we do special accounting for
        # ReadVariableOp? Currently we forbid cloning altogether,
        # but it's actually ok to clone this op as long as it
        # doesn't float across an effectful op (Assign). Also
        # currently we don't account for the memory used by
        # ReadVariableOp (is it copy-on-write?).
        # https://www.tensorflow.org/api_docs/python/tf/raw_ops/ReadVariableOp?hl=uk
        if 'Assign' in obj.type or 'Variable' in obj.type or 'Placeholder' in obj.type:
            return True
    return False
def estimate_cpu(op):
    """Rough compute cost of ``op``: 4 bytes per element, summed over its
    tensor inputs and all of its outputs."""
    input_bytes = sum(4 * shape_size(t.shape) for t in op.inputs if type(t) is tf.Tensor)
    output_bytes = sum(4 * shape_size(t.shape) for t in op.outputs)
    return input_bytes + output_bytes
def estimate_mem(op):
    """Rough memory footprint of ``op``: 4 bytes per output element."""
    total = 0
    for out in op.outputs:
        total += 4 * shape_size(out.shape)
    return total
def info(op):
    """Describe a graph node for the twremat scheduler.

    'effectful' nodes may never be recomputed, 'pointer' nodes are
    treated as free, and 'normal' nodes carry cpu/mem cost estimates.
    The blacklist check deliberately runs first so effectful Operations
    are not classified as 'normal'.
    """
    if blacklist(op):
        return {'type': 'effectful'}
    elif type(op) is tf.Operation:
        # Reshape only rewrites shape metadata, so treat it as free.
        if 'Reshape' in op.type:
            return {'type': 'pointer'}
        return {'type': 'normal',
                'cpu': estimate_cpu(op),
                'mem': estimate_mem(op)}
    elif type(op) is tf.Tensor:
        return {'type': 'pointer'}
    elif type(op) is tf.IndexedSlices:
        return {'type': 'pointer'}
    else:
        raise AssertionError(repr((type(op), op)))
# Helper functions to flatten and unflatten nested structures of
# tensors and ops so that tf_remat can be applied to structures
# without fiddly marshalling.
def get_ops(compute):
    """Flatten an arbitrary nesting of dicts/lists/tuples and collect every
    Operation, Tensor, and IndexedSlices it contains."""
    found = []
    pending = [compute]
    while pending:
        item = pending.pop()
        if type(item) is dict:
            pending.extend(item.values())
        elif type(item) in (list, tuple):
            pending.extend(item)
        elif type(item) in (tf.Operation, tf.Tensor, tf.IndexedSlices):
            found.append(item)
    return found
def replace_ops(top, live):
    """Inverse of get_ops: rebuild the nested structure, substituting every
    Operation/Tensor/IndexedSlices with its entry from ``live``. Other
    leaves pass through unchanged."""
    kind = type(top)
    if kind in (tf.Operation, tf.Tensor, tf.IndexedSlices):
        return live[top]
    if kind is dict:
        return {key: replace_ops(value, live) for (key, value) in top.items()}
    if kind is list:
        return [replace_ops(value, live) for value in top]
    if kind is tuple:
        return tuple(replace_ops(value, live) for value in top)
    return top
def tf_remat(compute, memlimit):
    """Rewrite the graph feeding ``compute`` so tensors are rematerialized
    (recomputed) according to a twremat schedule under ``memlimit``.

    ``compute`` may be any nesting of dicts/lists/tuples containing
    Operations/Tensors/IndexedSlices; an isomorphic structure is returned
    with those nodes replaced by their scheduled counterparts.
    """
    compute_ops = get_ops(compute)
    tf_deps = tensor_graph(compute_ops)
    # Relabel with integers — twremat works on integer node ids.
    from_op = {op : i for (i, op) in enumerate(tf_deps.keys())}
    from_node = {i : op for (op, i) in from_op.items()}
    nodes = set(from_node.keys())
    # NOTE(review): node_deps is computed but never used below — candidate
    # for removal (node_info[n]['deps'] repeats the same computation).
    node_deps = {n : [from_op[d] for d in tf_deps[from_node[n]]] for n in nodes}
    node_info = {}
    for n in nodes:
        node_info[n] = info(from_node[n])
        node_info[n]['deps'] = [from_op[d] for d in tf_deps[from_node[n]]]
    # twremat returns a sequence of ('compute'|'free', node) steps.
    steps = twremat.runtwremat(node_info, memlimit, {from_op[c] for c in compute_ops})
    print('Constructing tensorflow graph...')
    live = {}       # original object -> currently-live clone (or itself)
    last_op = None  # chained as a control input to serialize the schedule
    for (action, n) in steps:
        base = from_node[n]
        if action == 'compute':
            input_map = {d : live[d] for d in tf_deps[base] if live[d] != d}
            if blacklist(base) and not input_map:
                # Effectful node whose deps are unchanged: reuse it as-is.
                live[base] = base
            else:
                live[base] = splice(base, input_map, control_inputs=[last_op])
            if type(base) is tf.Operation:
                last_op = live[base]
        elif action == 'free':
            del live[base]
    return replace_ops(compute, live)
|
-- Second-highest mark: the maximum after excluding the overall maximum.
SELECT MAX(marks) FROM Student WHERE marks NOT IN (SELECT MAX(marks) FROM Student)
|
<html>
<head>
<title>Books</title>
</head>
<body>
<h2>All Books</h2>
<ul>
<?php
// Connect to the DB
include 'connect_sql.php';

// Get all book titles
$query = "SELECT title FROM books";
$result = mysqli_query($link, $query);

// mysqli_query returns false on failure; fetching from false would only
// emit warnings, so report the problem explicitly instead.
if ($result === false) {
    echo "<li>Unable to load books.</li>";
} else {
    // Print out each title, escaped so titles containing HTML
    // metacharacters cannot inject markup into the page (XSS).
    while ($row = mysqli_fetch_array($result)) {
        echo "<li>" . htmlspecialchars($row['title']) . "</li>";
    }
}
?>
</ul>
</body>
</html>
|
#!/bin/bash
#
# Usage:
# $ create_django_project_run_env <appname>
#
# Provisions a production run environment for a Django project: dedicated
# system user, virtualenv, gunicorn, nginx, supervisor, and a PostgreSQL
# database. Must be run as root (enforced by check_root).
source ./common_funcs.sh

check_root

# conventional values that we'll use throughout the script
APPNAME=$1
DOMAINNAME=$2
PYTHON_VERSION=$3

# check appname was supplied as argument
if [ "$APPNAME" == "" ] || [ "$DOMAINNAME" == "" ]; then
	echo "Usage:"
	echo "  $ create_django_project_run_env <project> <domain> [python-version]"
	echo
	echo "  Python version is 2 or 3 and defaults to 3 if not specified. Subversion"
	echo "  of Python will be determined during runtime. The required Python version"
	echo "  has to be installed and available globally."
	echo
	exit 1
fi

# Default python version to 3. OS has to have it installed.
if [ "$PYTHON_VERSION" == "" ]; then
	PYTHON_VERSION=3
fi
if [ "$PYTHON_VERSION" != "3" -a "$PYTHON_VERSION" != "2" ]; then
	error_exit "Invalid Python version specified. Acceptable values are 2 or 3 (default)"
fi
# Shared group for all deployed apps.
GROUPNAME=webapps
# app folder name under /webapps/<appname>_project
APPFOLDER=$1_project
APPFOLDERPATH=/$GROUPNAME/$APPFOLDER

# Determine requested Python version & subversion (e.g. "3.8"), used later
# to locate the site-packages path for the admin static files.
if [ "$PYTHON_VERSION" == "3" ]; then
	PYTHON_VERSION_STR=`python3 -c 'import sys; ver = "{0}.{1}".format(sys.version_info[:][0], sys.version_info[:][1]); print(ver)'`
else
	PYTHON_VERSION_STR=`python -c 'import sys; ver = "{0}.{1}".format(sys.version_info[:][0], sys.version_info[:][1]); print ver'`
fi
# Verify required python version is installed
echo "Python version: $PYTHON_VERSION_STR"
# ###################################################################
# Create the app folder
# ###################################################################
echo "Creating app folder '$APPFOLDERPATH'..."
mkdir -p /$GROUPNAME/$APPFOLDER || error_exit "Could not create app folder"

# test the group 'webapps' exists, and if it doesn't create it
getent group $GROUPNAME
if [ $? -ne 0 ]; then
	echo "Creating group '$GROUPNAME' for automation accounts..."
	groupadd --system $GROUPNAME || error_exit "Could not create group 'webapps'"
fi

# create the app user account, same name as the appname
grep "$APPNAME:" /etc/passwd
if [ $? -ne 0 ]; then
	echo "Creating automation user account '$APPNAME'..."
	useradd --system --gid $GROUPNAME --shell /bin/bash --home $APPFOLDERPATH $APPNAME || error_exit "Could not create automation user account '$APPNAME'"
fi

# change ownership of the app folder to the newly created user account
echo "Setting ownership of $APPFOLDERPATH and its descendents to $APPNAME:$GROUPNAME..."
chown -R $APPNAME:$GROUPNAME $APPFOLDERPATH || error_exit "Error setting ownership"
# give group execution rights in the folder;
# TODO: is this necessary? why?
chmod g+x $APPFOLDERPATH || error_exit "Error setting group execute flag"
# install python virtualenv in the APPFOLDER
# NOTE(review): the heredocs below are quoted ('EOF'), so they run verbatim
# in the app user's login shell — error_exit comes from common_funcs.sh and
# is likely not defined inside that subshell; confirm the failure path.
echo "Creating environment setup for django app..."
if [ "$PYTHON_VERSION" == "3" ]; then
su -l $APPNAME << 'EOF'
pwd
echo "Setting up python virtualenv..."
virtualenv -p python3 . || error_exit "Error installing Python 3 virtual environment to app folder"
EOF
else
su -l $APPNAME << 'EOF'
pwd
echo "Setting up python virtualenv..."
virtualenv . || error_exit "Error installing Python 2 virtual environment to app folder"
EOF
fi
# ###################################################################
# In the new app specific virtual environment:
# 	1. Upgrade pip
# 	2. Install django in it.
# 	3. Create following folders:-
# 		static -- Django static files (to be collected here)
# 		media  -- Django media files
# 		logs   -- nginx, gunicorn & supervisord logs
# 		nginx  -- nginx configuration for this domain
# 		ssl    -- SSL certificates for the domain(NA if LetsEncrypt is used)
# ###################################################################
# Runs as the app user inside its home folder; the quoted 'EOF' heredoc
# prevents variable expansion by this (root) shell.
su -l $APPNAME << 'EOF'
source ./bin/activate
# upgrade pip
# (fixed typo: this previously called the nonexistent "error_exist")
pip install --upgrade pip || error_exit "Error upgrading pip to the latest version"
# install prerequisite python packages for a django app using pip
echo "Installing base python packages for the app..."
# Standard django packages which will be installed. If any of these fail, script will abort
DJANGO_PKGS=('django' 'psycopg2' 'gunicorn' 'setproctitle')
for dpkg in "${DJANGO_PKGS[@]}"
	do
	echo "Installing $dpkg..."
	pip install $dpkg || error_exit "Error installing $dpkg"
done
# create the default folders where we store django app's resources
echo "Creating static file folders..."
mkdir logs nginx run static media || error_exit "Error creating static folders"
# Create the UNIX socket file for WSGI interface
echo "Creating WSGI interface UNIX socket file..."
python -c "import socket as s; sock = s.socket(s.AF_UNIX); sock.bind('./run/gunicorn.sock')"
EOF
# ###################################################################
# Generate Django production secret key
# ###################################################################
echo "Generating Django secret key..."
DJANGO_SECRET_KEY=`openssl rand -base64 48`
if [ $? -ne 0 ]; then
	error_exit "Error creating secret key."
fi
# Persist the key in the app folder; prepare_env.sh reads it back later.
echo $DJANGO_SECRET_KEY > $APPFOLDERPATH/.django_secret_key
chown $APPNAME:$GROUPNAME $APPFOLDERPATH/.django_secret_key

# ###################################################################
# Generate DB password
# ###################################################################
echo "Creating secure password for database role..."
DBPASSWORD=`openssl rand -base64 32`
if [ $? -ne 0 ]; then
	error_exit "Error creating secure password for database role."
fi
# Persisted for prepare_env.sh and used below when altering the PG role.
echo $DBPASSWORD > $APPFOLDERPATH/.django_db_password
chown $APPNAME:$GROUPNAME $APPFOLDERPATH/.django_db_password
# ###################################################################
# Create the script that will init the virtual environment. This
# script will be called from the gunicorn start script created next.
# ###################################################################
echo "Creating virtual environment setup script..."
# Unquoted EOF: variables and the backticked cat commands expand NOW, so
# the generated script embeds the literal secret key and DB password.
cat > /tmp/prepare_env.sh << EOF
DJANGODIR=$APPFOLDERPATH/$APPNAME          # Django project directory
export DJANGO_SETTINGS_MODULE=$APPNAME.settings.production # settings file for the app
export PYTHONPATH=\$DJANGODIR:\$PYTHONPATH
export SECRET_KEY=`cat $APPFOLDERPATH/.django_secret_key`
export DB_PASSWORD=`cat $APPFOLDERPATH/.django_db_password`

cd $APPFOLDERPATH
source ./bin/activate
EOF
mv /tmp/prepare_env.sh $APPFOLDERPATH
chown $APPNAME:$GROUPNAME $APPFOLDERPATH/prepare_env.sh
# ###################################################################
# Create gunicorn start script which will be spawned and managed
# using supervisord.
# ###################################################################
echo "Creating gunicorn startup script..."
# Unquoted EOF: $APPFOLDERPATH/$APPNAME/$GROUPNAME expand now; the
# backslash-escaped \$VARS survive into the generated script and expand
# when gunicorn_start.sh itself runs.
cat > /tmp/gunicorn_start.sh << EOF
#!/bin/bash
# Makes the following assumptions:
#
#  1. All applications are located in a subfolder within /webapps
#  2. Each app gets a dedicated subfolder <appname> under /webapps. This will
#     be referred to as the app folder.
#  3. The group account 'webapps' exists and each app is to be executed
#     under the user account <appname>.
#  4. The app folder and all its recursive contents are owned by
#     <appname>:webapps.
#  5. The django app is stored under /webapps/<appname>/<appname> folder.
#

cd $APPFOLDERPATH
source ./prepare_env.sh

SOCKFILE=$APPFOLDERPATH/run/gunicorn.sock  # we will communicte using this unix socket
USER=$APPNAME                                        # the user to run as
GROUP=$GROUPNAME                                     # the group to run as
NUM_WORKERS=3                                     # how many worker processes should Gunicorn spawn
DJANGO_WSGI_MODULE=$APPNAME.wsgi                     # WSGI module name

echo "Starting $APPNAME as \`whoami\`"

# Create the run directory if it doesn't exist
RUNDIR=\$(dirname \$SOCKFILE)
test -d \$RUNDIR || mkdir -p \$RUNDIR

# Start your Django Unicorn
# Programs meant to be run under supervisor should not daemonize themselves (do not use --daemon)
exec ./bin/gunicorn \${DJANGO_WSGI_MODULE}:application \
--name $APPNAME \
--workers \$NUM_WORKERS \
--user=\$USER --group=\$GROUP \
--bind=unix:\$SOCKFILE \
--log-level=debug \
--log-file=-
EOF

# Move the script to app folder
mv /tmp/gunicorn_start.sh $APPFOLDERPATH
chown $APPNAME:$GROUPNAME $APPFOLDERPATH/gunicorn_start.sh
chmod u+x $APPFOLDERPATH/gunicorn_start.sh
# ###################################################################
# Create the PostgreSQL database and associated role for the app
# Database and role name would be the same as the <appname> argument
# ###################################################################
echo "Creating PostgreSQL role '$APPNAME'..."
# -S -D -R: no superuser, no createdb, no createrole; -w: never prompt.
su postgres -c "createuser -S -D -R -w $APPNAME"
echo "Changing password of database role..."
su postgres -c "psql -c \"ALTER USER $APPNAME WITH PASSWORD '$DBPASSWORD';\""
echo "Creating PostgreSQL database '$APPNAME'..."
su postgres -c "createdb --owner $APPNAME $APPNAME"
# ###################################################################
# Create nginx template in $APPFOLDERPATH/nginx
# ###################################################################
mkdir -p $APPFOLDERPATH/nginx
APPSERVERNAME=$APPNAME
APPSERVERNAME+=_gunicorn
# Unquoted EOF: app-specific variables expand now; \$-escaped nginx
# runtime variables (\$http_host etc.) are written through literally.
cat > $APPFOLDERPATH/nginx/$APPNAME.conf << EOF
upstream $APPSERVERNAME {
    server unix:$APPFOLDERPATH/run/gunicorn.sock fail_timeout=0;
}
server {
    listen 80;
    server_name $DOMAINNAME;

    client_max_body_size 5M;
    keepalive_timeout 5;
    underscores_in_headers on;

    access_log $APPFOLDERPATH/logs/nginx-access.log;
    error_log $APPFOLDERPATH/logs/nginx-error.log;

    location /media  {
        alias $APPFOLDERPATH/media;
    }
    location /static {
        alias $APPFOLDERPATH/static;
    }
    location /static/admin {
       alias $APPFOLDERPATH/lib/python$PYTHON_VERSION_STR/site-packages/django/contrib/admin/static/admin/;
    }
    # This would redirect http site access to HTTPS. Uncomment to enable
    #location / {
    #    rewrite ^ https://\$http_host\$request_uri? permanent;
    #}
    # To make the site pure HTTPS, comment the following section while
    # uncommenting the above section. Also uncoment the HTTPS section
    location / {
        proxy_set_header X-Forwarded-For \$proxy_add_x_forwarded_for;
        proxy_set_header Host \$http_host;
        proxy_redirect off;
        proxy_pass http://$APPSERVERNAME;
    }
}

# Uncomment this if you want to enable HTTPS access. Also, remember to install
# the site certificate, either purcahased or generated.
#server {
#    listen 443 default ssl;
#    server_name $DOMAINNAME;
#
#    client_max_body_size 5M;
#    keepalive_timeout 5;
#
#    ssl_certificate /etc/nginx/ssl/cert_chain.crt;
#    ssl_certificate_key $APPFOLDERPATH/ssl/$DOMAINNAME.key;
#
#    access_log $APPFOLDERPATH/logs/nginx-access.log;
#    error_log $APPFOLDERPATH/logs/nginx-error.log;
#
#    location /media  {
#        alias $APPFOLDERPATH/media;
#    }
#    location /static {
#        alias $APPFOLDERPATH/static;
#    }
#    location /static/admin {
#       alias $APPFOLDERPATH/lib/python$PYTHON_VERSION_STR/site-packages/django/contrib/admin/static/admin/;
#    }
#    location / {
#        proxy_set_header X-Forwarded-For \$proxy_add_x_forwarded_for;
#        proxy_set_header Host \$http_host;
#        proxy_set_header X-Forwarded-Proto \$scheme;
#        proxy_redirect off;
#        proxy_pass http://$APPSERVERNAME;
#    }
#}
EOF
# make a symbolic link to the nginx conf file in sites-enabled
ln -sf $APPFOLDERPATH/nginx/$APPNAME.conf /etc/nginx/sites-enabled/$APPNAME
# ###################################################################
# Setup supervisor
# ###################################################################
# Copy supervisord.conf if it does not exist
if [ ! -f /etc/supervisord.conf ]; then
cp ./supervisord.conf /etc || error_exit "Error copying supervisord.conf"
fi
# Create the supervisor application conf file.
# The program runs the generated gunicorn_start.sh as the app user; stderr
# is folded into the stdout logfile via redirect_stderr.
mkdir -p /etc/supervisor
cat > /etc/supervisor/$APPNAME.conf << EOF
[program:$APPNAME]
command = $APPFOLDERPATH/gunicorn_start.sh
user = $APPNAME
stdout_logfile = $APPFOLDERPATH/logs/gunicorn_supervisor.log
redirect_stderr = true
EOF
# NOTE(review): SUPERVISORD_ACTION is set here and below but is not used by
# the restart logic later in this script (it calls service restart/start
# directly) -- confirm whether it can be removed.
SUPERVISORD_ACTION='reload'
# Create supervisord init.d script that can be controlled with service
if [ ! -f /etc/init.d/supervisord ]; then
echo "Setting up supervisor to autostart during bootup..."
cp ./supervisord /etc/init.d || error_exit "Error copying /etc/init.d/supervisord"
# enable execute flag on the script
chmod +x /etc/init.d/supervisord || error_exit "Error setting execute flag on supervisord"
# create the entries in runlevel folders to autostart supervisord
update-rc.d supervisord defaults || error_exit "Error configuring supervisord to autostart"
SUPERVISORD_ACTION='start'
fi
# Now create a quasi django project that can be run using a GUnicorn script
echo "Installing quasi django project..."
# Scaffold the project as the app user. Unquoted EOF: $APPNAME is expanded
# by THIS shell before the commands are handed to the app user's login shell.
su -l $APPNAME << EOF
source ./bin/activate
django-admin.py startproject $APPNAME
# Change Django's default settings.py to use app/settings/{base.py|dev.py|production.py}
mv $APPNAME/$APPNAME/settings.py $APPNAME/$APPNAME/base.py
mkdir $APPNAME/$APPNAME/settings
mv $APPNAME/$APPNAME/base.py $APPNAME/$APPNAME/settings
EOF
echo "Changing quasi django project settings to production.py..."
# Generate the production settings module. $APPNAME/$DOMAINNAME are expanded
# at generation time; secrets come from env vars at Django runtime.
# NOTE(review): the generated code uses os.environ and ImproperlyConfigured --
# presumably `from .base import *` re-exports `os`; confirm base.py imports
# os and that ImproperlyConfigured is in scope, otherwise this module fails
# on import.
cat > $APPFOLDERPATH/$APPNAME/$APPNAME/settings/production.py << EOF
from .base import *
def get_env_variable(var):
    '''Return the environment variable value or raise error'''
    try:
        return os.environ[var]
    except KeyError:
        error_msg = "Set the {} environment variable".format(var)
        raise ImproperlyConfigured(error_msg)
DEBUG = False
# Note that this is a wildcard specification. So it matches
# smallpearl.com as well as www.smallpearl.com
ALLOWED_HOSTS = ['.$DOMAINNAME']
# CSRF middleware token & session cookie will only be transmitted over HTTPS
CSRF_COOKIE_SECURE = True
SESSION_COOKIE_SECURE = True
# Get secret hash key from environment variable (set by ./prepre_env.sh)
SECRET_KEY = get_env_variable('SECRET_KEY')
# Get production DB password is from environment variable
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql',
        'NAME': '$APPNAME',
        'USER': '$APPNAME',
        'PASSWORD': get_env_variable('DB_PASSWORD'),
        'HOST': 'localhost',
        'PORT': '',
    }
}
# This setting corresponds to NGINX server configuration, which adds this
# to the request headers that is proxied to gunicorn app server.
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
EOF
chown $APPNAME:$GROUPNAME $APPFOLDERPATH/$APPNAME/$APPNAME/settings/production.py
# ###################################################################
# Reload/start supervisord and nginx
# ###################################################################
# Start/reload the supervisord daemon
# NOTE(review): if `set -e` is active earlier in this script, a non-running
# service makes `status` return non-zero and would abort here before the
# if-test -- confirm the script's error mode.
service supervisord status > /dev/null
if [ $? -eq 0 ]; then
# Service is running, restart it
service supervisord restart || error_exit "Error restarting supervisord"
else
# Service is not running, probably it's been installed first. Start it
service supervisord start || error_exit "Error starting supervisord"
fi
# Reload nginx so that requests to domain are redirected to the gunicorn process
nginx -s reload || error_exit "Error reloading nginx. Check configuration files"
echo "Done!"
|
#!/usr/bin/env bash
# Copyright 2021 The Crossplane Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Please set ProviderNameLower & ProviderNameUpper environment variables before running this script.
# See: https://github.com/crossplane/terrajet/blob/main/docs/generating-a-provider.md
set -euo pipefail
# Pathspec for the rename: everything except the build system, go.* files
# and this script itself (:! entries are git pathspec excludes).
REPLACE_FILES='./* ./.github :!build/** :!go.* :!hack/prepare.sh'
# Rename every occurrence of the template provider name (lower & upper case).
# shellcheck disable=SC2086
git grep -l 'template' -- ${REPLACE_FILES} | xargs sed -i.bak "s/template/${ProviderNameLower}/g"
# shellcheck disable=SC2086
git grep -l 'Template' -- ${REPLACE_FILES} | xargs sed -i.bak "s/Template/${ProviderNameUpper}/g"
# We need to be careful while replacing "template" keyword in go.mod as it could tamper
# some imported packages under require section.
sed -i.bak "s/provider-jet-template/provider-jet-${ProviderNameLower}/g" go.mod
# Clean up the .bak files created by sed
# NOTE(review): `git clean -fd` removes ALL untracked files and directories,
# not just the .bak backups -- confirm this is acceptable for users running
# the script in a dirty working tree.
git clean -fd
# Rename the template-specific files and image directories in git.
git mv "internal/clients/template.go" "internal/clients/${ProviderNameLower}.go"
git mv "cluster/images/provider-jet-template" "cluster/images/provider-jet-${ProviderNameLower}"
git mv "cluster/images/provider-jet-template-controller" "cluster/images/provider-jet-${ProviderNameLower}-controller"
# We need to remove this api folder otherwise first `make generate` fails with
# the following error probably due to some optimizations in go generate with v1.17:
# generate: open /Users/hasanturken/Workspace/crossplane-contrib/provider-jet-template/apis/null/v1alpha1/zz_generated.deepcopy.go: no such file or directory
rm -rf apis/null
|
# Redeploy the zentao-kpi container from the latest registry image.
IMAGE=registry.cn-shenzhen.aliyuncs.com/kuaima/zentao-kpi:latest

# Find existing containers by name via docker's own filter. The previous
# `docker ps -a | grep "zentao-kpi*"` mixed shell-glob and regex syntax:
# in a regex, "zentao-kpi*" means "zentao-kp followed by zero or more i's",
# so it also matched unrelated names like "zentao-kp".
OLD=$(docker ps -aq --filter "name=zentao-kpi")
# Only call `docker rm` when something matched; `docker rm -f` with an
# empty argument list exits non-zero.
if [ -n "$OLD" ]; then
docker rm -f $OLD
fi

docker pull "$IMAGE"
docker run -d --name="zentao-kpi" -m 2G -e JAVA_OPS="-Xms512m -Xmx2024m" -e PROFILES="--spring.profiles.active=verify " -p 5200:5200 "$IMAGE"

docker logs $(docker ps -aq --filter "name=zentao-kpi")
|
#!/bin/bash -l
# ==============================================================================
# SUMMARY
# ==============================================================================
# Daily files to 1971-2000 to 2070-2099 signals and scaled signals for water temperature
# =======================================================================
# INITIALIZATION
# =======================================================================
# NOTE(review): the four directory variables below are intentionally left
# empty and must be filled in before running; with them empty, the cdo
# commands later operate on malformed paths.
# set output directory
outDIR=
# set starting directory
inDIR=
# set working directory
workDIR=
# set depth directory for indexing fields
deDIR=
# Define settings
# Each flag below selects one entry of the corresponding array further down.
flag_sect=1; # 0: all sectors
# 1: lakes_global
flag_models=1; # 0: CLM45
# 1: ALBM
# 2: LAKE
# 3: SIMSTRAT-UoG
# 4: VIC-LAKE
flag_scen=1; # 0: pre-industrial
# 1: historical
# 2: future
flag_tstep=1; # 0: daily
# 1: monthly
# 2: annual
flag_var=0; # 0: watertemp
# 1: lakeicefrac
# 2: icethick
flag_endvar=0; # 0: icestart
# 1: iceend
# 2: icedur
flag_opt=0; # 0: 2005soc_co2
# 1: 1860soc_co2
# 2: nosoc_co2 (for simstrat hist/future)
flag_prod=1; # 0: fldmean
# 1: sig
# 2: eval
# define all possible sectors
sectors=("all" "lakes_global")
# define all possible models (top list; folder style, bottom list; file name style)
models=("CLM45" "ALBM" "LAKE" "SIMSTRAT-UoG" "VIC-LAKE")
model_fnames=("clm45" "albm" "lake" "simstrat-uog" "vic-lake")
# scenario folders
scenario_folders=("historical" "future")
#define all RCPs for future scenario
rcps=("rcp26" "rcp60" "rcp85")
# define lake variables
variables=("watertemp" "lakeicefrac" "icethick")
#define end lake variables
endvariables=("icestart" "iceend" "icedur")
# define forcing
forcing=("gfdl-esm2m" "hadgem2-es" "ipsl-cm5a-lr" "miroc5")
# define timestep
timesteps=("daily" "monthly")
# define all options
options=("2005soc_co2" "1860soc_co2" "nosoc_co2")
# seasons
seasons=("DJF" "MAM" "JJA" "SON")
# define end products
products=("fldmean" "sig" "eval")
# scaling arrays (RCP 2.6 6.0 8.5 global mean air temperature changes per GCM)
# Index order matches the rcps array above.
gfdl_scalers=(0.76875 1.7713 2.76895)
hadgem2_scalers=(1.52805 3.24965 4.81365)
ipsl_scalers=(1.43065 2.7116 4.60615)
miroc5_scalers=(1.20695 2.15395 3.47205)
# set run settings based on flags
tstep=${timesteps[$flag_tstep]}
model=${models[$flag_models]}
model_fname=${model_fnames[$flag_models]}
var=${variables[$flag_var]}
endvar=${endvariables[$flag_endvar]}
opt=${options[$flag_opt]}
prod=${products[$flag_prod]}
# periods for signal
hist_periods=("1971_1980" "1981_1990" "1991_2000")
fut_periods=("2061_2070" "2071_2080" "2081_2090" "2091_2099")
# ==============================================================================
# FUNCTIONS
# ==============================================================================
# Select the depth level used for the signal computation.
#   $1: input NetCDF file
#   $2: output NetCDF file
# The extraction method depends on the lake model that produced the file.
lev_indexer(){
    case "$model_fname" in
        clm45|simstrat-uog|vic-lake)
            # These models use a plain level index: take the 3rd level.
            cdo -O sellevidx,3 $1 $2
            ;;
        albm)
            # ALBM needs a 3D vertical interpolation onto the index field.
            cdo -O intlevel3d,$deDIR/albm_og_field.nc $1 $deDIR/albm_index_field.nc $2
            ;;
        lake|gotm)
            echo "No system for indexing yet"
            ;;
    esac
}
# ==============================================================================
# PROCESSING
# ==============================================================================
# go into climate directory
cd $inDIR
pwd
# ==============================================================================
# HISTORICAL/FUTURE AVERAGES
# ==============================================================================
echo "calculating hist/future watertemp for seasons"
for force in "${forcing[@]}"; do
echo $force
for scen_folder in "${scenario_folders[@]}"; do
if [ "$tstep" == "daily" ]
then
if [ "$scen_folder" == "historical" ]
then scen="historical"
for per in "${hist_periods[@]}"; do
cdo monmean $inDIR/$model/$force/$scen_folder/${model_fname}_${force}_ewembi_${scen}_${opt}_${var}_global_${tstep}_${per}.nc4 $workDIR/step_a_$per.nc
lev_indexer $workDIR/step_a_$per.nc $workDIR/step_b_$per.nc
done
cdo mergetime $workDIR/step_b_*.nc $workDIR/step_b.nc
rm $workDIR/step_a_*.nc
# historical seasons
cdo -L timmean -selmon,12,1,2 -seldate,1971-01-01T00:00:00,2000-12-31T00:00:00 $workDIR/step_b.nc $workDIR/DJF_dummy_${force}_step1.nc
cdo -L timmean -selmon,3/5 -seldate,1971-01-01T00:00:00,2000-12-31T00:00:00 $workDIR/step_b.nc $workDIR/MAM_dummy_${force}_step1.nc
cdo -L timmean -selmon,6/8 -seldate,1971-01-01T00:00:00,2000-12-31T00:00:00 $workDIR/step_b.nc $workDIR/JJA_dummy_${force}_step1.nc
cdo -L timmean -selmon,9/11 -seldate,1971-01-01T00:00:00,2000-12-31T00:00:00 $workDIR/step_b.nc $workDIR/SON_dummy_${force}_step1.nc
rm $workDIR/step_b.nc
elif [ "$scen_folder" == "future" ]
then
for rcp in "${rcps[@]}"; do
for per in "${fut_periods[@]}"; do
cdo monmean $inDIR/$model/$force/$scen_folder/${model_fname}_${force}_ewembi_${rcp}_${opt}_${var}_global_${tstep}_${per}.nc4 $workDIR/step_a_$per.nc
lev_indexer $workDIR/step_a_$per.nc $workDIR/step_b_$per.nc
done
cdo mergetime $workDIR/step_b_*.nc $workDIR/step_b.nc
rm $workDIR/step_a_*.nc
# future seasons
cdo -L timmean -selmon,12,1,2 -seldate,2070-01-01T00:00:00,2099-12-31T00:00:00 $workDIR/step_b.nc $workDIR/DJF_dummy_${force}_${rcp}_step1.nc
cdo -L timmean -selmon,3/5 -seldate,2070-01-01T00:00:00,2099-12-31T00:00:00 $workDIR/step_b.nc $workDIR/MAM_dummy_${force}_${rcp}_step1.nc
cdo -L timmean -selmon,6/8 -seldate,2070-01-01T00:00:00,2099-12-31T00:00:00 $workDIR/step_b.nc $workDIR/JJA_dummy_${force}_${rcp}_step1.nc
cdo -L timmean -selmon,9/11 -seldate,2070-01-01T00:00:00,2099-12-31T00:00:00 $workDIR/step_b.nc $workDIR/SON_dummy_${force}_${rcp}_step1.nc
rm $workDIR/step_b.nc
done
fi
elif [ "$tstep" == "monthly" ]
then
if [ "$scen_folder" == "historical" ]
then scen="historical"
per="1861_2005"
lev_indexer $inDIR/$model/$force/$scen_folder/${model_fname}_${force}_ewembi_${scen}_${opt}_${var}_global_${tstep}_${per}.nc4 $workDIR/step_a_${per}.nc
# historical seasons
cdo -L timmean -selmon,12,1,2 -seldate,1971-01-01T00:00:00,2000-12-31T00:00:00 $workDIR/step_a_${per}.nc $workDIR/DJF_dummy_${force}_step1.nc
cdo -L timmean -selmon,3/5 -seldate,1971-01-01T00:00:00,2000-12-31T00:00:00 $workDIR/step_a_${per}.nc $workDIR/MAM_dummy_${force}_step1.nc
cdo -L timmean -selmon,6/8 -seldate,1971-01-01T00:00:00,2000-12-31T00:00:00 $workDIR/step_a_${per}.nc $workDIR/JJA_dummy_${force}_step1.nc
cdo -L timmean -selmon,9/11 -seldate,1971-01-01T00:00:00,2000-12-31T00:00:00 $workDIR/step_a_${per}.nc $workDIR/SON_dummy_${force}_step1.nc
elif [ "$scen_folder" == "future" ]
then per="2006_2099"
for rcp in "${rcps[@]}"; do
lev_indexer $inDIR/$model/$force/$scen_folder/${model_fname}_${force}_ewembi_${rcp}_${opt}_${var}_global_${tstep}_${per}.nc4 $workDIR/step_a_${rcp}_${per}.nc
# future seasons
cdo -L timmean -selmon,12,1,2 -seldate,2070-01-01T00:00:00,2099-12-31T00:00:00 $workDIR/step_a_${rcp}_${per}.nc $workDIR/DJF_dummy_${force}_${rcp}_step1.nc
cdo -L timmean -selmon,3/5 -seldate,2070-01-01T00:00:00,2099-12-31T00:00:00 $workDIR/step_a_${rcp}_${per}.nc $workDIR/MAM_dummy_${force}_${rcp}_step1.nc
cdo -L timmean -selmon,6/8 -seldate,2070-01-01T00:00:00,2099-12-31T00:00:00 $workDIR/step_a_${rcp}_${per}.nc $workDIR/JJA_dummy_${force}_${rcp}_step1.nc
cdo -L timmean -selmon,9/11 -seldate,2070-01-01T00:00:00,2099-12-31T00:00:00 $workDIR/step_a_${rcp}_${per}.nc $workDIR/SON_dummy_${force}_${rcp}_step1.nc
done
fi
fi
done
done
# ==============================================================================
# SIGNALS
# ==============================================================================
echo "regular and scaled signals"
for rcp in "${rcps[@]}"; do
# Map the RCP name back to its index in the scaler arrays.
if [ "$rcp" == "rcp26" ]; then flag=0
elif [ "$rcp" == "rcp60" ]; then flag=1
elif [ "$rcp" == "rcp85" ]; then flag=2
fi
echo $flag
echo $rcp
for force in "${forcing[@]}"; do
# Pick this GCM's global-mean warming for the current RCP.
if [ "$force" == "gfdl-esm2m" ]; then scaler=${gfdl_scalers[$flag]}
elif [ "$force" == "hadgem2-es" ]; then scaler=${hadgem2_scalers[$flag]}
elif [ "$force" == "ipsl-cm5a-lr" ]; then scaler=${ipsl_scalers[$flag]}
elif [ "$force" == "miroc5" ]; then scaler=${miroc5_scalers[$flag]}
fi
echo $force
for season in "${seasons[@]}"; do
# subtract for regular watertemp signal
cdo sub $workDIR/${season}_dummy_${force}_${rcp}_step1.nc $workDIR/${season}_dummy_${force}_step1.nc $workDIR/${season}_dummy_${force}_${rcp}_step2.nc
# divide for scaled watertemp signal
cdo divc,$scaler $workDIR/${season}_dummy_${force}_${rcp}_step2.nc $workDIR/${season}_dummy_${force}_${rcp}_step3.nc
rm $workDIR/${season}_dummy_${force}_${rcp}_step1.nc
done
done
done
# ==============================================================================
# ENSEMBLE MEANS OF SIGNALS
# ==============================================================================
echo "ensemble means"
for rcp in "${rcps[@]}"; do
echo $rcp
for season in "${seasons[@]}"; do
# ensemble mean of regular watertemp signals
cdo -O ensmean $workDIR/${season}_dummy_*_${rcp}_step2.nc $workDIR/${model_fname}_${rcp}_${var}_${season}_${prod}.nc
# ncwa drops the degenerate time/level dimensions before final output.
ncwa -C -v lat,lon,$var -a time,levlak,lev $workDIR/${model_fname}_${rcp}_${var}_${season}_${prod}.nc $outDIR/${model_fname}_${rcp}_${var}_${season}_${prod}.nc
# ensemble mean of scaled watertemp signals
cdo -O ensmean $workDIR/${season}_dummy_*_${rcp}_step3.nc $workDIR/${model_fname}_${rcp}_${var}_${season}_scaled_${prod}.nc
ncwa -C -v lat,lon,$var -a time,levlak,lev $workDIR/${model_fname}_${rcp}_${var}_${season}_scaled_${prod}.nc $outDIR/${model_fname}_${rcp}_${var}_${season}_scaled_${prod}.nc
rm $workDIR/${season}_dummy_*_${rcp}_step2.nc
rm $workDIR/${season}_dummy_*_${rcp}_step3.nc
done
done
# ==============================================================================
# CLEANUP
# ==============================================================================
echo "cleanup"
rm $workDIR/*.nc
|
import numpy as np

# Input data: each row is an observation, each column a feature.
matrix = np.array([[1.0, 2.0, 3.0],
                   [2.0, 4.0, 6.0],
                   [3.0, 6.0, 9.0]])

# Standardize every column to zero mean and unit variance (z-score):
# subtract the column mean, divide by the (population) column std.
means = np.mean(matrix, axis=0)
std = np.std(matrix, axis=0)
normalized_matrix = (matrix - means) / std

# Show the standardized matrix.
print(normalized_matrix)
|
import scipy.optimize

# Objective: minimize f(x) = -x + 5 (i.e. maximize x) subject to 3 <= x <= 8.
def obj_func(x):
    return -x + 5

# SciPy expects each inequality constraint as a dict {'type': 'ineq',
# 'fun': g} with g(x) >= 0 enforced. The previous code passed a bare
# function returning a tuple, which makes scipy.optimize.minimize raise
# a TypeError before any optimization happens.
constraints = (
    {'type': 'ineq', 'fun': lambda x: x - 3},   # x >= 3
    {'type': 'ineq', 'fun': lambda x: 8 - x},   # x <= 8
)

# Solve; with constraints given, minimize() defaults to the SLSQP method.
opt = scipy.optimize.minimize(obj_func,
                              x0=0,
                              constraints=constraints)

# print the solution (expected: x ~= 8, the upper bound)
print("Optimal solution is: ", opt.x)
|
#!/usr/bin/env python3
import queries
import colorama
import colors
def main():
    """Print the app banner and run the demo queries."""
    # Initialise ANSI colour handling (required for colours on Windows terminals).
    colorama.init()
    # Banner styled via the project-local colors module.
    print(colors.bold_color + "MongoDB Schema Performance app by @mkennedy")
    print(colors.subdue_color + 'https://github.com/mikeckennedy/mongodb_schema_design_mannheim')
    print()
    # Run the demo workload defined in the project-local queries module.
    queries.run()
if __name__ == '__main__':
    main()
|
<reponame>lananh265/social-network
"use strict";

// Auto-generated icon data module (CommonJS output of an ES-module build).
// Exports the u26AA ("medium white circle") glyph as an SVG description:
// a viewBox plus a single <path> child with its path data.
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.u26AA = void 0;
var u26AA = {
  "viewBox": "0 0 2600 2760.837",
  "children": [{
    "name": "path",
    "attribs": {
      "d": "M1465 1566.5q-69 68.5-165 68.5t-165-68.5-69-165.5q0-96 69-165t165-69 165 69 69 165q0 97-69 165.5zm-83-247.5q-34-34-82-34t-82 34-34 82 34 82 82 34 82-34 34-82-34-82z"
    },
    "children": []
  }]
};
exports.u26AA = u26AA;
|
// background.js
// background.js
// Decide whether a web request should be cancelled based on its hostname.
// Returns a chrome.webRequest BlockingResponse ({ cancel: boolean }).
function blockWebsites(details) {
  const url = new URL(details.url);
  // list of websites that should be blocked
  // (replace with real websites)
  const blockedWebsites = ['example.com', 'another-example.com'];
  // Block only when the hostname IS the blocked domain or a subdomain of it.
  // The previous substring test (hostname.includes(site)) also blocked
  // unrelated hosts such as "myexample.com" or "example.com.evil.net".
  const block = blockedWebsites.some(
    (site) => url.hostname === site || url.hostname.endsWith('.' + site)
  );
  return { cancel: block };
}
// Register the blocker for every request; the 'blocking' option makes the
// listener's return value authoritative.
// NOTE(review): blocking webRequest needs the "webRequest"/"webRequestBlocking"
// permissions and Manifest V2 -- confirm the extension manifest matches.
chrome.webRequest.onBeforeRequest.addListener(
  blockWebsites,
  { urls: ['<all_urls>'] },
  ['blocking']
);
|
import { IWalletOutput } from "./IWalletOutput";
/**
 * Associates a wallet address with the outputs held at that address.
 */
export interface IWalletAddressOutput {
    /**
     * The address, in its serialized string form.
     */
    address: string;
    /**
     * The outputs currently associated with this address.
     */
    outputs: IWalletOutput[];
}
|
#include <string>
// Holds basic metadata about a compute cluster: its name, node count and
// aggregate memory capacity, plus a helper to derive per-node memory.
class cluster_info {
private:
    std::string cluster_name;       // human-readable cluster identifier
    int num_nodes;                  // number of nodes in the cluster
    double total_memory_capacity;   // aggregate memory across all nodes

public:
    // Default-construct with a placeholder name and empty capacity figures.
    cluster_info() : cluster_name("DefaultCluster"), num_nodes(0), total_memory_capacity(0.0) {}

    // --- cluster name -------------------------------------------------
    void set_cluster_name(const std::string& new_name) { cluster_name = new_name; }
    std::string get_cluster_name() const { return cluster_name; }

    // --- node count ---------------------------------------------------
    void set_num_nodes(int node_count) { num_nodes = node_count; }
    int get_num_nodes() const { return num_nodes; }

    // --- memory capacity ----------------------------------------------
    void set_total_memory_capacity(double mem_capacity) { total_memory_capacity = mem_capacity; }
    double get_total_memory_capacity() const { return total_memory_capacity; }

    // Average memory per node; an empty cluster reports 0.0 instead of
    // dividing by zero.
    double calculate_average_memory_per_node() const {
        return num_nodes == 0 ? 0.0 : total_memory_capacity / num_nodes;
    }
};
|
#!/bin/bash
set -e

# Load configuration for current environment.
if [ -f .env ]; then
source .env
else
echo "Missing .env file!"
exit 1
fi

# Deployment target: CLI arguments override the .env-provided defaults.
HOST=${1:-$LINODE_HOST}
USER=${2:-$LINODE_USER}
BASEDIR=$(dirname "$0")
DEPLOYIGNORE=$BASEDIR/.deployignore
BUILD_DIR="./wp/"

# Quote the expansions: the previous unquoted `[ -z $HOST ]` broke with a
# test syntax error (and aborted via set -e) whenever the value contained
# whitespace.
if [ -z "$HOST" ] || [ -z "$USER" ]; then
echo "Please pass the Linode SFTP host and user name for your site, or define them using \$LINODE_HOST and \$LINODE_USER env variables."
exit 1
fi

echo "Deploying WordPress theme and core..."
# -rlvz: recursive, preserve symlinks, verbose, compress.
# --delete-after removes remote files absent locally, after the transfer.
rsync \
  -rlvz \
  --exclude-from="$DEPLOYIGNORE" \
  --ipv4 \
  --delete-after \
  -e 'ssh -o StrictHostKeyChecking=no' \
  --temp-dir=/tmp/ \
  "$BUILD_DIR" \
  "$USER@$HOST:/var/www/bethandnick"
|
/* ---------------------------------------------------------------------------
//
// CodeFinder
//
// Copyright (C) 2020 Instituto de Telecomunicações (www.it.pt)
// Copyright (C) 2020 Universidade da Beira Interior (www.ubi.pt)
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
//
// 'images: Flaticon.com'. The logo of the application has been designed
// using resources from Flaticon.com.
// ---------------------------------------------------------------------------
*/
package com.teaching.codefinder;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.List;
/**
 * Reads a CSV stream of students into a list of {@link Student} objects.
 */
public class Reader {

    /** Stream containing the CSV data; closed once {@link #read()} finishes. */
    InputStream inputStream;

    /** Creates a reader over the given CSV input stream. */
    public Reader(InputStream inputStream){
        this.inputStream = inputStream;
    }

    /**
     * Read the students CSV file into a list of students and return it.
     * Each line must hold at least three comma-separated fields; note the
     * Student constructor deliberately receives columns in (0, 2, 1) order.
     * The underlying input stream is always closed, even on failure.
     */
    public List<Student> read(){
        // Parameterized types replace the raw List/ArrayList of the
        // previous version (unchecked-conversion warnings, no type safety).
        List<Student> resultList = new ArrayList<>();
        BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream));
        try {
            String csvLine;
            while ((csvLine = reader.readLine()) != null) {
                String[] row = csvLine.split(",");
                resultList.add(new Student(row[0], row[2], row[1]));
            }
        }
        catch (IOException ex) {
            // Keep the original exception as the cause instead of flattening
            // it into the message string (preserves the stack trace).
            throw new RuntimeException("Error while reading CSV file: " + ex, ex);
        }
        finally {
            try {
                inputStream.close();
            }
            catch (IOException e) {
                throw new RuntimeException("Error while closing input stream: " + e, e);
            }
        }
        return resultList;
    }
}
|
<filename>app/controllers/subscriptions_controller.rb
class SubscriptionsController < ApplicationController
  before_action(:authenticate_user!)

  # Subscribe the current user to the category given by params[:id].
  def create
    # Subscription.create already persists the record; the previous
    # `.create(...).save()` saved it a second time (a redundant no-op)
    # and assigned the result to an unused local variable.
    Subscription.create(
      user: current_user,
      category: Category.find(params[:id])
    )
    redirect_back(fallback_location: forum_path)
  end

  # Remove the current user's subscription(s) for the given category.
  def delete
    # NOTE(review): delete_all skips ActiveRecord callbacks -- presumably
    # intentional for a plain join record; confirm no callbacks exist.
    Subscription.where(category: Category.find(params[:id]), user: current_user).delete_all
    redirect_back(fallback_location: forum_path)
  end
end
|
/// <reference types="cypress" />
// E2E smoke tests for the BTC2x-FLI product page.
// NOTE(review): the .sc-* selectors are generated styled-components class
// names and are brittle across builds; prefer data-cy attributes.
describe('BTC2x-FLI', () => {
  before(() => {
    cy.visit('http://localhost:3000/btcfli')
  })

  context('Product Header', () => {
    it('should show product symbol', () => {
      cy.get('[data-cy=token-symbol]').should('contain', 'BTC2x-FLI')
    })
    it('should show product name', () => {
      cy.get('[data-cy=token-name]').should(
        'contain',
        'Bitcoin 2x Flexible Leverage Index'
      )
    })
    it('should show product price', () => {
      cy.get('.sc-jhDJEt').should('not.be.empty')
    })
    it('should show product percent change', () => {
      cy.get('.sc-jhDJEt').should('not.be.empty')
    })
  })

  context('Product Metadata', () => {
    it('should show real leverage', () => {
      // Fixed: the selector was missing its closing bracket
      // ('[data-cy=real-leverage-label'), a malformed attribute selector
      // that makes cy.get() throw a selector syntax error.
      cy.get('[data-cy=real-leverage-label]').should('contain', 'Real Leverage')
      cy.get('[data-cy=real-leverage-value]').should('contain', 'x')
    })
    it('should show target leverage', () => {
      cy.get('[data-cy=target-leverage-label]').should(
        'contain',
        'Target Leverage'
      )
      cy.get('[data-cy=target-leverage-value]').should('contain', '2x')
    })
    it('should show current supply', () => {
      cy.get('[data-cy=current-supply-label]').should(
        'contain',
        'Current Supply'
      )
      cy.get('[data-cy=current-supply-value]').should('not.be.empty')
    })
    it('should show NAV', () => {
      cy.get('[data-cy=net-asset-value-label]').should(
        'contain',
        'Net Asset Value'
      )
      cy.get('[data-cy=net-asset-value-value]').should('not.be.empty')
    })
    it('should show prem/discount', () => {
      cy.get(':nth-child(5) > .sc-hndLF').should('not.be.empty')
      cy.get(':nth-child(5) > .sc-geBCVM').should('not.be.empty')
    })
  })

  context('Product Market Data', () => {
    it('should render chart', () => {
      cy.get('.recharts-surface').should('not.be.empty')
    })
    it('should have all date range selectors', () => {
      // 5 date range selectors and 4 spacers between. 5 + 4 = 9
      cy.get('[data-cy=date-range-selector]')
        .children()
        .should('have.length', 9)
    })
  })

  context('Buy/Sell Widget', () => {
    it('should render', () => {
      cy.get('[data-cy=buy-sell-selector]').should('contain', 'Buy')
      cy.get('[data-cy=buy-sell-selector]').should('contain', 'Sell')
      cy.get('[data-cy=buy-sell-selector]').should('contain', 'Pay with')
      cy.get('[data-cy=buy-sell-selector]').should('contain', 'Buy (estimated)')
    })
  })

  context('Product Stats', () => {
    it('should have a title', () => {
      cy.get('[data-cy=stats]').should('contain', 'Stats')
    })
    it('should render values', () => {
      cy.get(':nth-child(1) > .sc-liAPKD').should('not.be.empty')
      cy.get(':nth-child(1) > .sc-fiCYzP').should('not.be.empty')
      cy.get(':nth-child(2) > .sc-hfVBHA').should('not.be.empty')
      cy.get(':nth-child(2) > .sc-fiCYzP').should('not.be.empty')
      cy.get(':nth-child(3) > .sc-hfVBHA').should('not.be.empty')
      cy.get(':nth-child(3) > .sc-fiCYzP').should('not.be.empty')
      cy.get(':nth-child(4) > .sc-hfVBHA').should('not.be.empty')
      // Fixed: this line previously duplicated the .sc-hfVBHA label check;
      // every sibling pair checks .sc-fiCYzP for the value element.
      cy.get(':nth-child(4) > .sc-fiCYzP').should('not.be.empty')
      cy.get(':nth-child(5) > .sc-hfVBHA').should('not.be.empty')
      cy.get(':nth-child(5) > .sc-fiCYzP').should('not.be.empty')
    })
  })

  context('My Assets', () => {
    it('should have a title', () => {
      cy.get('[data-cy=my-assets]').should('contain', 'My Assets')
    })
    it('should render values', () => {
      cy.get('[data-cy=my-assets-token-balance]').should('not.be.empty')
      cy.get('[data-cy=my-assets-token-balance]').should('contain', 'BTC2x-FLI')
    })
    it('should contain MetaMask button', () => {
      cy.get('.sc-kJNqyW').should('not.be.empty')
    })
  })

  context('Product Changes', () => {
    it('should have a title', () => {
      cy.get('[data-cy=changes]').should('contain', 'Changes')
    })
    it('should render values', () => {
      cy.get(':nth-child(1) > .sc-bTJQgd').should('not.be.empty')
      cy.get(':nth-child(1) > .sc-hQYpqk').should('not.be.empty')
      cy.get(':nth-child(2) > .sc-bTJQgd').should('not.be.empty')
      cy.get(':nth-child(2) > .sc-hQYpqk').should('not.be.empty')
      cy.get(':nth-child(3) > .sc-bTJQgd').should('not.be.empty')
      cy.get(':nth-child(3) > .sc-hQYpqk').should('not.be.empty')
      cy.get(':nth-child(4) > .sc-bTJQgd').should('not.be.empty')
      cy.get(':nth-child(4) > .sc-hQYpqk').should('not.be.empty')
    })
  })

  context('Product Allocations', () => {
    it('should have a title', () => {
      cy.get('[data-cy=allocations]').should('contain', 'Allocations')
    })
    it('should render allocations', () => {
      cy.get('.sc-jVSGNQ > :nth-child(2) > .sc-QxirK').should('not.be.empty')
      cy.get(':nth-child(3) > .sc-QxirK').should('not.be.empty')
      cy.get(':nth-child(4) > .sc-QxirK').should('not.be.empty')
      cy.get('.sc-jVSGNQ > :nth-child(5)').should('not.be.empty')
      cy.get('.sc-jVSGNQ > :nth-child(6)').should('not.be.empty')
    })
  })

  context('Product Content', () => {
    it('should not be empty', () => {
      cy.get('.sc-gVtoEh > :nth-child(5)').should('not.be.empty')
    })
  })
})
|
#!/bin/sh
set -eux
# When IS_CONTAINER is set (by the docker invocation below) we are already
# inside the test image; otherwise we relaunch ourselves in it.
IS_CONTAINER=${IS_CONTAINER:-false}
CONTAINER_RUNTIME="${CONTAINER_RUNTIME:-docker}"
if [ "${IS_CONTAINER}" != "false" ]; then
# Inside the container: run the unit tests from a writable copy of the
# source tree, because the host tree is mounted read-only (:ro below).
export XDG_CACHE_HOME=/tmp/.cache
mkdir /tmp/unit
cp -r . /tmp/unit
cp -r /usr/local/kubebuilder/bin /tmp/unit/hack/tools
cd /tmp/unit
make test
else
# On the host: re-invoke this same script inside the unit-test container.
"${CONTAINER_RUNTIME}" run --rm \
--env IS_CONTAINER=TRUE \
--volume "${PWD}:/go/src/github.com/metal3-io/hardware-classification-controller:ro,z" \
--entrypoint sh \
--workdir /go/src/github.com/metal3-io/hardware-classification-controller \
quay.io/metal3-io/capm3-unit:master \
/go/src/github.com/metal3-io/hardware-classification-controller/hack/unit.sh "${@}"
fi;
|
#!/bin/bash
# run-shellcheck
#
# CIS Debian Hardening
#
#
# 5.2.15 Ensure only strong Key Exchange algorithms are used (Scored)
#
set -e # One error, it's over
set -u # One variable unset, it's over
# shellcheck disable=2034
HARDENING_LEVEL=2
# shellcheck disable=2034
DESCRIPTION="Checking key exchange ciphers."
PACKAGE='openssh-server'
# OPTIONS is intentionally empty here; it is populated at runtime from the
# configuration emitted by create_config() (OPTIONS="KexAlgorithms=...").
OPTIONS=''
FILE='/etc/ssh/sshd_config'
# This function will be called if the script status is on enabled / audit mode
# Helpers (is_pkg_installed, ok, crit, FNRET, ...) come from lib/main.sh,
# sourced at the bottom of this script.
audit() {
is_pkg_installed "$PACKAGE"
if [ "$FNRET" != 0 ]; then
ok "$PACKAGE is not installed!"
else
ok "$PACKAGE is installed"
# Check each expected sshd option, parsed as PARAM=VALUE pairs.
for SSH_OPTION in $OPTIONS; do
SSH_PARAM=$(echo "$SSH_OPTION" | cut -d= -f 1)
SSH_VALUE=$(echo "$SSH_OPTION" | cut -d= -f 2)
# The option must appear at the start of a line: PARAM then VALUE.
PATTERN="^${SSH_PARAM}[[:space:]]*$SSH_VALUE"
does_pattern_exist_in_file_nocase "$FILE" "$PATTERN"
if [ "$FNRET" = 0 ]; then
ok "$PATTERN is present in $FILE"
else
crit "$PATTERN is not present in $FILE"
fi
done
fi
}
# This function will be called if the script status is on enabled mode
apply() {
is_pkg_installed "$PACKAGE"
if [ "$FNRET" = 0 ]; then
ok "$PACKAGE is installed"
else
crit "$PACKAGE is absent, installing it"
apt_install "$PACKAGE"
fi
# Ensure every expected sshd option (PARAM=VALUE pairs) is present with
# the expected value, fixing the file when needed.
for SSH_OPTION in $OPTIONS; do
SSH_PARAM=$(echo "$SSH_OPTION" | cut -d= -f 1)
SSH_VALUE=$(echo "$SSH_OPTION" | cut -d= -f 2)
PATTERN="^${SSH_PARAM}[[:space:]]*$SSH_VALUE"
does_pattern_exist_in_file_nocase "$FILE" "$PATTERN"
if [ "$FNRET" = 0 ]; then
ok "$PATTERN is present in $FILE"
else
warn "$PATTERN is not present in $FILE, adding it"
# Distinguish "option absent" (append) from "option present with a
# different value" (in-place replacement).
does_pattern_exist_in_file_nocase "$FILE" "^${SSH_PARAM}"
if [ "$FNRET" != 0 ]; then
add_end_of_file "$FILE" "$SSH_PARAM $SSH_VALUE"
else
info "Parameter $SSH_PARAM is present but with the wrong value -- Fixing"
replace_in_file "$FILE" "^${SSH_PARAM}[[:space:]]*.*" "$SSH_PARAM $SSH_VALUE"
fi
# Reload sshd so the corrected option takes effect.
/etc/init.d/ssh reload >/dev/null 2>&1
fi
done
}
# Emit this check's default configuration (read by the hardening framework).
# Picks the Key Exchange algorithm list according to the Debian release.
create_config() {
    # DEB_MAJ_VER may be unset/empty when this runs; relax `set -u` around it.
    set +u
    debug "Debian version : $DEB_MAJ_VER "
    # Old releases (Debian <= 7 / wheezy) ship an OpenSSH without
    # curve25519/ECDH support, so they only get the SHA-256 group-exchange
    # KEX; newer releases get the full strong-KEX list.
    # BUGFIX: the previous test (`[[ 7 -le "$DEB_MAJ_VER" ]]`) had the
    # comparison inverted, handing the restricted list to NEW systems and
    # the modern list to old ones.
    if [[ "$DEB_MAJ_VER" -le 7 ]]; then
        KEX='diffie-hellman-group-exchange-sha256'
    else
        KEX='curve25519-sha256,curve25519-sha256@libssh.org,diffie-hellman-group14-sha256,diffie-hellman-group16-sha512,diffie-hellman-group18-sha512,ecdh-sha2-nistp521,ecdh-sha2-nistp384,ecdh-sha2-nistp256,diffie-hellman-group-exchange-sha256'
    fi
    set -u
    cat <<EOF
status=audit
# Put your KexAlgorithms
OPTIONS="KexAlgorithms=$KEX"
EOF
}
# This function will check config parameters required
# No extra configuration is needed for this check, hence the no-op body.
check_config() {
:
}
# Source Root Dir Parameter
if [ -r /etc/default/cis-hardening ]; then
    # shellcheck source=../../debian/default
    . /etc/default/cis-hardening
fi
# BUGFIX: use ${CIS_ROOT_DIR:-}. This script runs under `set -u`, so
# referencing an unset CIS_ROOT_DIR directly aborted with "unbound
# variable" before the friendly error message below could be printed.
if [ -z "${CIS_ROOT_DIR:-}" ]; then
    echo "There is no /etc/default/cis-hardening file nor cis-hardening directory in current environment."
    echo "Cannot source CIS_ROOT_DIR variable, aborting."
    exit 128
fi
# Main function, will call the proper functions given the configuration (audit, enabled, disabled)
if [ -r "$CIS_ROOT_DIR"/lib/main.sh ]; then
    # shellcheck source=../../lib/main.sh
    . "$CIS_ROOT_DIR"/lib/main.sh
else
    echo "Cannot find main.sh, have you correctly defined your root directory? Current value is $CIS_ROOT_DIR in /etc/default/cis-hardening"
    exit 128
fi
|
package helpers
import (
"bytes"
"fmt"
"io"
"io/ioutil"
"os"
"os/exec"
"time"
"github.com/codeskyblue/kexec"
"github.com/go-logr/logr"
"github.com/pkg/errors"
"github.com/epinio/epinio/helpers/termui"
)
// ExternalFuncWithString is a callable that produces textual output plus an
// error; it is the unit of work retried by the helpers in this package.
type ExternalFuncWithString func() (output string, err error)

// ExternalFunc is a callable with only an error result.
type ExternalFunc func() (err error)
// RunProc executes cmd with the given args in directory dir, capturing the
// combined stdout/stderr. When toStdout is true the output is additionally
// streamed to this process' stdout/stderr. It returns the captured output
// and the error from running the command.
func RunProc(dir string, toStdout bool, cmd string, args ...string) (string, error) {
	if os.Getenv("DEBUG") == "true" {
		fmt.Printf("Executing: %s %v (in: %s)\n", cmd, args, dir)
	}

	p := kexec.Command(cmd, args...)

	var b bytes.Buffer
	if toStdout {
		p.Stdout = io.MultiWriter(os.Stdout, &b)
		p.Stderr = io.MultiWriter(os.Stderr, &b)
	} else {
		p.Stdout = &b
		p.Stderr = &b
	}

	p.Dir = dir

	// Run starts the command and waits for it to finish. The original code
	// called p.Wait() again after a successful Run(); exec.Cmd.Wait may be
	// called at most once, so that second call always returned an error and
	// turned every successful run into a failure.
	err := p.Run()
	return b.String(), err
}
// RunProcNoErr executes cmd with the given args in directory dir, capturing
// only stdout (stderr is discarded). When toStdout is true the output is
// additionally streamed to this process' stdout. It returns the captured
// output and the error from running the command.
func RunProcNoErr(dir string, toStdout bool, cmd string, args ...string) (string, error) {
	if os.Getenv("DEBUG") == "true" {
		fmt.Printf("Executing %s %v\n", cmd, args)
	}

	p := kexec.Command(cmd, args...)

	var b bytes.Buffer
	if toStdout {
		p.Stdout = io.MultiWriter(os.Stdout, &b)
		p.Stderr = nil // nil discards stderr
	} else {
		p.Stdout = &b
		p.Stderr = nil
	}

	p.Dir = dir

	// Run starts the command and waits for it to finish. The original code
	// called p.Wait() again after a successful Run(); exec.Cmd.Wait may be
	// called at most once, so that second call always returned an error and
	// turned every successful run into a failure.
	err := p.Run()
	return b.String(), err
}
// CreateTmpFile creates a temporary file on the disk with the given contents
// and returns the path to it and an error if something goes wrong.
func CreateTmpFile(contents string) (string, error) {
tmpfile, err := ioutil.TempFile("", "epinio")
if err != nil {
return tmpfile.Name(), err
}
if _, err := tmpfile.Write([]byte(contents)); err != nil {
return tmpfile.Name(), err
}
if err := tmpfile.Close(); err != nil {
return tmpfile.Name(), err
}
return tmpfile.Name(), nil
}
// Kubectl invokes the `kubectl` command in PATH, running the specified command.
// It returns the command output and/or error.
func Kubectl(command ...string) (string, error) {
	if _, err := exec.LookPath("kubectl"); err != nil {
		return "", errors.Wrap(err, "kubectl not in path")
	}

	wd, err := os.Getwd()
	if err != nil {
		return "", err
	}

	return RunProc(wd, false, "kubectl", command...)
}
// WaitForCommandCompletion prints progress dots until the func completes
func WaitForCommandCompletion(ui *termui.UI, message string, funk ExternalFuncWithString) (string, error) {
	spinner := ui.Progressf(" %s", message)
	defer spinner.Stop()

	out, err := funk()
	return out, err
}
// ExecToSuccessWithTimeout retries the given function with string & error return,
// until it either succeeds of the timeout is reached. It retries every "interval" duration.
func ExecToSuccessWithTimeout(funk ExternalFuncWithString, log logr.Logger, timeout, interval time.Duration) (string, error) {
	deadline := time.After(timeout)
	for {
		// Non-blocking deadline check between attempts.
		select {
		case <-deadline:
			return "", errors.Errorf("Timed out after %s", timeout.String())
		default:
		}

		out, err := funk()
		if err == nil {
			return out, nil
		}
		log.Info(fmt.Sprintf("Retrying because of error: %s\n%s", err.Error(), out))
		time.Sleep(interval)
	}
}
// RunToSuccessWithTimeout retries the given function with error return,
// until it either succeeds or the timeout is reached. It retries every "interval" duration.
func RunToSuccessWithTimeout(funk ExternalFunc, timeout, interval time.Duration) error {
	deadline := time.After(timeout)
	for {
		// Non-blocking deadline check between attempts.
		select {
		case <-deadline:
			return fmt.Errorf("Timed out after %s", timeout.String())
		default:
		}

		if err := funk(); err == nil {
			return nil
		}
		time.Sleep(interval)
	}
}
|
package io.snyk.plugin.datamodel
import io.circe.derivation.{deriveDecoder, deriveEncoder}
import io.circe.{Decoder, Encoder, JsonObject, ObjectEncoder}
import cats.syntax.functor._
import io.circe.derivation._
import io.circe.syntax._
/** Semantic-version info: the version ranges of a package that are vulnerable. */
case class Semver(vulnerable: Seq[String])

/** Maven coordinates (group + artifact) identifying a module. */
case class MavenModuleName(
  groupId: String,
  artifactId: String
)

/** A vulnerability is either a [[SecurityVuln]] or a [[LicenseVuln]]. */
sealed trait Vulnerability
/**
 * A security vulnerability as returned by the Snyk CLI/API.
 * Field names and order mirror the JSON payload (decoded via circe
 * semi-auto derivation in [[SnykVulnResponse.JsonCodecs]]), so do not
 * rename or reorder them.
 */
case class SecurityVuln(
  title            : String,
  credit           : Seq[String],
  description      : String,
  moduleName       : String,
  language         : String,
  packageManager   : String,
  semver           : Semver,
  // e.g. "CWE" -> Seq("CWE-79"), "CVE" -> Seq(...)
  identifiers      : Map[String, Seq[String]],
  CVSSv2           : Option[String],
  severity         : String,
  creationTime     : String,
  modificationTime : String,
  publicationTime  : String,
  disclosureTime   : String,
  id               : String,
  mavenModuleName  : MavenModuleName,
  CVSSv3           : String,
  packageName      : String,
  cvssScore        : Float,
  from             : Seq[String],
  // Left(false) marks non-upgradable hops, Right(version) upgradable ones.
  upgradePath      : Seq[Either[Boolean, String]],
  version          : String,
  name             : String,
  isUpgradable     : Boolean,
  isPatchable      : Boolean,
  filtered         : Option[VulnFilteredInfo]
) extends Vulnerability {
  /**
   * @return A combined string of the Snyk ID and CWE ID (if present)
   */
  def combinedId: String = {
    val cwe = identifiers.get("CWE").flatMap(_.headOption).filterNot(_.isEmpty)
    val cweSuffix = cwe.map(" (" + _ + ")").getOrElse("")
    s"$id$cweSuffix"
  }

  /**
   * @return A sequence of just the String components in the upgrade path, or "n/a"
   */
  def normalisedUpgradePath: Seq[String] =
    if(isUpgradable) { upgradePath.flatMap(_.right.toSeq) } else Seq("n/a")

  def toMiniVuln: MiniVuln = MiniVuln from this
}
/**
 * A license-policy violation as returned by the Snyk CLI/API.
 * Field names and order mirror the JSON payload; do not rename or reorder.
 */
case class LicenseVuln(
  `type`             : String,
  license            : String,
  // The API returns either a single URL or an array of URLs here.
  licenseTemplateUrl : Either[String, Array[String]],
  title              : String,
  description        : String,
  language           : String,
  packageManager     : String,
  packageName        : String,
  semver             : Semver,
  severity           : String,
  creationTime       : String,
  publicationTime    : String,
  id                 : String,
  from               : Seq[String],
  upgradePath        : Seq[Either[Boolean, String]],
  version            : String,
  name               : String,
  isUpgradable       : Boolean,
  isPatchable        : Boolean
) extends Vulnerability
/** The user who ignored a vulnerability. */
case class IgnoredBy(
  id    : String,
  name  : String,
  email : String
)

object IgnoredBy {
  // Companion codecs for circe semi-auto derivation.
  implicit val encoder: ObjectEncoder[IgnoredBy] = deriveEncoder
  implicit val decoder: Decoder[IgnoredBy] = deriveDecoder
}
/** One "ignore" record attached to a vulnerability (who, why, until when). */
case class VulnIgnoredInfo(
  reason             : String,
  created            : String,
  expires            : String,
  ignoredBy          : IgnoredBy,
  reasonType         : String,
  disregardIfFixable : Boolean,
  source             : String,
  path               : Seq[String]
)

object VulnIgnoredInfo {
  // Companion codecs for circe semi-auto derivation.
  implicit val encoder: ObjectEncoder[VulnIgnoredInfo] = deriveEncoder
  implicit val decoder: Decoder[VulnIgnoredInfo] = deriveDecoder
}
/** Filter metadata on a vulnerability: the ignore records applied to it. */
case class VulnFilteredInfo(ignored: Seq[VulnIgnoredInfo])

object VulnFilteredInfo {
  // Companion codecs for circe semi-auto derivation.
  implicit val encoder: ObjectEncoder[VulnFilteredInfo] = deriveEncoder
  implicit val decoder: Decoder[VulnFilteredInfo] = deriveDecoder
}

/** License policy: maps license identifiers to severity names. */
case class LicensesPolicy(severities: Map[String, String])

/** Top-level "filtered" section: vulns removed by ignores and patches. */
case class TopLevelFilteredInfo(
  ignore: Seq[Vulnerability],
  // Patch entries are kept as raw JSON; their schema is not modelled here.
  patch: Seq[JsonObject]
)
/**
 * The full response of a Snyk vulnerability scan.
 * Field names and order mirror the JSON payload; do not rename or reorder.
 */
case class SnykVulnResponse(
  ok               : Boolean,
  vulnerabilities  : Seq[Vulnerability],
  dependencyCount  : Int,
  org              : String,
  licensesPolicy   : Option[LicensesPolicy],
  isPrivate        : Boolean,
  packageManager   : String,
  policy           : String,
  ignoreSettings   : Option[Map[String, Boolean]],
  summary          : String,
  filesystemPolicy : Option[Boolean],
  filtered         : Option[TopLevelFilteredInfo],
  uniqueCount      : Option[Int],
  path             : Option[String]
) {
  /** Vulnerabilities that were filtered out via ignore rules. */
  def ignoredVulnerabilities: Seq[Vulnerability] = filtered.toSeq.flatMap(_.ignore)

  lazy val securityVulns: Seq[SecurityVuln] =
    vulnerabilities collect { case sv: SecurityVuln => sv }

  lazy val ignoredSecurityVulns: Seq[SecurityVuln] =
    ignoredVulnerabilities collect { case sv: SecurityVuln => sv }

  lazy val flatMiniVulns: Seq[MiniVuln] = {
    // val vulns: Seq[MiniVuln] = (securityVulns.map(MiniVuln.from) ++ ignoredVulnerabilities.map(MiniVuln.from)).distinct
    securityVulns.map(MiniVuln.from).distinct
  }

  lazy val mergedMiniVulns: Seq[MiniVuln] = {
    MiniVuln.merge(flatMiniVulns)
  }

  // Deliberate reference equality: only the shared sentinel
  // SnykVulnResponse.empty counts as empty.
  def isEmpty: Boolean = this eq SnykVulnResponse.empty
}
object SnykVulnResponse {
  /** Sentinel empty response; compared by reference in SnykVulnResponse.isEmpty. */
  val empty: SnykVulnResponse = SnykVulnResponse(
    ok = true,
    vulnerabilities = Nil,
    dependencyCount = 0,
    org = "",
    licensesPolicy = None,
    isPrivate = false,
    packageManager = "",
    policy = "",
    ignoreSettings = None,
    summary = "",
    filesystemPolicy = None,
    filtered = None,
    uniqueCount = None,
    path = None
  )

  /** Low-priority fallback codecs, kept in a trait so they lose implicit resolution ties. */
  trait LowPriorityJsonCodecs {
    implicit def encodeEither[A, B](implicit a: Encoder[A], b: Encoder[B]): Encoder[Either[A, B]] = {
      o: Either[A, B] => o.fold(_.asJson, _.asJson)
    }
  }

  object JsonCodecs extends LowPriorityJsonCodecs with io.circe.java8.time.TimeInstances {
    // Try the Left decoder first, then fall back to Right.
    implicit def decodeEither[A,B](implicit a: Decoder[A], b: Decoder[B]): Decoder[Either[A,B]] = {
      val l: Decoder[Either[A,B]] = a.map(Left.apply)
      val r: Decoder[Either[A,B]] = b.map(Right.apply)
      l or r
    }
    implicit def objectEncodeEither[A, B](
      implicit
      a: ObjectEncoder[A],
      b: ObjectEncoder[B]
    ): ObjectEncoder[Either[A, B]] = {
      o: Either[A, B] => o.fold(_.asJsonObject, _.asJsonObject)
    }

    // We use semi-auto derivation from circe-deriving here instead of fully-auto derivation from circe-generic
    // On the down-side, it means that we actually have to write some code, but in exchange it gives us
    // *significantly* faster compilation and we get a smaller plugin by removing some transitive deps
    // (including shapeless)
    //
    // If these 10 lines of code (at time of writing) ever *truly* become a problem,
    // we can switch to circe-magnolia-derivation
    implicit val decoderVulnIgnoredBy    : Decoder[IgnoredBy] = deriveDecoder
    implicit val decoderVulnIgnoredInfo  : Decoder[VulnIgnoredInfo] = deriveDecoder
    implicit val decoderVulnFilteredInfo : Decoder[VulnFilteredInfo] = deriveDecoder
    implicit val decoderLicensesPolicy   : Decoder[LicensesPolicy] = deriveDecoder
    implicit val decoderMavenModuleName  : Decoder[MavenModuleName] = deriveDecoder
    implicit val decoderSemver           : Decoder[Semver] = deriveDecoder
    implicit val decoderSecurityVuln     : Decoder[SecurityVuln] = deriveDecoder
    implicit val decoderLicenseVuln      : Decoder[LicenseVuln] = deriveDecoder
    // A Vulnerability is decoded by first trying SecurityVuln, then LicenseVuln.
    implicit val decoderVulnerability: Decoder[Vulnerability] =
      decoderSecurityVuln.widen or decoderLicenseVuln.widen
    implicit val decoderTopLevelFilteredInfo : Decoder[TopLevelFilteredInfo] = deriveDecoder
    implicit val decoderSnykVulnResponse     : Decoder[SnykVulnResponse] = deriveDecoder

    implicit val encoderVulnIgnoredBy    : ObjectEncoder[IgnoredBy] = deriveEncoder
    implicit val encoderVulnIgnoredInfo  : ObjectEncoder[VulnIgnoredInfo] = deriveEncoder
    implicit val encoderVulnFilteredInfo : ObjectEncoder[VulnFilteredInfo] = deriveEncoder
    implicit val encoderLicensesPolicy   : ObjectEncoder[LicensesPolicy] = deriveEncoder
    implicit val encoderMavenModuleName  : ObjectEncoder[MavenModuleName] = deriveEncoder
    implicit val encoderSemver           : ObjectEncoder[Semver] = deriveEncoder
    implicit val encoderSecurityVuln     : ObjectEncoder[SecurityVuln] = deriveEncoder
    implicit val encoderLicenseVuln      : ObjectEncoder[LicenseVuln] = deriveEncoder
    implicit val encoderVulnerability: ObjectEncoder[Vulnerability] = ObjectEncoder.instance {
      case sec: SecurityVuln => sec.asJsonObject
      case lic: LicenseVuln => lic.asJsonObject
    }
    implicit val encoderTopLevelFilteredInfo : ObjectEncoder[TopLevelFilteredInfo] = deriveEncoder
    implicit val encoderSnykVulnResponse     : ObjectEncoder[SnykVulnResponse] = deriveEncoder
  }
}
|
/**
 * Module dependencies
 *
 * Demo script: stores a PNG as binary data in MongoDB via Mongoose and
 * serves it back over HTTP.
 * NOTE(review): this targets very old APIs -- express.createServer() was
 * removed in Express 3+, Model.remove(cb) and `new Buffer` are deprecated.
 * Confirm the pinned dependency versions before reusing.
 */
var express = require('express');
var fs = require('fs');
var mongoose = require('mongoose');
var Schema = mongoose.Schema;

// img path
var imgPath = '/path/to/some/img.png';

// connect to mongo
mongoose.connect('localhost', 'testing_storeImg');

// example schema
var schema = new Schema({
  img: { data: Buffer, contentType: String }
});

// our model
var A = mongoose.model('A', schema);

mongoose.connection.on('open', function () {
  console.error('mongo is open');

  // empty the collection
  A.remove(function (err) {
    if (err) throw err;

    console.error('removed old docs');

    // store an img in binary in mongo
    var a = new A;
    a.img.data = fs.readFileSync(imgPath);
    a.img.contentType = 'image/png';
    a.save(function (err, a) {
      if (err) throw err;

      console.error('saved img to mongo');

      // start a demo server
      var server = express.createServer();
      server.get('/', function (req, res, next) {
        // look the stored doc up again and stream its image back
        A.findById(a, function (err, doc) {
          if (err) return next(err);
          res.contentType(doc.img.contentType);
          res.send(doc.img.data);
        });
      });
      server.on('close', function () {
        console.error('dropping db');
        mongoose.connection.db.dropDatabase(function () {
          console.error('closing db connection');
          mongoose.connection.close();
        });
      });

      // Loads mikeal/request Node.js library.
      var request = require('request');
      // Specify the encoding (the important is to keep the same when creating the buffer, after)
      // If you only give the URL, it brakes the downloaded data, I didn't found an other way to do it.
      request({
        url: 'http://www.cedynamix.fr/wp-content/uploads/Tux/Tux-G2.png',
        encoding: 'binary'
      }, function(error, response, body) {
        if (!error && response.statusCode === 200) {
          body = new Buffer(body, 'binary');
          // Here "body" can be affected to the "a.img.data"
          // var a = new A;
          // a.img.data = body;
          // ....
        }
      });

      server.listen(3333, function (err) {
        var address = server.address();
        console.error('server listening on http://%s:%d', address.address, address.port);
        console.error('press CTRL+C to exit');
      });
      process.on('SIGINT', function () {
        server.close();
      });
    });
  });
});
|
<reponame>Stylite-Y/XArm-Simulation<gh_stars>0
import os
import numpy as np
from numpy.core.fromnumeric import ptp
import raisimpy as raisim
import time
# --- World and robot setup for the ball-dribbling arm simulation ---
raisim.World.setLicenseFile(os.path.dirname(os.path.abspath(__file__)) + "/activation.raisim")
# LISM_urdf_file = os.path.dirname(os.path.abspath(__file__)) + "/urdf/urdf/black_panther.urdf"
LISM_urdf_file = os.path.dirname(os.path.abspath(__file__)) + "/urdf/LISM_Arm_sim.urdf"
world = raisim.World()
t_step = 0.00005  # physics step in seconds
world.setTimeStep(t_step)
ground = world.addGround(0)
world.setDefaultMaterial(1, 1, 1)
world.setMaterialPairProp("rubber", "rub", 1, 0, 0)
gravity = world.getGravity()
# print(g)

LISM = world.addArticulatedSystem(LISM_urdf_file)
LISM.setName("LISM")
# print(LISM.getDOF()

# Initial joint configuration and velocity (3 DOF).
jointNominalConfig = np.array([-0.1, 0, -1.57])
jointVelocityTarget = np.array([-10, 0, 0])
LISM.setGeneralizedCoordinate(jointNominalConfig)
LISM.setGeneralizedVelocity(jointVelocityTarget)
LISM.setControlMode(raisim.ControlMode.PD_PLUS_FEEDFORWARD_TORQUE)

JointPosInit, JointVelInit = LISM.getState()
# Frame handles used throughout the control loop.
AbadFrameId = LISM.getFrameIdxByName("abad_upper_r")
AbadPos = LISM.getFramePosition(AbadFrameId)
FootFrameId = LISM.getFrameIdxByName("toe_fr_joint")
FootPosInit = LISM.getFramePosition(FootFrameId)
print(FootPosInit)
print(AbadFrameId)
BallFrameId = LISM.getFrameIdxByName("base_ball")
BallPosInit = LISM.getFramePosition(BallFrameId)
print(BallPosInit)
print(BallFrameId)

# Visualization server (connect with the raisim client on port 8080).
server = raisim.RaisimServer(world)
server.launchServer(8080)

# Arm geometry and controller state flags.
UpperArmLength = 0.2
LowerArmLength = 0.2
flag = 0
m = 1
con_flag = 0
f1 = 25
fun_flag = 0
# x_init = 0.4 # init position
# v_init = 10 # init velocity
x_top = 0.6 # dribbling height of ball
v_ref = -15 # desired velocity
mass = 0.5
x_ref = 0.35
g = -gravity[2]  # positive gravity magnitude
# k_vir = 1000
# f2 = 500
def ParamsCal(x0, v0, x_top, xref, vref, f1):
    """Compute the virtual spring stiffness and downward force for dribbling.

    Uses the module-level globals ``mass`` and ``g``. Raises ValueError when
    the computed stiffness is negative.
    """
    dx_up = x_top - x0
    dx_down = x_top - xref

    # Energy balance over the rising phase gives the virtual stiffness.
    rise_numerator = mass * v0 ** 2 - 2 * mass * g * dx_up - 2 * f1 * dx_up
    k_vir = rise_numerator / ((x_top - xref) ** 2 - (x0 - xref) ** 2)

    # Energy balance over the falling phase gives the extra push force.
    f2 = (mass * vref ** 2 - 2 * mass * g * dx_down - k_vir * dx_down ** 2) / (2 * dx_down)

    print("dx_up and dx_down is ", dx_up, dx_down)
    if k_vir < 0:
        raise ValueError('invalid value: k_vir is negative, can not sqrt:')
    return k_vir, f2
# Main control loop: hybrid force/PD control that dribbles the ball.
# NOTE(review): k_vir/f2 and Torque_1/Torque_2 are first defined inside
# conditional branches; if the branch order ever changes, the later uses
# below could raise NameError on early iterations -- verify.
for i in range(500000):
    time.sleep(0.001)
    # if i == 0:
    #     server.startRecordingVideo("v10_with-x_1x.mp4")

    # Current ball/foot/joint state.
    BallPos = LISM.getFramePosition(BallFrameId)
    BallVel = LISM.getFrameVelocity(BallFrameId)
    JointPos, JointVec = LISM.getState()
    FootPos = LISM.getFramePosition(FootFrameId)
    FootVel = LISM.getFrameVelocity(FootFrameId)

    # Default: pure feed-forward torque, no PD.
    jointPgain = np.array([0, 0, 0])
    jointDgain = np.array([0, 0, 0])
    LISM.setPdGains(jointPgain, jointDgain)

    # Detect contact between the ball and the lower arm link.
    ContactPoint = LISM.getContacts()
    contact_flag = False
    for c in ContactPoint:
        contact_flag = c.getlocalBodyIndex() == LISM.getBodyIdx("lower_r")
        if(contact_flag):
            break
        pass

    if con_flag == 0 and contact_flag:
        con_flag = 1
        # print("the contact vel of ball", BallVel[2])

    # Force-control phase: ball is above the reference height after first contact.
    if BallPos[2] >= (x_ref-0.1275) and con_flag == 1:
        # On the first upward pass, derive the virtual spring parameters once.
        if BallVel[2] > 0 and fun_flag == 0:
            dx_up = x_top - FootPos[2]
            if (dx_up * 2 * g) >= (BallVel[2] ** 2):
                raise FloatingPointError("calculate Error: init velocity is too small or the heaving height is too high!")
            else:
                k_vir, f2 = ParamsCal(FootPos[2], BallVel[2], x_top, x_ref, v_ref, f1)
                fun_flag = 1
                print("contact point pos and vel of ball ", FootPos[2], BallVel[2])
                print("the k_vir and f1, f2 is ", k_vir, f1, f2)
        jointPgain = np.zeros(LISM.getDOF())
        jointDgain = np.zeros(LISM.getDOF())
        LISM.setPdGains(jointPgain, jointDgain)
        JointPos, JointVec = LISM.getState()
        FootPos = LISM.getFramePosition(FootFrameId)
        FootVel = LISM.getFrameVelocity(FootFrameId)

        ## Force kinematics
        # jacobian matrix of force transmission
        a11 = - UpperArmLength * np.cos(JointPos[1]) - LowerArmLength * np.cos(JointPos[1] + JointPos[2])
        a12 = UpperArmLength * np.sin(JointPos[1]) + LowerArmLength * np.sin(JointPos[1] + JointPos[2])
        a21 = - LowerArmLength * np.cos(JointPos[1] + JointPos[2])
        # print(np.cos(JointPos[1] + JointPos[2]))
        a22 = LowerArmLength * np.sin(JointPos[1] + JointPos[2])
        Jacobin_F = np.array([[a11, a12],
                              [a21, a22]])
        # print(Jacobin_F)
        if BallVel[2] <0.1 and BallVel[2] > 0:
            print("the highest ball and foot pos: ", BallPos[2] ,FootPos[2])

        # Rising ball in contact: brake with virtual spring plus f1.
        if BallVel[2] > 0 and contact_flag:
            # ContactPointVel = LISM.getContactPointVel(ContactPoint[0].getlocalBodyIndex())
            # NOTE(review): despite the name, this is the contact impulse,
            # divided by the step to approximate force.
            ContactPointVel = ContactPoint[0].getImpulse()
            ContactForce = ContactPointVel / t_step
            EndForce = - k_vir * (FootPos[2] - x_ref) - f1
            # EndForce = - 2000 * (FootPos[2] - FootPosInit[2])
            EndForce_x = - 10000 * (FootPos[0] - FootPosInit[0])
            # EndForce_x = 0
            # EndForce = 800
            # print("1", ContactForce[2])
            # LISM.setGeneralizedForce([50, 50])
        # elif BallVel[2] > 0 and BallPos[2] > 0.6:
        #     EndForce = - 10000 * (FootPos[2] - 0.4) - f1
        #     # EndForce = - 2000 * (FootPos[2] - FootPosInit[2])
        #     EndForce_x = - 10000 * (FootPos[0] - FootPosInit[0])
        # Falling ball in contact: push down with virtual spring plus f2.
        elif BallVel[2] <= 0 and contact_flag == True:
            # EndForce = 2000 * (FootPos[2] - FootPosInit[2]) + 150
            EndForce = - k_vir * (FootPos[2] - x_ref) - f2
            EndForce_x = - 10000 * (FootPos[0] - FootPosInit[0])
            # print("contact foot pos: ", FootPos[2], EndForce)
            # print("contact ball pos: ", BallPos[2])
            # print("2")
            # print("k_vir", k_vir)
        # elif BallVel[2] <= 0 and contact_flag == False:
        #     k_vir2 = 2000
        #     EndForce = - k_vir2 * (FootPos[2] - 0.4) - f2
        #     # EndForce = - 2000 * (FootPos[2] - FootPosInit[2])
        #     EndForce_x = - 10000 * (FootPos[0] - FootPosInit[0])
        #     print("nocontact foot pos: ", FootPos[2], EndForce)
        #     print("nocontact ball pos: ", BallPos[2])
        #     print("k_vir2", k_vir2)

        # Map the Cartesian end-effector force to joint torques via the Jacobian.
        JointForce_z = EndForce
        JointForce_x = EndForce_x
        Torque_1 = (Jacobin_F[0, 0] * JointForce_x + Jacobin_F[0, 1] * JointForce_z)
        Torque_2 = (Jacobin_F[1, 0] * JointForce_x + Jacobin_F[1, 1] * JointForce_z)
        Torque_1_z = (Jacobin_F[0, 1] * JointForce_z)
        Torque_2_z = (Jacobin_F[1, 1] * JointForce_z)
        Torque_1_x = (Jacobin_F[0, 0] * JointForce_x)
        Torque_2_x = (Jacobin_F[1, 0] * JointForce_x)
        # print(EndForce, Torque_1, Torque_2)
        flag = 1
        LISM.setGeneralizedForce([0, Torque_1, Torque_2])
        # torque = LISM.getGeneralizedForce()
        # print("ball vel, foot vel: ", BallVel[2], FootVel[2])
        # print("foot pso: ", FootPos)
        # print("torque: ", EndForce, Torque_1_z, Torque_2_z)
        # print(Torque_1_z, Torque_2_z, Torque_1_x, Torque_2_x)
        # print("============================")
        # LISM.setGeneralizedForce([200, 200])

    # Recovery phase: ball dropped low; hold the arm at a nominal pose with PD.
    elif BallPos[2] < 0.3:
        if flag == 1:
            print("the leave pos and vel of ball", FootPos[2], BallVel[2])
            print("*********************************************************")
        LISM.setGeneralizedForce([0, 0, 0])
        jointNominalConfig = np.array([0, 0.0, -1.57])
        jointVelocityTarget = np.zeros([LISM.getDOF()])
        jointPgain = np.array([0, 10000, 10000])
        jointDgain = np.array([0, 100, 100])
        LISM.setPdGains(jointPgain, jointDgain)
        LISM.setPdTarget(jointNominalConfig, jointVelocityTarget)
        # force = LISM.getGeneralizedForce()
        # print("Joint position: ", JointPos)
        # print("joint force: ", force)
        # print("PD", m)
        m = m + 1
        # jointNominalConfig = np.array([0, 0.0, -1.57])
        # jointVelocityTarget = np.zeros([LISM.getDOF()])
        # LISM.setGeneralizedCoordinate(jointNominalConfig)
        flag = 0
        con_flag = 0
        fun_flag = 0

    # server.integrateWorldThreadSafe()
    # if i == 20000:
    #     raisim. stopRecordingVideo()
    world.integrate()

server.killServer()
|
<reponame>rochaa/crud-auth-api-nestjs<filename>src/modules/adm/accounts/accounts.service.ts
import { Injectable, UnauthorizedException } from '@nestjs/common';
import { Md5 } from "md5-typescript";
import { AuthService } from '../../../shared/auth/auth.service';
import { Guid } from 'guid-typescript';
import { UsersService } from '../users/users.service';
import { Password } from '../../../utils/password';
import { UserStatus } from '../users/users.enum';
import { ChangePasswordDto } from './dto/change-password.dto';
import { ResultExceptionDto } from '../../../shared/result/result-exception.dto';
@Injectable()
export class AccountsService {
constructor(
private readonly usersService: UsersService,
private readonly authService: AuthService
) { }
async authenticate(email: string, password: string) {
const user = await this.validateAccount(email, password);
return await this.authService.createToken(user.email, user.roles);
}
async resetPassword(email: string) {
// TODO: Enviar E-mail com a senha
const password = Guid.create().toString().substring(0, 8).replace('-', '');
const passwordEncrypted = Password.encriptyPassword(password);
return await this.usersService.updatePassword(email, passwordEncrypted);
}
async changePassword(email: string, passwordDto: ChangePasswordDto) {
await this.validateAccount(email, passwordDto.password);
return await this.usersService.updatePassword(email, passwordDto.newPassword);
}
private async validateAccount(email: string, password: string) {
const user = await this.usersService.findByEmail(email);
const pass = Md5.init(`${password}${process.env.GIDU_JWT_KEY}`);
if (!user || (pass.toString() != user.password.toString()))
throw new UnauthorizedException(new ResultExceptionDto('Usuário ou senha inválidos', null));
if (user.status == UserStatus.Inativo)
throw new UnauthorizedException(new ResultExceptionDto('Usuário inativo', null));
return user;
}
}
|
package borip
import (
"encoding/binary"
"errors"
"net"
)
const (
defaultBufferSize = 256 * 1024
packetHeaderSize = 4
)
var ErrShortPacket = errors.New("borip: short packet")
const (
FlagNone = 0x00
FlagHardwareOverrun = 0x01 // Used at hardware interface
FlagNetworkOverrun = 0x02 // Used at client (network too slow)
FlagBufferOverrun = 0x04 // Used at client (client consumer too slow)
FlagEmptyPayload = 0x08 // Reserved
FlagStreamStart = 0x10 // Used for first packet of newly started stream
FlagStremEnd = 0x20 // Reserved (TO DO: Server sends BF_EMPTY_PAYLOAD | BF_STREAM_END)
FlagBufferUnderrun = 0x40 // Used at hardware interface
FlagHardwareTimeout = 0x80 // Used at hardware interface
)
type PacketHeader struct {
Flags byte
Notification byte // Reserved (currently 0)
Idx uint16 // Sequence number (incremented each time a packet is sent, used by client to count dropped packets)
}
type PacketReader struct {
conn net.PacketConn
buf []byte
bufI, bufN int
withHeaders bool
header PacketHeader
}
func NewPacketReader(conn net.PacketConn, withHeaders bool) *PacketReader {
return &PacketReader{
conn: conn,
buf: make([]byte, defaultBufferSize),
withHeaders: withHeaders,
}
}
func (rd *PacketReader) Header() PacketHeader {
return rd.header
}
func (rd *PacketReader) ReadSamples(samples []complex128) (int, error) {
if rd.bufI >= rd.bufN {
n, _, err := rd.conn.ReadFrom(rd.buf)
if err != nil {
return 0, err
}
rd.bufI = 0
rd.bufN = n
if rd.withHeaders {
if n < packetHeaderSize {
return 0, ErrShortPacket
}
rd.header.Flags = rd.buf[rd.bufI]
rd.header.Notification = rd.buf[rd.bufI+1]
rd.header.Idx = binary.LittleEndian.Uint16(rd.buf[2:4])
rd.bufI += 4
}
// rd.bufN = n - (n & 7)
}
idx := 0
for rd.bufI < rd.bufN {
iReal := int16(binary.LittleEndian.Uint16(rd.buf[rd.bufI : rd.bufI+2]))
qQmag := int16(binary.LittleEndian.Uint16(rd.buf[rd.bufI+2 : rd.bufI+4]))
samples[idx] = complex(float64(iReal), float64(qQmag))
idx++
rd.bufI += 4
}
return idx, nil
}
|
# Vagrant provisioning: install Node.js 7.x, gulp, and project dependencies.
apt-get -y update
# NOTE(review): setup_7.x is long end-of-life; confirm the intended Node version.
curl -sL https://deb.nodesource.com/setup_7.x | sudo -E bash -
apt-get install -y nodejs
npm install --global gulp-cli
# Install the project's npm dependencies from the synced folder.
cd /vagrant
npm install
<gh_stars>0
import { matchUpFormatCode } from '..';
// Fixture data: each entry pairs a match-up format code string with the
// object that matchUpFormatCode.parse is expected to produce (round-trip
// tested below). `obj` is optional for entries only used for validity checks.
const validFormats = [
  {
    name: 'Standard Match',
    format: 'SET3-S:6/TB7',
    obj: {
      bestOf: 3,
      setFormat: { setTo: 6, tiebreakAt: 6, tiebreakFormat: { tiebreakTo: 7 } }
    }
  },
  {
    name: 'Short Sets',
    format: 'SET3-S:4/TB7',
    obj: {
      bestOf: 3,
      setFormat: { setTo: 4, tiebreakAt: 4, tiebreakFormat: { tiebreakTo: 7 } }
    }
  },
  {
    name: 'Fast 4',
    format: 'SET3-S:4/TB5@3',
    obj: {
      bestOf: 3,
      setFormat: { setTo: 4, tiebreakAt: 3, tiebreakFormat: { tiebreakTo: 5 } }
    }
  },
  {
    name: '<NAME> 1971',
    format: 'SET5-S:6/TB9-F:6',
    obj: {
      bestOf: 5,
      setFormat: { setTo: 6, tiebreakAt: 6, tiebreakFormat: { tiebreakTo: 9 } },
      finalSetFormat: { setTo: 6, noTiebreak: true }
    }
  },
  {
    name: '<NAME>018',
    format: 'SET5-S:6/TB7-F:6',
    obj: {
      bestOf: 5,
      setFormat: { setTo: 6, tiebreakAt: 6, tiebreakFormat: { tiebreakTo: 7 } },
      finalSetFormat: { setTo: 6, noTiebreak: true }
    }
  },
  {
    name: '<NAME>019',
    format: 'SET5-S:6/TB7-F:6/TB7@12',
    obj: {
      bestOf: 5,
      setFormat: { setTo: 6, tiebreakAt: 6, tiebreakFormat: { tiebreakTo: 7 } },
      finalSetFormat: {
        setTo: 6,
        tiebreakAt: 12,
        tiebreakFormat: { tiebreakTo: 7 }
      }
    }
  },
  {
    name: 'Australian Open Singles from 2019',
    format: 'SET5-S:6/TB7-F:6/TB10',
    obj: {
      bestOf: 5,
      setFormat: { setTo: 6, tiebreakAt: 6, tiebreakFormat: { tiebreakTo: 7 } },
      finalSetFormat: {
        setTo: 6,
        tiebreakAt: 6,
        tiebreakFormat: { tiebreakTo: 10 }
      }
    }
  },
  {
    name: '<NAME>',
    format: 'SET5-S:5NOAD/TB9NOAD@4',
    obj: {
      bestOf: 5,
      setFormat: {
        setTo: 5,
        NoAD: true,
        tiebreakAt: 4,
        tiebreakFormat: { tiebreakTo: 9, NoAD: true }
      }
    }
  },
  {
    name: 'Tiebreak Only Match',
    format: 'SET3-S:TB10',
    obj: {
      bestOf: 3,
      setFormat: { tiebreakSet: { tiebreakTo: 10 } }
    }
  },
  {
    name: '<NAME>',
    format: 'SET3-S:6/TB7-F:TB10',
    obj: {
      bestOf: 3,
      setFormat: { setTo: 6, tiebreakAt: 6, tiebreakFormat: { tiebreakTo: 7 } },
      finalSetFormat: { tiebreakSet: { tiebreakTo: 10 } }
    }
  },
  {
    name: 'Pro Set',
    format: 'SET1-S:8/TB7',
    obj: {
      bestOf: 1,
      setFormat: { setTo: 8, tiebreakAt: 8, tiebreakFormat: { tiebreakTo: 7 } }
    }
  },
  {
    name: 'College Pro Set',
    format: 'SET1-S:8/TB7@7',
    obj: {
      bestOf: 1,
      setFormat: { setTo: 8, tiebreakAt: 7, tiebreakFormat: { tiebreakTo: 7 } }
    }
  },
  {
    name: '3 timed sets',
    format: 'SET3-S:T20-F:T60',
    obj: {
      bestOf: 3,
      setFormat: { timed: true, minutes: 20 },
      finalSetFormat: { timed: true, minutes: 60 }
    }
  },
  // Timed single-set variants (explicit SET1 prefix).
  {
    format: 'SET1-S:T120',
    obj: {
      bestOf: 1,
      setFormat: { timed: true, minutes: 120 }
    }
  },
  {
    format: 'SET1-S:T90',
    obj: {
      bestOf: 1,
      setFormat: { timed: true, minutes: 90 }
    }
  },
  {
    format: 'SET1-S:T60',
    obj: {
      bestOf: 1,
      setFormat: { timed: true, minutes: 60 }
    }
  },
  {
    format: 'SET1-S:T30',
    obj: {
      bestOf: 1,
      setFormat: { timed: true, minutes: 30 }
    }
  }
];
// Bare timed codes (no SET prefix) that should parse as single timed sets.
const singleSetTimed = [
  {
    format: 'T120',
    obj: {
      bestOf: 1,
      setFormat: { timed: true, minutes: 120 }
    }
  },
  {
    format: 'T90',
    obj: {
      bestOf: 1,
      setFormat: { timed: true, minutes: 90 }
    }
  },
  {
    format: 'T60',
    obj: {
      bestOf: 1,
      setFormat: { timed: true, minutes: 60 }
    }
  },
  {
    format: 'T30',
    obj: {
      bestOf: 1,
      setFormat: { timed: true, minutes: 30 }
    }
  }
];
// Malformed codes that must be rejected by isValidMatchUpFormat and
// produce undefined from parse.
const invalidFormats = [
  '',
  'T',
  '90',
  'T90X',
  'T90@',
  'SET3-S:6/TB',
  'SET3-S:6/TB7@',
  'SET5-S:6/T9-F:6',
  'SET-S:6/TB7-F:6',
  'SET35-S:6/TB7-F:TB10',
  'SET5-S:6/TB7-X:6/TB10',
  'SET5-S:5NOAD/TB9NOD@4',
  'SET5-S:5NAD/TB9NOAD@4',
  'SET5-S:6/TB7F:6/TB7@12'
];
it('recognizes valid formats', () => {
validFormats.forEach(validFormat => {
const valid = matchUpFormatCode.isValidMatchUpFormat(validFormat.format);
expect(valid).toEqual(true);
});
});
it('recognizes invalid formats', () => {
invalidFormats.forEach(matchUpFormat => {
const valid = matchUpFormatCode.isValidMatchUpFormat(matchUpFormat);
expect(valid).toEqual(false);
});
});
it('recognizes valid timed formats', () => {
singleSetTimed.forEach(({ format }) => {
const valid = matchUpFormatCode.isValidMatchUpFormat(format);
expect(valid).toEqual(true);
});
});
it('match format suite', () => {
// round trip conversion tests
validFormats.forEach(sf => {
expect(matchUpFormatCode.stringify(matchUpFormatCode.parse(sf.format))).toEqual(sf.format);
});
// return expected objects
validFormats.forEach(sf => {
if (sf.obj) expect(matchUpFormatCode.parse(sf.format)).toMatchObject(sf.obj);
});
singleSetTimed.forEach(sf => {
expect(matchUpFormatCode.parse(sf.format)).toEqual(sf.obj);
});
// recognize invalid formats and return undefined
invalidFormats.forEach(sf => {
expect(matchUpFormatCode.parse(sf)).toEqual(undefined);
});
});
it('handles tiebreakAt: false and tiebreakFormat/tiebreakTo: false', () => {
const testFormat = {
bestOf: 3,
finalSetFormat: {
noTiebreak: true,
setTo: 6,
tiebreakAt: false,
tiebreakFormat: { tiebreakTo: false }
},
setFormat: {
noTiebreak: true,
setTo: 6,
tiebreakAt: 6,
tiebreakFormat: { tiebreakTo: false }
}
};
const result = matchUpFormatCode.stringify(testFormat);
expect(result).toEqual('SET3-S:6');
});
it('parse and stringify format for multiple timed sets', () => {
const scoreFormat = {
format: 'SET3-S:T20-F:T60',
obj: {
bestOf: 3,
setFormat: { timed: true, minutes: 20 },
finalSetFormat: { timed: true, minutes: 60 }
}
};
const parsed = matchUpFormatCode.parse(scoreFormat.format);
expect(parsed).toMatchObject(scoreFormat.obj);
const stringified = matchUpFormatCode.stringify(scoreFormat.obj);
expect(stringified).toEqual(scoreFormat.format);
});
it('will not include final set code when equivalent to other sets', () => {
const obj = {
bestOf: 3,
setFormat: { setTo: 6, tiebreakAt: 6, tiebreakFormat: { tiebreakTo: 7 } },
finalSetFormat: { setTo: 6, tiebreakAt: 6, tiebreakFormat: { tiebreakTo: 7 } }
};
expect(matchUpFormatCode.stringify(obj)).toEqual('SET3-S:6/TB7');
});
|
# Sanity check: both variables must be provided by the calling script.
[ -z "${MULLE_VIRTUAL_ROOT}" -o -z "${MULLE_UNAME}" ] && \
    echo "Your script needs to setup MULLE_VIRTUAL_ROOT \
and MULLE_UNAME properly" >&2 && exit 1

MULLE_ENV_SHARE_DIR="${MULLE_VIRTUAL_ROOT}/.mulle-env/share"
MULLE_ENV_ETC_DIR="${MULLE_VIRTUAL_ROOT}/.mulle-env/etc"

# Top/down order of inclusion. Left overrides right if present.
# Keep these files (except environment-custom.sh) clean off manual edits so
# that mulle-env can read and set environment variables.
#
# .mulle-env/etc                        | .mulle-env/share
# --------------------------------------|--------------------
#                                       | environment-plugin.sh
#                                       | environment-os-${MULLE_UNAME}.sh
#                                       | environment-project.sh
#                                       | environment-extension.sh
# environment-global.sh                 |
# environment-os-${MULLE_UNAME}.sh      |
# environment-host-${MULLE_HOSTNAME}.sh |
# environment-user-${USER}.sh           |
# environment-custom.sh                 |
#
#
# The plugin file, if present is to be set by a mulle-env plugin
#
if [ -f "${MULLE_ENV_SHARE_DIR}/environment-plugin.sh" ]
then
   . "${MULLE_ENV_SHARE_DIR}/environment-plugin.sh"
fi

#
# The OS-specific plugin file, if present is to be set by a mulle-env plugin.
# NOTE(review): the filename here is "environment-plugin-os${MULLE_UNAME}.sh"
# (no hyphen before the uname) unlike every other os file -- confirm this is
# intentional and not a missing '-'.
#
if [ -f "${MULLE_ENV_SHARE_DIR}/environment-plugin-os${MULLE_UNAME}.sh" ]
then
   . "${MULLE_ENV_SHARE_DIR}/environment-plugin-os${MULLE_UNAME}.sh"
fi

#
# The project file, if present is to be set by mulle-sde init itself
# w/o extensions
#
if [ -f "${MULLE_ENV_SHARE_DIR}/environment-project.sh" ]
then
   . "${MULLE_ENV_SHARE_DIR}/environment-project.sh"
fi

#
# The extension file, if present is to be set by mulle-sde extensions.
#
if [ -f "${MULLE_ENV_SHARE_DIR}/environment-extension.sh" ]
then
   . "${MULLE_ENV_SHARE_DIR}/environment-extension.sh"
fi

#
# Global user settings
#
if [ -f "${MULLE_ENV_ETC_DIR}/environment-global.sh" ]
then
   . "${MULLE_ENV_ETC_DIR}/environment-global.sh"
fi

#
# Load in some user modifications depending on os, hostname, username.
#
if [ -f "${MULLE_ENV_ETC_DIR}/environment-host-${MULLE_HOSTNAME}.sh" ]
then
   . "${MULLE_ENV_ETC_DIR}/environment-host-${MULLE_HOSTNAME}.sh"
fi

if [ -f "${MULLE_ENV_ETC_DIR}/environment-os-${MULLE_UNAME}.sh" ]
then
   . "${MULLE_ENV_ETC_DIR}/environment-os-${MULLE_UNAME}.sh"
fi

if [ -f "${MULLE_ENV_ETC_DIR}/environment-user-${USER}.sh" ]
then
   . "${MULLE_ENV_ETC_DIR}/environment-user-${USER}.sh"
fi

#
# For more complex edits, that don't work with the cmdline tool
#
if [ -f "${MULLE_ENV_ETC_DIR}/environment-custom.sh" ]
then
   . "${MULLE_ENV_ETC_DIR}/environment-custom.sh"
fi

# Clean up the temporary path variables.
unset MULLE_ENV_ETC_DIR
unset MULLE_ENV_SHARE_DIR
|
/*
* Bin.java
*
* Created on March 9, 2007, 9:05 PM
*
* From "Multiprocessor Synchronization and Concurrent Data Structures",
* by <NAME> and <NAME>.
* Copyright 2007 Elsevier Inc. All rights reserved.
*/
package tamp.ch15.priority.priority;
import java.util.ArrayList;
import java.util.List;
/**
* Simple bin implementation used to test priority queues.
*
* @param T item type
* @author mph
*/
/**
 * Simple thread-safe bin used to test priority queues: items are stored
 * in insertion order, {@link #get()} removes the oldest one, and an empty
 * bin yields {@code null}.
 */
public class Bin<T> {
  List<T> list;

  /** Creates an empty bin. */
  public Bin() {
    list = new ArrayList<T>();
  }

  /** Appends {@code item} to the end of the bin. */
  synchronized void put(T item) {
    list.add(item);
  }

  /**
   * Removes and returns the oldest item, or {@code null} when the bin
   * is empty.  (Guard clause replaces the original catch of
   * IndexOutOfBoundsException; behavior is identical under the lock.)
   */
  synchronized T get() {
    if (list.isEmpty()) {
      return null;
    }
    return list.remove(0);
  }

  /** Returns {@code true} when the bin holds no items. */
  synchronized boolean isEmpty() {
    return list.isEmpty();
  }
}
|
#!/bin/sh -l
# Extract the first "version" field from lerna.json and strip quotes,
# commas and spaces, e.g.  "version": "3.4.1",  ->  3.4.1
LERNA_VERSION=$(grep -m1 version lerna.json | awk -F: '{ print $2 }' | sed 's/[", ]//g')
# Publish it as a GitHub Actions step output.  Quoting the expansion avoids
# word-splitting should the extracted value ever contain whitespace.
echo "::set-output name=lerna-version::$LERNA_VERSION"
|
import { SparqlQueryResult, SparqlQueryRecord, SparqlUri, SparqlLiteral, SparqlBlankNode, SparqlVariableBindingValue } from "./sparql-models";
import { Observable } from "rxjs";
import { InjectionToken } from "@angular/core";
/**
 * Status of the most recent SPARQL query execution.
 */
export interface ISparqlQueryStatus {
    // undefined indicates the user hasn't executed any query yet.
    status?: "busy" | "successful" | "failed";
    /** Optional human-readable detail accompanying the status. */
    message?: string;
}
/**
 * Abstraction over a SPARQL endpoint client.
 */
export interface ISparqlService {
    /**
     * The observable that emits the SPARQL query result.
     */
    readonly currentResult: Observable<SparqlQueryResult>;
    /**
     * The observable that emits the current query status.
     * This observable always emits the next item BEFORE {@link #currentResult}.
     */
    readonly currentStatus: Observable<ISparqlQueryStatus>;
    /**
     * Executes the given SPARQL query expression.
     * Results/status are delivered via the observables above.
     */
    executeQuery(queryExpr: string);
}
/** Angular DI token under which an ISparqlService implementation is registered. */
export const ISparqlServiceInjectionToken = new InjectionToken<ISparqlService>("DI.ISparqlService");
/** XML namespace of the W3C SPARQL Query Results format. */
export const SparqlResultsNamespace = "http://www.w3.org/2005/sparql-results#";
// Namespace used to read xml:lang attributes off <literal> elements.
const XmlMetaNamespace = "http://www.w3.org/XML/1998/namespace";
/**
 * Parses a SPARQL Query Results XML document (see
 * https://www.w3.org/2001/sw/DataAccess/rf1/) into a SparqlQueryResult.
 *
 * Handles both ASK results (a single <boolean> element) and SELECT results
 * (<results>/<result>/<binding> rows with uri / literal / bnode values).
 *
 * @param rawResult the raw XML text returned by the endpoint
 * @returns the populated result object
 */
export function ParseQueryResult(rawResult: string): SparqlQueryResult {
    const result = new SparqlQueryResult();
    const parser = new DOMParser();
    const doc = parser.parseFromString(rawResult, "text/xml");
    const root = doc.documentElement;
    // Map the "r" prefix to the SPARQL results namespace for XPath queries.
    const nsResolver: XPathNSResolver = {
        lookupNamespaceURI: prefix => {
            if (prefix === "r") { return SparqlResultsNamespace; }
            return null;
        }
    };
    // c.f. https://www.w3.org/2001/sw/DataAccess/rf1/
    // Get variable names.
    const variables = evaluateXPathAndMap(doc, "/r:sparql/r:head/r:variable", root, nsResolver,
        node => (node as Element).getAttribute("name"));
    result.variables = variables;
    // ASK queries: a single <boolean> element instead of <results>.
    const booleanNode = evaluateXPathAndTakeFirst(doc, "/r:sparql/r:boolean", root, nsResolver);
    if (booleanNode) {
        const value = booleanNode.textContent.trim().toLowerCase();
        switch (value) {
            case "true": result.resultBoolean = true; break;
            case "false": result.resultBoolean = false; break;
            default:
                console.warn("Cannot parse <boolean> value: " + value + ".");
                break;
        }
        // BUG FIX: the original used a bare `return;` here, so callers of an
        // ASK query received `undefined` instead of the populated result.
        return result;
    }
    result.records = evaluateXPathAndMap(doc, "/r:sparql/r:results/r:result", root, nsResolver,
        node => {
            const bindings: { [key: string]: SparqlVariableBindingValue } = {};
            for (let i = 0; i < node.childNodes.length; i++) {
                const bnode = node.childNodes[i] as Element;
                console.assert(bnode instanceof Element);
                if (bnode.nodeType !== Node.ELEMENT_NODE) { continue; }
                if (bnode.localName !== "binding") { continue; }
                // <binding name="variable_name">
                const belement = bnode as Element;
                const name = belement.getAttribute("name");
                // A binding holds exactly one of <uri>, <literal> or <bnode>.
                const uriNode = evaluateXPathAndTakeFirst(doc, "./r:uri", bnode, nsResolver);
                if (uriNode) {
                    bindings[name] = new SparqlUri(uriNode.textContent.trim());
                    continue;
                }
                const literalNode = evaluateXPathAndTakeFirst(doc, "./r:literal", bnode, nsResolver) as Element;
                if (literalNode) {
                    bindings[name] = new SparqlLiteral(literalNode.textContent.trim(),
                        literalNode.getAttributeNS(XmlMetaNamespace, "lang"),
                        literalNode.getAttribute("datatype"));
                    continue;
                }
                const blankNode = evaluateXPathAndTakeFirst(doc, "./r:bnode", bnode, nsResolver);
                if (blankNode) {
                    bindings[name] = new SparqlBlankNode(blankNode.textContent.trim());
                    continue;
                }
                console.warn("Cannot parse result value binding.", bnode);
            }
            return new SparqlQueryRecord(bindings);
        });
    return result;
}
/**
 * Evaluates the XPath expression against contextNode and returns the first
 * matching node in document order, or null when nothing matches.
 */
function evaluateXPathAndTakeFirst(doc: Document, expression: string, contextNode: Node, resolver: XPathNSResolver): Node {
    const evaluation = doc.evaluate(
        expression, contextNode, resolver, XPathResult.FIRST_ORDERED_NODE_TYPE, null);
    return evaluation.singleNodeValue;
}
/**
 * Evaluates the XPath expression against contextNode and maps every matched
 * node (in document order) through `selector`, returning the results.
 */
function evaluateXPathAndMap<T>(doc: Document, expression: string, contextNode: Node, resolver: XPathNSResolver,
    selector: (node: Node, index: Number) => T): T[] {
    const matches = doc.evaluate(
        expression, contextNode, resolver, XPathResult.ORDERED_NODE_ITERATOR_TYPE, null);
    return mapXPathResult(matches, selector);
}
/**
 * Drains an XPathResult iterator, applying `selector` to each node together
 * with its zero-based position, and collects the projected values.
 */
function mapXPathResult<T>(result: XPathResult, selector: (node: Node, index: Number) => T): T[] {
    const projected: T[] = [];
    for (let current = result.iterateNext(); current; current = result.iterateNext()) {
        projected.push(selector(current, projected.length));
    }
    return projected;
}
|
from typing import List
def calculateFrequency(t: str) -> List[int]:
    """Return a 7-slot presence vector for coin-flip triples found in ``t``.

    Each slot corresponds to one of the tracked three-flip sequences below
    and is set to 1 if that sequence occurs anywhere in ``t`` (counted only
    once, at its first occurrence), otherwise 0.

    NOTE: "THT" is deliberately absent from the tracked list, matching the
    original implementation — confirm against the problem statement.
    """
    sequences = ["TTT", "TTH", "THH", "HHT", "HTT", "HTH", "HHH"]
    frequency = [0] * 7
    seen = set()
    for start in range(len(t) - 2):
        window = t[start:start + 3]
        if window in sequences and window not in seen:
            seen.add(window)
            frequency[sequences.index(window)] = 1
    return frequency
|
#!/bin/bash
# conda-build script for the r-xaringan package.
# Linux / Windows / osx-64: build and install via R CMD INSTALL.
# NOTE(review): because osx-64 is matched by this first condition, the
# osx-64 branch inside the `else` below is unreachable dead code — confirm
# whether the first condition or the dead branch is the intended behavior.
if [[ $target_platform =~ linux.* ]] || [[ $target_platform == win-32 ]] || [[ $target_platform == win-64 ]] || [[ $target_platform == osx-64 ]]; then
  export DISABLE_AUTOBREW=1
  $R CMD INSTALL --build .
else
  # Fallback: copy the pre-built package directly into the R library.
  mkdir -p $PREFIX/lib/R/library/xaringan
  mv * $PREFIX/lib/R/library/xaringan
  if [[ $target_platform == osx-64 ]]; then
    pushd $PREFIX
    # Rewrite hard-coded framework/toolchain install names in every shared
    # library so they resolve inside the conda prefix instead.  Each
    # install_name_tool call is best-effort (|| true): most libraries only
    # reference a subset of these paths.
    for libdir in lib/R/lib lib/R/modules lib/R/library lib/R/bin/exec sysroot/usr/lib; do
      pushd $libdir || exit 1
      for SHARED_LIB in $(find . -type f -iname "*.dylib" -or -iname "*.so" -or -iname "R"); do
        echo "fixing SHARED_LIB $SHARED_LIB"
        install_name_tool -change /Library/Frameworks/R.framework/Versions/3.5.0-MRO/Resources/lib/libR.dylib "$PREFIX"/lib/R/lib/libR.dylib $SHARED_LIB || true
        install_name_tool -change /Library/Frameworks/R.framework/Versions/3.5/Resources/lib/libR.dylib "$PREFIX"/lib/R/lib/libR.dylib $SHARED_LIB || true
        install_name_tool -change /usr/local/clang4/lib/libomp.dylib "$PREFIX"/lib/libomp.dylib $SHARED_LIB || true
        install_name_tool -change /usr/local/gfortran/lib/libgfortran.3.dylib "$PREFIX"/lib/libgfortran.3.dylib $SHARED_LIB || true
        install_name_tool -change /Library/Frameworks/R.framework/Versions/3.5/Resources/lib/libquadmath.0.dylib "$PREFIX"/lib/libquadmath.0.dylib $SHARED_LIB || true
        install_name_tool -change /usr/local/gfortran/lib/libquadmath.0.dylib "$PREFIX"/lib/libquadmath.0.dylib $SHARED_LIB || true
        install_name_tool -change /Library/Frameworks/R.framework/Versions/3.5/Resources/lib/libgfortran.3.dylib "$PREFIX"/lib/libgfortran.3.dylib $SHARED_LIB || true
        install_name_tool -change /usr/lib/libgcc_s.1.dylib "$PREFIX"/lib/libgcc_s.1.dylib $SHARED_LIB || true
        install_name_tool -change /usr/lib/libiconv.2.dylib "$PREFIX"/sysroot/usr/lib/libiconv.2.dylib $SHARED_LIB || true
        install_name_tool -change /usr/lib/libncurses.5.4.dylib "$PREFIX"/sysroot/usr/lib/libncurses.5.4.dylib $SHARED_LIB || true
        install_name_tool -change /usr/lib/libicucore.A.dylib "$PREFIX"/sysroot/usr/lib/libicucore.A.dylib $SHARED_LIB || true
        install_name_tool -change /usr/lib/libexpat.1.dylib "$PREFIX"/lib/libexpat.1.dylib $SHARED_LIB || true
        install_name_tool -change /usr/lib/libcurl.4.dylib "$PREFIX"/lib/libcurl.4.dylib $SHARED_LIB || true
        install_name_tool -change /usr/lib/libc++.1.dylib "$PREFIX"/lib/libc++.1.dylib $SHARED_LIB || true
        install_name_tool -change /Library/Frameworks/R.framework/Versions/3.5/Resources/lib/libc++.1.dylib "$PREFIX"/lib/libc++.1.dylib $SHARED_LIB || true
      done
      popd
    done
    popd
  fi
fi
|
#!/bin/bash
# Copyright 2017 The Openstack-Helm Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

# Deploys the Cinder chart into the "openstack" namespace and smoke-tests it.
# -x traces commands, -e aborts on the first failure.
set -xe

#NOTE: Pull images and lint chart
make pull-images cinder

#NOTE: Deploy command
# Default OSH_EXTRA_HELM_ARGS to empty if the caller did not set it.
: ${OSH_EXTRA_HELM_ARGS:=""}
# Single-node-friendly Ceph pool overrides (replication 1, same_host rule).
tee /tmp/cinder.yaml <<EOF
conf:
  ceph:
    pools:
      backup:
        replication: 1
        crush_rule: same_host
        chunk_size: 8
      volume:
        replication: 1
        crush_rule: same_host
        chunk_size: 8
EOF
helm upgrade --install cinder ./cinder \
    --namespace=openstack \
    --values=/tmp/cinder.yaml \
    ${OSH_EXTRA_HELM_ARGS} \
    ${OSH_EXTRA_HELM_ARGS_CINDER}

#NOTE: Wait for deploy
./tools/deployment/common/wait-for-pods.sh openstack

#NOTE: Validate Deployment info
export OS_CLOUD=openstack_helm
openstack service list
sleep 30 #NOTE(portdirect): Wait for ingress controller to update rules and restart Nginx
openstack volume type list
|
<filename>dev-app/routes/components/timeline/theming/index.ts
/*
Copyright 2020, Verizon Media
Licensed under the terms of the MIT license. See the LICENSE file in the project root for license terms.
*/
/**
 * Static table data describing the themable CSS custom properties of the
 * timeline component, used by the dev-app documentation pages.
 */
export class TimelineBlockThemeProperties {
  /** Column definitions for the theme-property documentation tables. */
  public timelineBlockThemeCols = [
    {
      _class: 'monospaced',
      colHeadName: 'name',
      colHeadValue: 'Name',
    },
    {
      _class: 'monospaced',
      colHeadName: 'property',
      colHeadValue: 'CSS Property',
    },
    {
      _class: 'monospaced',
      colClass: 't450',
      colHeadName: 'default',
      colHeadValue: 'Bindable Theme',
    },
  ];
  /** Time-block level properties (row borders, time label styling). */
  public timelineBlockThemeProperties = [
    {
      default: 'solid 1px #444',
      name: '--time-block-border-bottom',
      property: 'border-bottom',
    },
    {
      default: 'var(--c_smoke)',
      name: '--time-color',
      property: 'color',
    },
    {
      default: '12px',
      name: '--time-font-size',
      property: 'font-size',
    },
  ];
  /** Current-time indicator and loading-spinner properties. */
  public timelineBlockThemeProperties2 = [
    {
      default: '#00AC3E',
      name: '--current-time-line-color',
      property: 'background',
    },
    {
      default: 'rgba(0, 0, 0, 0.5)',
      name: '--loading-spinner-background',
      property: 'background',
    },
  ];
  /** Week-view header/date-strip properties. */
  public timelineBlockThemeProperties3 = [
    {
      default: 'var(--c_slate)',
      name: '--timeline-week-dates-background',
      property: 'background',
    },
    {
      default: 'solid 1px #444',
      name: '--timeline-week-dates-border-bottom',
      property: 'border-bottom',
    },
    {
      default: 'var(--c_marshmellow)',
      name: '--timeline-week-dates-link-color',
      property: 'color',
    },
    {
      default: 'var(--c_smoke)',
      name: '--timeline-week-dates-link-color-hover',
      property: 'color',
    },
    {
      default: 'solid 1px var(--c_gray)',
      name: '--timeline-week-dates-first-border-right',
      property: 'border-right',
    },
    {
      default: 'solid 1px var(--c_gray)',
      name: '--timeline-week-content-border-left',
      property: 'border-left',
    },
    {
      default: 'var(--c_primaryMain)',
      name: '--timeline-week-dates-today-color',
      property: 'background',
    },
  ];
}
|
<reponame>MarcelBraghetto/AndroidNanoDegree2016
package com.lilarcor.popularmovies.framework.movies.data.contentprovider;
import android.content.ContentProvider;
import android.content.ContentValues;
import android.content.Context;
import android.content.UriMatcher;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
import android.database.sqlite.SQLiteStatement;
import android.net.Uri;
import android.support.annotation.NonNull;
import static com.lilarcor.popularmovies.framework.movies.data.contentprovider.MoviesContentContract.*;
import static com.lilarcor.popularmovies.framework.movies.data.contentprovider.MoviesContentContract.CONTENT_AUTHORITY;
import static com.lilarcor.popularmovies.framework.movies.data.contentprovider.MoviesContentContract.CONTENT_PATH_ALL_FAVOURITE_MOVIES;
import static com.lilarcor.popularmovies.framework.movies.data.contentprovider.MoviesContentContract.CONTENT_PATH_MOVIES;
import static com.lilarcor.popularmovies.framework.movies.data.contentprovider.MoviesContentContract.CONTENT_PATH_MOVIE_REVIEWS;
import static com.lilarcor.popularmovies.framework.movies.data.contentprovider.MoviesContentContract.CONTENT_PATH_MOVIE_VIDEOS;
import static com.lilarcor.popularmovies.framework.movies.data.contentprovider.MoviesContentContract.CONTENT_PATH_POPULAR_MOVIES;
import static com.lilarcor.popularmovies.framework.movies.data.contentprovider.MoviesContentContract.CONTENT_PATH_TOP_RATED_MOVIES;
import static com.lilarcor.popularmovies.framework.movies.data.contentprovider.MoviesContentContract.MovieVideos;
import static com.lilarcor.popularmovies.framework.movies.data.contentprovider.MoviesContentContract.Movies;
import static com.lilarcor.popularmovies.framework.movies.data.contentprovider.MoviesContentContract.PopularMovies;
import static com.lilarcor.popularmovies.framework.movies.data.contentprovider.MoviesContentContract.TopRatedMovies;
/**
* Created by Marcel Braghetto on 14/07/15.
*
* Movies content provider user for all data access to
* manage movie related persistent data.
*
* This provider is backed by an SQLite database.
*/
public class MoviesContentProvider extends ContentProvider {
    // URI matcher ids: the x00 constants match a single item ("path/#"),
    // the x01 constants match the whole collection.
    private static final int MATCHER_ID_MOVIE = 100;
    private static final int MATCHER_ID_ALL_MOVIES = 101;
    private static final int MATCHER_ID_ALL_FAVOURITE_MOVIES = 201;
    private static final int MATCHER_ID_MOVIE_VIDEO = 250;
    private static final int MATCHER_ID_ALL_MOVIE_VIDEOS = 251;
    private static final int MATCHER_ID_MOVIE_REVIEW = 260;
    private static final int MATCHER_ID_ALL_MOVIE_REVIEWS = 261;
    private static final int MATCHER_ID_POPULAR_MOVIE = 300;
    private static final int MATCHER_ID_ALL_POPULAR_MOVIES = 301;
    private static final int MATCHER_ID_TOP_RATED_MOVIE = 400;
    private static final int MATCHER_ID_ALL_TOP_RATED_MOVIES = 401;

    private final UriMatcher mUriMatcher;
    private SQLiteDatabase mMoviesDatabase;

    public MoviesContentProvider() {
        mUriMatcher = new UriMatcher(UriMatcher.NO_MATCH);
        mUriMatcher.addURI(CONTENT_AUTHORITY, CONTENT_PATH_MOVIES + "/#", MATCHER_ID_MOVIE);
        mUriMatcher.addURI(CONTENT_AUTHORITY, CONTENT_PATH_MOVIES, MATCHER_ID_ALL_MOVIES);
        mUriMatcher.addURI(CONTENT_AUTHORITY, CONTENT_PATH_ALL_FAVOURITE_MOVIES, MATCHER_ID_ALL_FAVOURITE_MOVIES);
        mUriMatcher.addURI(CONTENT_AUTHORITY, CONTENT_PATH_MOVIE_VIDEOS + "/#", MATCHER_ID_MOVIE_VIDEO);
        mUriMatcher.addURI(CONTENT_AUTHORITY, CONTENT_PATH_MOVIE_VIDEOS, MATCHER_ID_ALL_MOVIE_VIDEOS);
        mUriMatcher.addURI(CONTENT_AUTHORITY, CONTENT_PATH_MOVIE_REVIEWS + "/#", MATCHER_ID_MOVIE_REVIEW);
        mUriMatcher.addURI(CONTENT_AUTHORITY, CONTENT_PATH_MOVIE_REVIEWS, MATCHER_ID_ALL_MOVIE_REVIEWS);
        mUriMatcher.addURI(CONTENT_AUTHORITY, CONTENT_PATH_POPULAR_MOVIES + "/#", MATCHER_ID_POPULAR_MOVIE);
        mUriMatcher.addURI(CONTENT_AUTHORITY, CONTENT_PATH_POPULAR_MOVIES, MATCHER_ID_ALL_POPULAR_MOVIES);
        mUriMatcher.addURI(CONTENT_AUTHORITY, CONTENT_PATH_TOP_RATED_MOVIES + "/#", MATCHER_ID_TOP_RATED_MOVIE);
        mUriMatcher.addURI(CONTENT_AUTHORITY, CONTENT_PATH_TOP_RATED_MOVIES, MATCHER_ID_ALL_TOP_RATED_MOVIES);
    }

    @Override
    public boolean onCreate() {
        mMoviesDatabase = new MoviesDatabaseHelper(getContext()).getWritableDatabase();
        return true;
    }

    /**
     * Queries the table addressed by {@code uri}; single-item URIs use the
     * last path segment as the key.  Note: {@code MATCHER_ID_POPULAR_MOVIE}
     * and {@code MATCHER_ID_TOP_RATED_MOVIE} are registered in the matcher
     * but intentionally not handled here and fall through to the default.
     */
    @Override
    public synchronized Cursor query(Uri uri, String[] projection, String selection, String[] selectionArgs, String sortOrder) {
        Cursor cursor;
        switch(mUriMatcher.match(uri)) {
            case MATCHER_ID_MOVIE: {
                String id = uri.getLastPathSegment();
                String query = "SELECT * FROM " + Movies.TABLE_NAME + " WHERE " + Movies._ID + " = ?";
                cursor = mMoviesDatabase.rawQuery(query, new String[]{id});
            }
            break;
            case MATCHER_ID_ALL_MOVIES: {
                String query = "SELECT * FROM " + Movies.TABLE_NAME;
                cursor = mMoviesDatabase.rawQuery(query, null);
            }
            break;
            case MATCHER_ID_ALL_FAVOURITE_MOVIES: {
                String query = "SELECT * FROM " + Movies.TABLE_NAME + " WHERE " + Movies.COLUMN_MOVIE_IS_FAVOURITE + " > 0";
                cursor = mMoviesDatabase.rawQuery(query, null);
            }
            break;
            case MATCHER_ID_MOVIE_VIDEO: {
                String id = uri.getLastPathSegment();
                String query = "SELECT * FROM " + MovieVideos.TABLE_NAME + " WHERE " + MovieVideos.COLUMN_MOVIE_ID + " = ?";
                cursor = mMoviesDatabase.rawQuery(query, new String[]{id});
            }
            break;
            case MATCHER_ID_ALL_MOVIE_VIDEOS: {
                String query = "SELECT * FROM " + MovieVideos.TABLE_NAME;
                cursor = mMoviesDatabase.rawQuery(query, null);
            }
            break;
            case MATCHER_ID_MOVIE_REVIEW: {
                String id = uri.getLastPathSegment();
                String query = "SELECT * FROM " + MovieReviews.TABLE_NAME + " WHERE " + MovieReviews.COLUMN_MOVIE_ID + " = ?";
                cursor = mMoviesDatabase.rawQuery(query, new String[]{id});
            }
            break;
            case MATCHER_ID_ALL_MOVIE_REVIEWS: {
                String query = "SELECT * FROM " + MovieReviews.TABLE_NAME;
                cursor = mMoviesDatabase.rawQuery(query, null);
            }
            break;
            case MATCHER_ID_ALL_POPULAR_MOVIES: {
                // Join the popular-movies index against the movies table.
                String query =
                        "SELECT * FROM " + PopularMovies.TABLE_NAME + " a " +
                        "INNER JOIN " + Movies.TABLE_NAME + " b " +
                        "ON a." + PopularMovies.COLUMN_MOVIE_ID + " = " + "b." + Movies._ID;
                cursor = mMoviesDatabase.rawQuery(query, null);
            }
            break;
            case MATCHER_ID_ALL_TOP_RATED_MOVIES: {
                // Join the top-rated index against the movies table.
                String query =
                        "SELECT * FROM " + TopRatedMovies.TABLE_NAME + " a " +
                        "INNER JOIN " + Movies.TABLE_NAME + " b " +
                        "ON a." + TopRatedMovies.COLUMN_MOVIE_ID + " = " + "b." + Movies._ID;
                cursor = mMoviesDatabase.rawQuery(query, null);
            }
            break;
            default:
                throw new UnsupportedOperationException("Unknown uri: " + uri);
        }
        if(cursor != null) {
            cursor.setNotificationUri(getContext().getContentResolver(), uri);
        }
        return cursor;
    }

    @Override
    public String getType(Uri uri) {
        final int match = mUriMatcher.match(uri);
        String type;
        switch(match) {
            case MATCHER_ID_MOVIE: {
                type = Movies.getContentItemType();
            }
            break;
            case MATCHER_ID_ALL_MOVIES: {
                type = Movies.getContentType();
            }
            break;
            case MATCHER_ID_ALL_FAVOURITE_MOVIES: {
                type = Movies.getContentType();
            }
            break;
            case MATCHER_ID_MOVIE_VIDEO: {
                type = MovieVideos.getContentItemType();
            }
            break;
            case MATCHER_ID_ALL_MOVIE_VIDEOS: {
                type = MovieVideos.getContentType();
            }
            break;
            case MATCHER_ID_MOVIE_REVIEW: {
                type = MovieReviews.getContentItemType();
            }
            break;
            case MATCHER_ID_ALL_MOVIE_REVIEWS: {
                type = MovieReviews.getContentType();
            }
            break;
            case MATCHER_ID_ALL_POPULAR_MOVIES: {
                type = PopularMovies.getContentType();
            }
            break;
            case MATCHER_ID_ALL_TOP_RATED_MOVIES: {
                type = TopRatedMovies.getContentType();
            }
            break;
            default:
                throw new UnsupportedOperationException("Unknown uri: " + uri);
        }
        return type;
    }

    /**
     * Single-row inserts are deliberately unsupported; all writes go through
     * {@link #bulkInsert(Uri, ContentValues[])} for transaction efficiency.
     */
    @Override
    public synchronized Uri insert(Uri uri, ContentValues contentValues) {
        throw new UnsupportedOperationException("Individual inserts are not supported in this content provider. Use bulk insert instead.");
        /*
        final int match = mUriMatcher.match(uri);
        switch(match) {
            case MATCHER_ID_ALL_POPULAR_MOVIES:
                mMoviesDatabase.insertWithOnConflict(PopularMovies.TABLE_NAME, null, contentValues, SQLiteDatabase.CONFLICT_REPLACE);
                break;
            case MATCHER_ID_ALL_TOP_RATED_MOVIES:
                mMoviesDatabase.insertWithOnConflict(TopRatedMovies.TABLE_NAME, null, contentValues, SQLiteDatabase.CONFLICT_REPLACE);
                break;
            default:
                throw new UnsupportedOperationException("Unknown uri: " + uri);
        }
        getContext().getContentResolver().notifyChange(uri, null, false);
        return uri;
        */
    }

    @Override
    public synchronized int delete(Uri uri, String selection, String[] selectionArgs) {
        int rowsDeleted;
        switch(mUriMatcher.match(uri)) {
            case MATCHER_ID_MOVIE: {
                String id = uri.getLastPathSegment();
                rowsDeleted = mMoviesDatabase.delete(Movies.TABLE_NAME, Movies._ID + " = ?", new String[]{id});
            }
            break;
            case MATCHER_ID_ALL_MOVIES: {
                rowsDeleted = mMoviesDatabase.delete(Movies.TABLE_NAME, selection, selectionArgs);
            }
            break;
            case MATCHER_ID_ALL_POPULAR_MOVIES: {
                rowsDeleted = mMoviesDatabase.delete(PopularMovies.TABLE_NAME, selection, selectionArgs);
            }
            break;
            case MATCHER_ID_MOVIE_VIDEO: {
                // Single-video URIs are keyed by the owning movie id.
                String id = uri.getLastPathSegment();
                rowsDeleted = mMoviesDatabase.delete(MovieVideos.TABLE_NAME, MovieVideos.COLUMN_MOVIE_ID + " = ?", new String[]{id});
            }
            break;
            case MATCHER_ID_ALL_MOVIE_VIDEOS: {
                rowsDeleted = mMoviesDatabase.delete(MovieVideos.TABLE_NAME, selection, selectionArgs);
            }
            break;
            case MATCHER_ID_MOVIE_REVIEW: {
                String id = uri.getLastPathSegment();
                rowsDeleted = mMoviesDatabase.delete(MovieReviews.TABLE_NAME, MovieReviews.COLUMN_MOVIE_ID + " = ?", new String[]{id});
            }
            break;
            case MATCHER_ID_ALL_MOVIE_REVIEWS: {
                rowsDeleted = mMoviesDatabase.delete(MovieReviews.TABLE_NAME, selection, selectionArgs);
            }
            break;
            case MATCHER_ID_ALL_TOP_RATED_MOVIES: {
                rowsDeleted = mMoviesDatabase.delete(TopRatedMovies.TABLE_NAME, selection, selectionArgs);
            }
            break;
            default:
                throw new UnsupportedOperationException("Unknown uri: " + uri);
        }
        getContext().getContentResolver().notifyChange(uri, null);
        return rowsDeleted;
    }

    @Override
    public synchronized int update(Uri uri, ContentValues values, String selection, String[] selectionArgs) {
        int rowsUpdated;
        switch(mUriMatcher.match(uri)) {
            case MATCHER_ID_MOVIE: {
                String id = uri.getLastPathSegment();
                rowsUpdated = mMoviesDatabase.update(Movies.TABLE_NAME, values, Movies._ID + " = ?", new String[]{id});
            }
            break;
            case MATCHER_ID_ALL_MOVIES: {
                rowsUpdated = mMoviesDatabase.update(Movies.TABLE_NAME, values, selection, selectionArgs);
            }
            break;
            case MATCHER_ID_POPULAR_MOVIE: {
                String id = uri.getLastPathSegment();
                rowsUpdated = mMoviesDatabase.update(PopularMovies.TABLE_NAME, values, PopularMovies._ID + " = ?", new String[]{id});
            }
            break;
            case MATCHER_ID_ALL_POPULAR_MOVIES: {
                rowsUpdated = mMoviesDatabase.update(PopularMovies.TABLE_NAME, values, selection, selectionArgs);
            }
            break;
            case MATCHER_ID_TOP_RATED_MOVIE: {
                String id = uri.getLastPathSegment();
                rowsUpdated = mMoviesDatabase.update(TopRatedMovies.TABLE_NAME, values, TopRatedMovies._ID + " = ?", new String[]{id});
            }
            break;
            case MATCHER_ID_ALL_TOP_RATED_MOVIES: {
                rowsUpdated = mMoviesDatabase.update(TopRatedMovies.TABLE_NAME, values, selection, selectionArgs);
            }
            break;
            default:
                throw new UnsupportedOperationException("Unknown uri: " + uri);
        }
        getContext().getContentResolver().notifyChange(uri, null);
        return rowsUpdated;
    }

    /**
     * Inserts all rows for the addressed table inside a single transaction
     * using precompiled statements.  Movies are upserted (insert-or-update);
     * videos, reviews, popular and top-rated index rows are plain inserts.
     */
    @Override
    public synchronized int bulkInsert(@NonNull Uri uri, @NonNull ContentValues[] values) {
        switch(mUriMatcher.match(uri)) {
            case MATCHER_ID_ALL_MOVIES: {
                SQLiteStatement movieExistsStatement = mMoviesDatabase.compileStatement(Movies.getMovieExistsSql());
                SQLiteStatement movieInsertStatement = mMoviesDatabase.compileStatement(Movies.getInsertSql());
                SQLiteStatement movieUpdatedStatement = mMoviesDatabase.compileStatement(Movies.getUpdateSql());
                mMoviesDatabase.beginTransaction();
                try {
                    for (ContentValues value : values) {
                        movieExistsStatement.clearBindings();
                        movieExistsStatement.bindString(1, value.getAsString(Movies._ID));
                        if (movieExistsStatement.simpleQueryForLong() == 0L) {
                            // Do insert
                            movieInsertStatement.clearBindings();
                            movieInsertStatement.bindLong(1, value.getAsLong(Movies._ID));
                            movieInsertStatement.bindString(2, value.getAsString(Movies.COLUMN_MOVIE_TITLE));
                            movieInsertStatement.bindString(3, value.getAsString(Movies.COLUMN_MOVIE_OVERVIEW));
                            movieInsertStatement.bindString(4, value.getAsString(Movies.COLUMN_MOVIE_RELEASE_DATE));
                            movieInsertStatement.bindString(5, value.getAsString(Movies.COLUMN_MOVIE_POSTER_PATH));
                            movieInsertStatement.bindString(6, value.getAsString(Movies.COLUMN_MOVIE_BACKDROP_PATH));
                            movieInsertStatement.bindDouble(7, value.getAsDouble(Movies.COLUMN_MOVIE_VOTE_AVERAGE));
                            movieInsertStatement.bindLong(8, value.getAsLong(Movies.COLUMN_MOVIE_VOTE_COUNT));
                            movieInsertStatement.bindLong(9, value.getAsLong(Movies.COLUMN_MOVIE_IS_FAVOURITE));
                            movieInsertStatement.execute();
                        } else {
                            // update (favourite flag is intentionally not
                            // overwritten by a refresh)
                            movieUpdatedStatement.clearBindings();
                            movieUpdatedStatement.bindString(1, value.getAsString(Movies.COLUMN_MOVIE_TITLE));
                            movieUpdatedStatement.bindString(2, value.getAsString(Movies.COLUMN_MOVIE_OVERVIEW));
                            movieUpdatedStatement.bindString(3, value.getAsString(Movies.COLUMN_MOVIE_RELEASE_DATE));
                            movieUpdatedStatement.bindString(4, value.getAsString(Movies.COLUMN_MOVIE_POSTER_PATH));
                            movieUpdatedStatement.bindString(5, value.getAsString(Movies.COLUMN_MOVIE_BACKDROP_PATH));
                            movieUpdatedStatement.bindDouble(6, value.getAsDouble(Movies.COLUMN_MOVIE_VOTE_AVERAGE));
                            movieUpdatedStatement.bindLong(7, value.getAsLong(Movies.COLUMN_MOVIE_VOTE_COUNT));
                            movieUpdatedStatement.bindLong(8, value.getAsLong(Movies._ID));
                            movieUpdatedStatement.execute();
                        }
                    }
                    mMoviesDatabase.setTransactionSuccessful();
                } finally {
                    mMoviesDatabase.endTransaction();
                }
            }
            break;
            case MATCHER_ID_ALL_MOVIE_VIDEOS: {
                SQLiteStatement insertStatement = mMoviesDatabase.compileStatement(MovieVideos.getInsertSql());
                mMoviesDatabase.beginTransaction();
                try {
                    for (ContentValues value : values) {
                        insertStatement.clearBindings();
                        insertStatement.bindLong(1, value.getAsLong(MovieVideos.COLUMN_MOVIE_ID));
                        insertStatement.bindString(2, value.getAsString(MovieVideos.COLUMN_VIDEO_ID));
                        insertStatement.bindString(3, value.getAsString(MovieVideos.COLUMN_VIDEO_KEY));
                        insertStatement.bindString(4, value.getAsString(MovieVideos.COLUMN_VIDEO_TITLE));
                        insertStatement.bindString(5, value.getAsString(MovieVideos.COLUMN_VIDEO_SITE));
                        insertStatement.execute();
                    }
                    mMoviesDatabase.setTransactionSuccessful();
                } finally {
                    mMoviesDatabase.endTransaction();
                }
            }
            break;
            case MATCHER_ID_ALL_MOVIE_REVIEWS: {
                SQLiteStatement insertStatement = mMoviesDatabase.compileStatement(MovieReviews.getInsertSql());
                mMoviesDatabase.beginTransaction();
                try {
                    for (ContentValues value : values) {
                        insertStatement.clearBindings();
                        insertStatement.bindLong(1, value.getAsLong(MovieReviews.COLUMN_MOVIE_ID));
                        insertStatement.bindString(2, value.getAsString(MovieReviews.COLUMN_REVIEW_ID));
                        insertStatement.bindString(3, value.getAsString(MovieReviews.COLUMN_REVIEW_AUTHOR));
                        insertStatement.bindString(4, value.getAsString(MovieReviews.COLUMN_REVIEW_CONTENT));
                        insertStatement.execute();
                    }
                    mMoviesDatabase.setTransactionSuccessful();
                } finally {
                    mMoviesDatabase.endTransaction();
                }
            }
            break;
            case MATCHER_ID_ALL_POPULAR_MOVIES: {
                SQLiteStatement insertStatement = mMoviesDatabase.compileStatement(PopularMovies.getInsertSql());
                mMoviesDatabase.beginTransaction();
                try {
                    for (ContentValues value : values) {
                        insertStatement.clearBindings();
                        insertStatement.bindLong(1, value.getAsLong(PopularMovies.COLUMN_MOVIE_ID));
                        insertStatement.bindLong(2, value.getAsLong(PopularMovies.COLUMN_RESULT_PAGE));
                        insertStatement.execute();
                    }
                    mMoviesDatabase.setTransactionSuccessful();
                } finally {
                    mMoviesDatabase.endTransaction();
                }
            }
            break;
            case MATCHER_ID_ALL_TOP_RATED_MOVIES: {
                SQLiteStatement insertStatement = mMoviesDatabase.compileStatement(TopRatedMovies.getInsertSql());
                mMoviesDatabase.beginTransaction();
                try {
                    for (ContentValues value : values) {
                        insertStatement.clearBindings();
                        // FIX: read the values using the TopRatedMovies contract
                        // constants; the original copy-pasted the PopularMovies
                        // constants into this branch.  (Assumes TopRatedMovies
                        // declares COLUMN_MOVIE_ID / COLUMN_RESULT_PAGE in
                        // parallel with PopularMovies, as its getInsertSql()
                        // usage implies.)
                        insertStatement.bindLong(1, value.getAsLong(TopRatedMovies.COLUMN_MOVIE_ID));
                        insertStatement.bindLong(2, value.getAsLong(TopRatedMovies.COLUMN_RESULT_PAGE));
                        insertStatement.execute();
                    }
                    mMoviesDatabase.setTransactionSuccessful();
                } finally {
                    mMoviesDatabase.endTransaction();
                }
            }
            break;
            default:
                throw new UnsupportedOperationException("Unsupported uri: " + uri);
        }
        getContext().getContentResolver().notifyChange(uri, null, false);
        return values.length;
    }

    /**
     * This is the implementation of the backing SQLite database
     * for storing and manipulating movie content.
     *
     * This is specified as a private inner class to the provider
     * as we don't want it to be publicly available for any
     * operations outside the API made available by the host
     * provider implementation.
     */
    private static class MoviesDatabaseHelper extends SQLiteOpenHelper {
        private static final int DATABASE_VERSION = 1;
        private static final String DATABASE_NAME = "movies.db";

        public MoviesDatabaseHelper(Context context) {
            super(context, DATABASE_NAME, null, DATABASE_VERSION);
        }

        @Override
        public void onCreate(SQLiteDatabase database) {
            database.execSQL(Movies.getCreateTableSql());
            database.execSQL(MovieVideos.getCreateTableSql());
            database.execSQL(MovieReviews.getCreateTableSql());
            database.execSQL(PopularMovies.getCreateTableSql());
            database.execSQL(TopRatedMovies.getCreateTableSql());
        }

        @Override
        public void onUpgrade(SQLiteDatabase sqLiteDatabase, int i, int i1) { }
    }
}
|
<filename>src/app/models/swagger/swagger.response.ts
import { Schema } from './swagger.models';
/**
* Represent all existing responses in the API definition
*/
/**
 * Represent all existing responses in the API definition
 */
export interface SwaggerResponse {
    /** response code (200, 204, 401, ...) */
    [ref: string]: {
        /** description like Data Returned, Data Not Found, ... */
        description: string;
        /** optional schema describing the data returned by this response */
        schema?: Schema;
    };
}
|
from random import randint
name = "file"
def gerar_arquivo_aleatorio(nome, quantidade):
    """Write ``quantidade`` comma-separated random integers to ``<nome>-<quantidade>``.

    Each value is drawn uniformly from [-quantidade + 1, quantidade]
    (inclusive, per random.randint).

    BUG FIX: the original ignored the ``nome`` parameter and used the
    module-level global ``name`` instead; the parameter is now honoured.
    """
    with open("{}-{}".format(nome, quantidade), "w+") as arquivo:
        for _ in range(quantidade):
            arquivo.write("%d," % (randint(-quantidade + 1, quantidade)))
# Generate sample files of increasing size (same three calls as before,
# expressed as a loop).
for quantidade in (1000, 10000, 100000):
    gerar_arquivo_aleatorio(name, quantidade)
|
#!/usr/bin/env bash
# Move every freshly built NuGet package from each project's Release output
# into the LocalRepository feed directory.
for project in \
    Bolts.Android \
    Couchbase.Lite.Android \
    Couchbase.Lite.Android.Custom \
    Couchbase.Lite.Android.ForestDB \
    Couchbase.Lite.Java.Core \
    Jackson.Annotations \
    Jackson.Core \
    Jackson.Databinding \
    Mpos.Android.Core \
    Mpos.Android.Ui \
    Mpos.Core \
    Otto \
    StateLess4j
do
    mv "$project"/bin/Release/*.nupkg LocalRepository
done
|
#!/bin/bash
# SGE batch-job script: runs the end2end_neural_el XML conversion for the
# AIDA datasets on a GPU node.  The #$ lines below are scheduler directives.
#$-m abe
#$-M yding4@nd.edu
#$-q gpu # specify the queue
#$-l gpu_card=4
#$-N aida_xml_from_end2end_neural
# Put the project conda environment's binaries and libraries first on the path.
export PATH=/afs/crc.nd.edu/user/y/yding4/.conda/envs/e2e_EL_evaluate/bin:$PATH
export LD_LIBRARY_PATH=/afs/crc.nd.edu/user/y/yding4/.conda/envs/e2e_EL_evaluate/lib:$LD_LIBRARY_PATH
# Script and data locations.
CODE=/scratch365/yding4/e2e_EL_evaluate/e2e_EL_evaluate/prepare_data/end2end_neural_el/xml_from_end2end_neural_el.py
INPUT_DIR=/scratch365/yding4/e2e_EL_evaluate/data/aida/xml/trans_span2el_span
OUTPUT_DIR=/scratch365/yding4/e2e_EL_evaluate/data/aida/xml/copy_xml_from_end2end_neural_el
DATASETS="['aida_testa','aida_testb','aida_train']"
# Endpoint the conversion script talks to (presumably a local annotation
# service — confirm it is running before submission).
URL="http://localhost:5555"
python ${CODE} --input_dir ${INPUT_DIR} --output_dir ${OUTPUT_DIR} --datasets ${DATASETS} --URL ${URL}
|
import numpy as np
def random_image_augmentation(x, height_shift_range, width_shift_range):
    """Apply a random translation to image ``x``.

    The vertical offset is drawn uniformly from
    [-height_shift_range, height_shift_range] and scaled by the image
    height; the horizontal offset likewise from ``width_shift_range``
    scaled by the width.  A falsy range yields a zero offset on that axis.
    """
    height, width = x.shape[0], x.shape[1]
    tx = np.random.uniform(-height_shift_range, height_shift_range) * height if height_shift_range else 0
    ty = np.random.uniform(-width_shift_range, width_shift_range) * width if width_shift_range else 0
    # Delegate the actual pixel translation to the helper below.
    return perform_shift(x, tx, ty)


def perform_shift(image, tx, ty):
    """Shift ``image`` by (tx, ty) pixels.

    Placeholder implementation: returns the image unchanged.  A real
    implementation depends on the image-processing library in use, e.g.
    OpenCV:
        cv2.warpAffine(image, np.float32([[1, 0, tx], [0, 1, ty]]),
                       (image.shape[1], image.shape[0]))
    """
    return image
|
package net.synqg.qg.nlg.qgtemplates;
import lombok.experimental.Accessors;
import net.synqg.qg.nlp.labels.NamedEntityType;
import net.synqg.qg.service.QaPair;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* @author viswa
*/
@Accessors(fluent = true)
public interface QgTemplate {
    /**
     * Question word to use when the named entity appears as the OBJECT of the
     * sentence.  NOTE(review): ORGANIZATION maps to "Where" here (same as
     * LOCATION) — confirm this is the intended question word for organizations.
     */
    Map<NamedEntityType, String> NAMED_ENTITY_TYPE_OBJECT_MAP = new HashMap<NamedEntityType, String>() {
        {
            put(NamedEntityType.ORGANIZATION, "Where");
            put(NamedEntityType.PERSON, "Whom");
            put(NamedEntityType.LOCATION, "Where");
            put(NamedEntityType.DATE, "When");
        }
    };
    /**
     * Question word to use when the named entity appears as the SUBJECT of
     * the sentence.
     */
    Map<NamedEntityType, String> NAMED_ENTITY_TYPE_SUBJECT_MAP = new HashMap<NamedEntityType, String>() {
        {
            put(NamedEntityType.ORGANIZATION, "Who");
            put(NamedEntityType.PERSON, "Who");
            put(NamedEntityType.LOCATION, "What");
            put(NamedEntityType.DATE, "When");
        }
    };
    /**
     * Generate a Question for user sentence.
     *
     * @return list of question answer pairs
     */
    List<QaPair> generateQuestion();
    /** Returns the template's display name; defaults to the implementing class name. */
    default String templateName() {
        return this.getClass().getSimpleName();
    }
    /** Returns the trigger phrase/pattern that activates this template. */
    String trigger();
}
|
import { getRepository, Repository } from 'typeorm';
import IUsersUpdateRepository from '@modules/users/Repositories/IUpdateUsersRepository';
import IUpdateUserDTO from '@modules/users/dtos/IUpdateUsersDTO';
import User from '@modules/users/infra/typeorm/models/UsersUpdate';
/**
 * TypeORM-backed repository used by the user-update module.
 */
class UserUpdateRepository implements IUsersUpdateRepository {
  private ormRepository: Repository<User>;

  constructor() {
    this.ormRepository = getRepository(User);
  }

  /** Resolve a user by primary key; undefined when none matches. */
  public async findById(id: string): Promise<User | undefined> {
    return this.ormRepository.findOne({
      where: { id },
    });
  }

  /** Resolve a user by e-mail address; undefined when none matches. */
  public async findByEmail(email: string): Promise<User | undefined> {
    return this.ormRepository.findOne({
      where: { email },
    });
  }

  /** Build and persist a new user record. */
  public async create({ name, email, groupsId, powerUser }: IUpdateUserDTO): Promise<User> {
    const user = this.ormRepository.create({ name, email, groupsId, powerUser });
    await this.ormRepository.save(user);
    return user;
  }

  /** Persist changes to an existing user entity. */
  public async save(user: User): Promise<User> {
    await this.ormRepository.save(user);
    return user;
  }

  /**
   * NOTE(review): the query result is discarded and the input entity is
   * returned unchanged — confirm whether the find() result was meant to
   * be returned to the caller.
   */
  public async find(user: User): Promise<User> {
    await this.ormRepository.find(user);
    return user;
  }
}

export default UserUpdateRepository;
|
#!/bin/bash
# Contriboard regression-test driver: records component versions, clears
# the database, runs the Robot Framework suites inside a Vagrant VM, then
# clears the database again.

# Commands executed inside the VM via `vagrant ssh -c`.
CLEARDB="cd contriboard-populator/ && fab clear_database"
APIVERSION="cd /home/vagrant/teamboard-api/ && echo Api version: >> /home/vagrant/stats/version.txt && git describe >> /home/vagrant/stats/version.txt"
IOVERSION="cd /home/vagrant/teamboard-io/ && echo IO version: >> /home/vagrant/stats/version.txt && git describe >> /home/vagrant/stats/version.txt"
CLIENTVERSION="cd /home/vagrant/teamboard-client-react/ && echo Client version: >> /home/vagrant/stats/version.txt && git describe >> /home/vagrant/stats/version.txt"
DBVERSION="echo MongoDB version: >> /home/vagrant/stats/version.txt && mongod --version >> /home/vagrant/stats/version.txt"
SYSINFO="sudo lshw >> /home/vagrant/stats/sysinfo.txt"
SETTIME="sudo ln -sf /usr/share/zoneinfo/Europe/Helsinki /etc/localtime && date"

echo 'Set time:'
vagrant ssh -c "${SETTIME}"
echo 'Time set...'

# FIX: bash's builtin echo does not interpret '\n' escapes, so the original
# statements printed a literal backslash-n; printf handles the escapes.
printf '\nGet Updated tests:\n'
cd test/ && git pull
cd ..

printf '\nClear Database:\n'
vagrant ssh -c "${CLEARDB}"
printf 'Database Cleared.\n\n'

printf '\nGet Contriboard API version:\n'
vagrant ssh -c "${APIVERSION}"
printf 'API version at /stats/version.txt\n\n'

echo 'Get Contriboard IO version:'
vagrant ssh -c "${IOVERSION}"
printf 'IO version at /stats/version.txt\n\n'

echo 'Get Contriboard CLIENT version:'
vagrant ssh -c "${CLIENTVERSION}"
printf 'CLIENT version at /stats/version.txt\n\n'

echo 'Get Contriboard MONGODB version:'
vagrant ssh -c "${DBVERSION}"
printf 'MONGODB version at /stats/version.txt\n\n'

echo 'Get System info:'
vagrant ssh -c "${SYSINFO}"
printf 'System info at /stats/sysinfo.txt\n\n'

# Run the Robot Framework test suites.
cd test/robot-framework/ContriboardTesting/
pybot RegTestUsers.txt
pybot Invalid_Login_Test.txt
pybot New_User_Test.txt
pybot Old\ User\ Test.txt
cd .. && cd ContriboardTestScenarios/
pybot RegisterUsers.txt
pybot Scenario1.rst
pybot Scenario2.rst
pybot Scenario3.rst
pybot Scenario4.rst
pybot Scenario5.rst
cd .. && cd .. && cd ..

# Clean up: close leftover browsers and reset the database.
echo 'Clear Database:'
killall firefox
vagrant ssh -c "${CLEARDB}"
printf 'Database Cleared.\n\n'
|
/*
* Class and functions to provide communication with the Veles Web API
* server over websocket.
*
* Copyright (C) 2019 The Veles Core developers
* Author: <NAME>
*
* This program is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation, either version 3
* of the License, or (at your option) any later version.
*/
/*
 * Shared client state/configuration. Host, port, and protocol are chosen
 * based on whether the page is served from a '/veles/' path (dev machines)
 * or from the public explorer.
 */
var velesSocketClient = {
    host: (window.location.pathname.indexOf('/veles/') != -1) // dev machines
        ? 'localhost'
        : 'explorer.veles.network',
    port: (window.location.pathname.indexOf('/veles/') != -1)
        ? 8882
        : 443,
    protocol: (window.location.pathname.indexOf('/veles/') != -1) // dev machines
        ? 'ws'
        : 'wss', // protocol: wss or ws
    retries: 300,           // remaining reconnect attempts
    retryTimeout: 2000,     // ms between reconnect attempts
    connected: false,
    hooks: {},              // event name -> array of handler callbacks
    onResultCallbacks: {},  // request-id -> one-shot response callback
    requestID: 1
};
/* Log a message to the browser console and, when the page provides a
 * #debug-area list, prepend it there as well. */
velesSocketClient.log = function(msg) {
    if (typeof console !== 'undefined')
        console.log(msg)
    if (document.getElementById('debug-area') != null) {
        // FIX: `li` was an accidental implicit global; declare it locally.
        var li = document.createElement('li');
        li.innerHTML = msg;
        document.getElementById('debug-area').prepend(li);
    }
}
/* Empty the on-page #debug-area list; when still connected, leave a
 * single "(connected)" marker entry. The `msg` parameter is unused but
 * kept for interface compatibility. */
velesSocketClient.clear_console = function(msg) {
    if (document.getElementById('debug-area') != null) {
        document.getElementById('debug-area').innerHTML = '';
        if (this.connected) {
            // FIX: `li` was an accidental implicit global; declare it locally.
            var li = document.createElement('li');
            li.innerHTML = '... (connected)';
            document.getElementById('debug-area').appendChild(li);
        }
    }
}
/* Serialize and send a command message over the websocket; attempts to
 * (re)connect first when the socket is down. */
velesSocketClient.send_cmd = function(service, name, requestID, data = {}, filter = null) {
    // FIX: `msg` was an implicit global, and the `data` argument was
    // silently dropped — an empty object was always sent on the wire.
    var msg = window.JSON.stringify({
        'message-type': 'command',
        'name': name,
        'service': service,
        'data': data,
        'filter': filter,
        'request-id': requestID
    })
    if (!this.ws || !this.connected) {
        velesSocketClient.log('No WebSocket connection');
        velesSocketClient.connect()
    }
    if (this.connected) {
        velesSocketClient.log('>> ' + msg);
        this.ws.send(msg);
    }
};
/* Send a command and register a one-shot callback for its response.
 * A fresh request ID ties the eventual 'response' message back to the
 * supplied callback; nothing is sent while disconnected. */
velesSocketClient.get_cmd_result = function(service, name, data, callback, filter = null) {
    var self = velesSocketClient;
    if (!this.ws || !this.connected) {
        self.log('No WebSocket connection');
        self.connect();
    }
    if (!this.connected)
        return;
    self.requestID++;
    self.onResultCallbacks[self.requestID] = callback;
    self.send_cmd(service, name, self.requestID, data, filter);
};
/* Invoke every handler registered for hook_name, newest first; handlers
 * receive `data` when it is provided. */
velesSocketClient.handle = function(hook_name, data = null) {
    if (!velesSocketClient.hooks.hasOwnProperty(hook_name)) {
        velesSocketClient.log('No handlers found for ' + hook_name)
        return;
    }
    // FIX: the original re-checked hasOwnProperty here, which is always
    // true after the early return above — the redundant guard is removed.
    for (var i = velesSocketClient.hooks[hook_name].length - 1; i >= 0; i--) {
        if (data)
            velesSocketClient.hooks[hook_name][i](data);
        else
            velesSocketClient.hooks[hook_name][i]();
    }
};
/* Register a callback to run whenever the named event/hook fires. */
velesSocketClient.on = function(hook_name, callback) {
    var handlers = velesSocketClient.hooks[hook_name] || [];
    handlers.push(callback);
    velesSocketClient.hooks[hook_name] = handlers;
};
/*
 * Open the websocket connection and wire up its lifecycle handlers.
 * On close, the client retries after retryTimeout ms until the retry
 * budget runs out.
 * NOTE(review): `retries` is never reset after a successful reconnect,
 * so the budget is shared across the page's lifetime — confirm intended.
 */
velesSocketClient.connect = function() {
    velesSocketClient.log("Connecting to " + velesSocketClient.host + " ...")
    var ws = new WebSocket(velesSocketClient.protocol + "://" + velesSocketClient.host + ":" + velesSocketClient.port + "/ws/");
    ws.onopen = function() {
        velesSocketClient.log('WebSocket connected, waiting for events');
        velesSocketClient.ws = ws;
        velesSocketClient.connected = true;
        velesSocketClient.handle('connect');
    };
    ws.onerror = function() {
        velesSocketClient.log('WebSocket error');
    };
    ws.onclose = function() {
        velesSocketClient.log('WebSocket closed');
        velesSocketClient.connected = false;
        velesSocketClient.handle('disconnect');
        // Schedule a reconnect attempt while the retry budget lasts.
        if (velesSocketClient.retries) {
            window.setTimeout(function() {
                velesSocketClient.retries--;
                velesSocketClient.connect();
            }, velesSocketClient.retryTimeout);
        }
    };
    ws.onmessage = function(msgevent) {
        var payload = msgevent.data; //JSON.parse(msgevent.data);
        velesSocketClient.log('<< ' + payload);
        var msg = JSON.parse(payload);
        // Events fan out to the registered hooks; responses and errors are
        // routed to the one-shot callback stored under their request-id.
        if (msg.hasOwnProperty('message-type') && msg['message-type'] == 'event' && msg.hasOwnProperty('name')) {
            velesSocketClient.handle(msg.name, msg);
        } else if (msg['message-type'] == 'response') {
            if (velesSocketClient.onResultCallbacks.hasOwnProperty(msg['request-id'])) {
                velesSocketClient.onResultCallbacks[msg['request-id']](msg['data']);
                delete velesSocketClient.onResultCallbacks[msg['request-id']];
            }
        } else if (msg['message-type'] == 'error' && msg.hasOwnProperty('request-id')) {
            if (velesSocketClient.onResultCallbacks.hasOwnProperty(msg['request-id'])) {
                velesSocketClient.onResultCallbacks[msg['request-id']](msg);
                delete velesSocketClient.onResultCallbacks[msg['request-id']];
            }
        }
    };
};
|
// Entry point: boot the SCTK Express app on the configured port and report
// startup time, release version, and GitHub token availability.
console.time('Time taken');
var app = require('./app');
const path = require('path');
var config = require(path.join(__dirname, '../configuration', 'config.json'));
const chalk = require('chalk');

// create server of app
var server = app.listen(config.serverPort);
console.timeEnd('Time taken');
console.log(`SCTK release version: ${config.release}`)
console.log(`Server is listening on ${config.serverPort}`);

// Token variable check: extensions come from GitHub Enterprise and need
// GITHUB_TOKEN; warn (but keep running) when it is missing.
if (!Object.prototype.hasOwnProperty.call(process.env, 'GITHUB_TOKEN'))
    console.log(
        chalk.yellow(
            'WARNING: GITHUB_TOKEN environment variable is not available, SCTK will not be able to load the extensions from the Github Enterprise'
        )
    );
else console.log(chalk.green('GITHUB_TOKEN environment variable detected'));

// Exported so test harnesses can close the server.
module.exports = server;
|
#!/usr/bin/env bash
# Test driver for minicoin: runs the test suites below and counts failures.
set -o pipefail

# Failure counter, incremented by assert() and the per-suite checks.
declare -i errors=0

# ANSI color escapes for status output.
GREEN="\033[0;32m"
YELLOW="\e[0;33m"
RED="\033[0;31m"
NOCOL="\033[0m"

# Abort the run cleanly on Ctrl-C.
trap ctrl_c INT
# SIGINT handler: announce the interruption and stop the run.
function ctrl_c() {
    # FIX: message typo "Existing" -> "Exiting".
    printf "${YELLOW}Exiting due to interrupt!${NOCOL}\n"
    exit
}
# Optional test-case filter: `--tests a,b,c` limits the run to the named cases.
testcases=()
if [ "$1" == "--tests" ]
then
    # Split the comma-separated list into the testcases array.
    IFS=','; testcases=( $2 )
    shift
    shift
fi
# run_case NAME
# Succeeds when NAME should be executed: either no --tests filter was
# given, or NAME appears in the filter list.
function run_case()
{
    # FIX: compare the count numerically (-eq) rather than as a string.
    [ ${#testcases[@]} -eq 0 ] && return 0
    # FIX: make the loop variable local and quote the expansion so filter
    # entries are not word-split or glob-expanded.
    local testcase
    for testcase in "${testcases[@]}"
    do
        [ "$testcase" == "$1" ] && return 0
    done
    return 1
}
# assert ACTUAL EXPECTED
# Compares the two strings (carriage returns stripped from ACTUAL) and,
# when they differ, prints a diff-style failure report and bumps $errors.
function assert()
{
    got="$1"
    got="${got//$'\r'/}"
    want="$2"
    if [[ "$got" != "$want" ]]; then
        printf "${RED}FAIL - %s\n" "$(cmp <(echo $got) <(echo $want))"
        printf "\t${YELLOW}'%s'${NOCOL} vs\n" "$got"
        printf "\t'%s'${NOCOL}\n" "$want"
        errors=$(( errors + 1 ))
    # else
    #     printf "${GREEN}PASS '$1' equals '$2'${NOCOL}\n"
    fi
}
# Print a colored summary line with the final error count.
function finish() {
    if [ $errors -gt 0 ]; then
        printf "${RED}"
    else
        printf "${GREEN}"
    fi
    printf "Done with $errors error!${NOCOL}\n"
}
# Suite: option-parsing helper script.
if (run_case "parse-opts")
then
    echo "============================> Testing parse-opts <============================"
    ./parse-opts-test.sh
    [ $? -gt 0 ] && errors=$(( $errors + 1 ))
fi

# Suite: Vagrantfile unit tests (Ruby).
if (run_case "Vagrantfile")
then
    echo "============================> Testing Vagrantfile <============================"
    ruby autotest.rb
    error=$?
    [ $error -gt 0 ] && errors=$(( $errors + $error ))
fi

echo "=========================> Testing minicoin commands <========================="
if (run_case "list")
then
    echo "=== Testing list"
    minicoin list > /dev/null
    error=$?
    assert $error 0
fi

if (run_case global)
then
    echo "=== Testing in global environment"
    cd ..
    # NOTE(review): the while-loop below runs in a pipeline subshell, so its
    # updates to $errors/$error are lost when the subshell exits; the check
    # after the parenthesis sees a stale $error. Confirm whether per-machine
    # describe failures are meant to be counted.
    minicoin list --machine-readable | cut -d ',' -f 6 | (
        while read machine
        do
            echo " $machine"
            minicoin describe $machine > /dev/null
            error=$?
            [ $error -gt 0 ] && errors=$(( $errors + $error ))
        done
    )
    [ $error -gt 0 ] && errors=$(( $errors + $error ))
    printf " = Returning to test environment "
    cd -
fi

echo "============================> Testing job running <============================"
if (run_case run)
then
    # Collect the names of currently-running machines ('*' marker rows).
    IFS=$'\n' machines=( `minicoin list --machine-readable | grep '*' | cut -d, -f 6` )
    count=${#machines[@]}
    if [ $count -eq 0 ]; then
        echo "No machines running, bring up test machines for more tests"
    else
        echo "Running test on $count machines in sequence"
        stdout=""
        stderr=""
        # Each machine's test job exits with code 42, so the accumulated
        # exit status is expected to be count*42.
        minicoin run --jobconfig 0 test ${machines[@]} -- error > .std.out 2> .std.err
        return=$?
        stdout=`grep "Hello" .std.out`
        stderr=`grep "error code" .std.err`
        rm .std.out
        rm .std.err
        assert $return $(( $count*42 ))
        assert `echo "$stdout" | head -n 1` "Hello runner!"
        assert `echo "$stderr" | head -n 1` "Exiting with error code 42"
        assert `echo "$stdout" | wc -l | xargs` $count
        assert `echo "$stderr" | wc -l | xargs` $count
    fi
    # Parallel runs only make sense with more than one machine, and are
    # skipped when the sequential phase already failed.
    if [ $count -gt 1 ]
    then
        if [[ $errors -gt 0 ]]
        then
            printf "${RED}Skipping advanced tests due to earlier errors${NOCOL}\n"
        else
            echo "Running test on $count machines in parallel"
            minicoin run --parallel --jobconfig 0 test "${machines[@]}" > .std.out 2> .std.err
            return=$?
            rm .std.out
            rm .std.err
            assert $return 0
            minicoin run --parallel --jobconfig 0 test "${machines[@]}" -- error > .std.out 2> .std.err
            return=$?
            rm .std.out
            rm .std.err
            assert $return $(( $count*42 ))
        fi
    fi
fi

finish
exit $errors
|
class Resource:
    """A community resource with contact and location details."""

    def __init__(self, name, type, address, phone, latitude, longitude, description):
        self.name = name
        self.type = type
        self.address = address
        self.phone = phone
        self.latitude = latitude
        self.longitude = longitude
        self.description = description

    def get_details(self):
        """Return a single-line, human-readable summary of every field."""
        return (
            f"Name: {self.name}, Type: {self.type}, Address: {self.address}, "
            f"Phone: {self.phone}, Latitude: {self.latitude}, "
            f"Longitude: {self.longitude}, Description: {self.description}"
        )
# FIX: Resource.__init__ takes 7 arguments (name, type, address, phone,
# latitude, longitude, description) but each call passed only 6 — the phone
# number was embedded in the address string, leaving `description` unfilled
# and raising a TypeError at import time. Phone is now a separate argument.
jewish_resources = [
    Resource("Feed Me Now", "restaurant", "123 1st Street, New York, NY", "555-5555", 123.123, -123.123, "stub"),
    Resource("Daven Now", "shul", "234 2nd Street, New York, NY", "444-4444", 44.55, 44.55, "stub 2"),
    Resource("Buy Now", "store", "345 3rd Street, New York, NY", "666-6666", 23.8, 15, "stub 3")]

for resource in jewish_resources:
    print(resource.get_details())
|
<gh_stars>0
# _*_ coding: utf-8 _*_
"""
Created by lr on 2019/09/03.
「pay接口」只能用户访问,CMS管理员不能反问
"""
from app.libs.redprint import RedPrint
from app.libs.token_auth import auth
from app.service.pay import Pay as PayService
from app.validators.params import IDMustBePositiveInt
__author__ = 'lr'
# Red-print (blueprint) for the payment endpoints.
api = RedPrint(name='pay', description='支付')


@api.route('/pre_order', methods=['POST'])
@api.doc()
@auth.login_required
def get_pre_order():
    '''Create a payment pre-order for the order id posted by the client.'''
    # Validate the posted order id (must be a positive integer).
    order_id = IDMustBePositiveInt().validate_for_api().id.data
    pay = PayService(order_id)
    # NOTE(review): the result of pay.pay() is discarded and the view
    # returns None — presumably PayService responds/aborts itself; confirm.
    pay.pay()
    pass
|
<filename>test/concat_hars_test.js
var hars = require('../helpers/hars');
/*
 * Verifies that hars() parses multiple HAR files and concatenates their
 * log entries into a single log object.
 */
exports.hars = {
    'parse and concat': function (test) {
        // Two fixture files are expected to contribute six entries total.
        var har = hars(['test/data/har1.js', 'test/data/har2.js']);
        test.equal(har.log.entries.length, 6);
        // NOTE(review): comparing an entry against the number 3 implies the
        // fixtures store plain numbers as entries — confirm against the
        // fixture files.
        test.equal(har.log.entries[2], 3);
        test.done();
    }
};
|
<reponame>jiawei397/deno-oak-nest
// deno-lint-ignore-file no-explicit-any
import { Constructor } from "../../../src/interfaces/type.interface.ts";
import { schedulerRegistry } from "./scheduler.registry.ts";
/**
 * Method decorator that registers the decorated method as a cron job,
 * keyed by its declaring class, in the shared scheduler registry.
 */
export function Cron(cronTime: string): MethodDecorator {
  return (
    target: InstanceType<Constructor>,
    methodName: string | symbol,
    _descriptor: PropertyDescriptor,
  ) => {
    const job = { cronTime, methodName };
    schedulerRegistry.addCronJob(target.constructor, job);
  };
}
/**
 * Method decorator that schedules the decorated method to run once after
 * `delay` milliseconds, optionally under the given job name.
 */
export function Timeout(delay: number, name?: string): MethodDecorator {
  return (
    target: any,
    methodName: string | symbol,
    _descriptor: PropertyDescriptor,
  ) => {
    const job = { delay, methodName, jobName: name };
    schedulerRegistry.addTimeoutJob(target.constructor, job);
  };
}
/**
 * Method decorator that schedules the decorated method to run repeatedly
 * every `delay` milliseconds, optionally under the given job name.
 */
export function Interval(delay: number, name?: string): MethodDecorator {
  return (
    target: any,
    methodName: string | symbol,
    _descriptor: PropertyDescriptor,
  ) => {
    const job = { delay, methodName, jobName: name };
    schedulerRegistry.addIntervalJob(target.constructor, job);
  };
}
|
#!/bin/bash
# CI script: build OpenVPN from source and run its t_client connectivity
# tests (udp and tcp against 127.0.0.1:1194).
set -eux

BUILD_BINARIESDIRECTORY="${BUILD_BINARIESDIRECTORY:-build}"
cd $BUILD_BINARIESDIRECTORY

# Fetch and build OpenVPN; the build log is shown only on failure.
git clone https://github.com/openvpn/openvpn
cd openvpn
autoreconf -iv
./configure > build.log 2>&1 || (cat build.log && exit 1)
# NOTE(review): this redirect overwrites the configure log above — use a
# second log file if the configure output should be preserved.
make > build.log 2>&1 || (cat build.log && exit 1)

# Dummy username/password file for --auth-user-pass.
echo test > /tmp/auth.txt
echo test >> /tmp/auth.txt

# Use the (single) layer-3 client profile provided by the environment.
CONFIG=`ls /tmp/*l3*ovpn`

# Generate the t_client harness configuration for the two runs.
cat << EOF > tests/t_client.rc
CA_CERT=fake
TEST_RUN_LIST="1 2"
OPENVPN_BASE="--config $CONFIG --auth-user-pass /tmp/auth.txt"
RUN_TITLE_1="testing udp/ipv4"
OPENVPN_CONF_1="--dev null --proto udp --port 1194 \$OPENVPN_BASE"
RUN_TITLE_2="testing tcp/ipv4"
OPENVPN_CONF_2="--dev null --proto tcp --port 1194 \$OPENVPN_BASE"
EOF

# Point the profile at the local test server, then run the checks.
sed -i 's/^remote.*$/remote 127.0.0.1 1194/g' /tmp/*l3*ovpn
make test_scripts=t_client.sh check
|
-- --------------------------------------------------------
-- Host: 127.0.0.1
-- Server version: 10.4.13-MariaDB - mariadb.org binary distribution
-- Server OS: Win64
-- HeidiSQL Version: 11.2.0.6213
-- --------------------------------------------------------

-- Save client session settings and relax checks for the import.
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET NAMES utf8 */;
/*!50503 SET NAMES utf8mb4 */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;

-- Dumping structure for table pekerja.agamas
CREATE TABLE IF NOT EXISTS `agamas` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `keterangan_agama` varchar(20) DEFAULT NULL,
  `created_at` timestamp NULL DEFAULT NULL,
  `updated_at` timestamp NULL DEFAULT NULL,
  PRIMARY KEY (`id`) USING BTREE
) ENGINE=MyISAM AUTO_INCREMENT=8 DEFAULT CHARSET=latin1 ROW_FORMAT=DYNAMIC;

-- Dumping structure for table pekerja.genders
-- FIX: `keterangan_lp` was quoted with single quotes, which MySQL/MariaDB
-- parses as a string literal, making this CREATE a syntax error;
-- identifiers must use backticks.
CREATE TABLE IF NOT EXISTS `genders` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `keterangan_gender` varchar(20) DEFAULT NULL,
  `keterangan_lp` varchar(20) DEFAULT NULL,
  `created_at` timestamp NULL DEFAULT NULL,
  `updated_at` timestamp NULL DEFAULT NULL,
  PRIMARY KEY (`id`) USING BTREE
) ENGINE=MyISAM AUTO_INCREMENT=8 DEFAULT CHARSET=latin1 ROW_FORMAT=DYNAMIC;

-- Dumping data for table pekerja.agamas: 8 rows
/*!40000 ALTER TABLE `agamas` DISABLE KEYS */;
INSERT INTO `agamas` (`id`, `keterangan_agama`, `created_at`, `updated_at`) VALUES
(1, 'ISLAM', NULL, NULL),
(2, 'KRISTEN', NULL, NULL),
(3, 'KATHOLIK', NULL, NULL),
(4, 'HINDU', NULL, NULL),
(5, 'BUDHA', NULL, NULL),
(6, 'KONGHUCU', NULL, NULL),
(7, '<NAME>', NULL, '2022-01-18 03:02:42');
/*!40000 ALTER TABLE `agamas` ENABLE KEYS */;

-- Dumping structure for table pekerja.initial_sistem
CREATE TABLE IF NOT EXISTS `initial_sistem` (
  `nama_lembaga` varchar(225) DEFAULT NULL,
  `nama_kontak_person` varchar(225) DEFAULT NULL,
  `telepon` varchar(225) DEFAULT NULL,
  `email` varchar(225) DEFAULT NULL,
  `draft_surat_keluar` text DEFAULT NULL,
  `directory_arsip_surat` varchar(225) DEFAULT NULL,
  `nomor_otomatis_surat_keluar` int(11) DEFAULT NULL,
  `nomor_otomatis_surat_pelayanan` int(11) DEFAULT NULL,
  `alamat_lembaga` varchar(225) DEFAULT NULL,
  `nama_kota_lembaga` varchar(225) DEFAULT NULL,
  `nama_kecamatan_lembaga` varchar(225) DEFAULT NULL,
  `nama_kelurahan_lembaga` varchar(225) DEFAULT NULL,
  `logo_lembaga` bigint(20) DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 ROW_FORMAT=DYNAMIC;

-- Dumping data for table pekerja.initial_sistem: ~1 rows (approximately)
/*!40000 ALTER TABLE `initial_sistem` DISABLE KEYS */;
INSERT INTO `initial_sistem` (`nama_lembaga`, `nama_kontak_person`, `telepon`, `email`, `draft_surat_keluar`, `directory_arsip_surat`, `nomor_otomatis_surat_keluar`, `nomor_otomatis_surat_pelayanan`, `alamat_lembaga`, `nama_kota_lembaga`, `nama_kecamatan_lembaga`, `nama_kelurahan_lembaga`, `logo_lembaga`) VALUES
('Kel<NAME>', 'KAMSANI', '0800000000', '<EMAIL>', NULL, 'c:\\xampp\\SiAgenTalas\\output\\arsip_surat', 201, 430, 'Jln. RE Martadinata No.10', 'Balikpapan', 'Balikpapan Kota', 'Telaga Sari', NULL);
/*!40000 ALTER TABLE `initial_sistem` ENABLE KEYS */;

-- Dumping structure for table pekerja.migrations
CREATE TABLE IF NOT EXISTS `migrations` (
  `id` int(10) unsigned NOT NULL AUTO_INCREMENT,
  `migration` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
  `batch` int(11) NOT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;

-- Dumping data for table pekerja.migrations: ~0 rows (approximately)
/*!40000 ALTER TABLE `migrations` DISABLE KEYS */;
INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES
(1, '2021_04_05_055015_create_sessions_table', 1);
/*!40000 ALTER TABLE `migrations` ENABLE KEYS */;

-- Dumping structure for table pekerja.sessions
CREATE TABLE IF NOT EXISTS `sessions` (
  `id` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
  `user_id` bigint(20) unsigned DEFAULT NULL,
  `ip_address` varchar(45) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
  `user_agent` text COLLATE utf8mb4_unicode_ci DEFAULT NULL,
  `payload` text COLLATE utf8mb4_unicode_ci NOT NULL,
  `last_activity` int(11) NOT NULL,
  PRIMARY KEY (`id`),
  KEY `sessions_user_id_index` (`user_id`),
  KEY `sessions_last_activity_index` (`last_activity`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;

-- Dumping data for table pekerja.sessions: ~1 rows (approximately)
/*!40000 ALTER TABLE `sessions` DISABLE KEYS */;
INSERT INTO `sessions` (`id`, `user_id`, `ip_address`, `user_agent`, `payload`, `last_activity`) VALUES
('ookBdVJ<KEY>', 1, '127.0.0.1', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.131 Safari/537.36', 'YTo1OntzOjY6Il90b2tlbiI7czo0MDoiOG1LclBTRUh6a2JXSjVZMkR4M1g2cm5wZEptTHEzclBwRm5KcGpvaCI7czo2OiJfZmxhc2giO2E6Mjp7czozOiJvbGQiO2E6MDp7fXM6MzoibmV3IjthOjA6e319czozOiJ1cmwiO2E6MDp7fXM6OToiX3ByZXZpb3VzIjthOjE6e3M6MzoidXJsIjtzOjMzOiJodHRwOi8vMTI3LjAuMC4xOjgwMDAvc3VyYXQtbWFzdWsiO31zOjUwOiJsb2dpbl93ZWJfNTliYTM2YWRkYzJiMmY5NDAxNTgwZjAxNGM3ZjU4ZWE0ZTMwOTg5ZCI7aToxO30=', 1628519639);
/*!40000 ALTER TABLE `sessions` ENABLE KEYS */;

-- Dumping structure for table pekerja.users
CREATE TABLE IF NOT EXISTS `users` (
  `id` int(5) unsigned NOT NULL AUTO_INCREMENT,
  `nama_lengkap` varchar(50) COLLATE utf8mb4_unicode_ci NOT NULL,
  `username` varchar(25) COLLATE utf8mb4_unicode_ci NOT NULL,
  `email` varchar(150) COLLATE utf8mb4_unicode_ci NOT NULL,
  `jabatan` varchar(150) COLLATE utf8mb4_unicode_ci NOT NULL,
  `nomor_hp` varchar(150) COLLATE utf8mb4_unicode_ci NOT NULL,
  `password` varchar(150) COLLATE utf8mb4_unicode_ci NOT NULL,
  `role` varchar(100) COLLATE utf8mb4_unicode_ci NOT NULL,
  `status_user` varchar(1) COLLATE utf8mb4_unicode_ci NOT NULL DEFAULT '1',
  `remember_token` varchar(100) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
  `created_at` timestamp NULL DEFAULT NULL,
  `updated_at` timestamp NULL DEFAULT NULL,
  PRIMARY KEY (`id`) USING BTREE,
  UNIQUE KEY `users_email_unique` (`email`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=19 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci ROW_FORMAT=DYNAMIC;

-- Dumping data for table pekerja.users: ~1 rows (approximately)
-- FIX: the (redacted) password value was missing its opening quote, which
-- broke the INSERT; it is now a proper string literal.
/*!40000 ALTER TABLE `users` DISABLE KEYS */;
INSERT INTO `users` (`id`, `nama_lengkap`, `username`, `email`, `jabatan`, `nomor_hp`, `password`, `role`, `status_user`, `remember_token`, `created_at`, `updated_at`) VALUES
(1, 'admin', 'admin', '<EMAIL>', '<PASSWORD>', '<PASSWORD>', '<PASSWORD>', 'admin', '1', NULL, '2021-08-08 22:30:42', '2021-08-09 05:14:10');
/*!40000 ALTER TABLE `users` ENABLE KEYS */;

-- Restore the session settings saved at the top of the dump.
/*!40101 SET SQL_MODE=IFNULL(@OLD_SQL_MODE, '') */;
/*!40014 SET FOREIGN_KEY_CHECKS=IFNULL(@OLD_FOREIGN_KEY_CHECKS, 1) */;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40111 SET SQL_NOTES=IFNULL(@OLD_SQL_NOTES, 1) */;
#!/bin/bash -x

#####################################################################
# SUMMARY: Train a quantized marian model
# AUTHOR: afaji
#####################################################################

# Exit on error
set -e

PREFIX=quantized-log4bit

# Remove old artifacts and create working directory
rm -rf train $PREFIX.{log,out,diff}
mkdir -p train

# Train a 4-bit, log-based quantized model (--quantize-bits 4).
# (The old comment said "8-bits", which did not match the flags below.)
$MRT_MARIAN/marian \
    --no-shuffle --seed 1111 --dim-emb 32 --dim-rnn 64 --mini-batch 32 --maxi-batch 1 --maxi-batch-sort none --learn-rate 0.1 --optimizer sgd \
    -m train/model.npz -t $MRT_DATA/europarl.de-en/corpus.bpe.{en,de} -v train/vocab.en.yml train/vocab.de.yml \
    --cost-type cross-entropy --sync-sgd --after-batches 100 --disp-freq 10 --quantize-bits 4 --quantize-log-based --quantize-optimization-steps 3 \
    --log $PREFIX.log

# Check if files exist
test -e train/model.npz
test -e $PREFIX.log

# Compare the current output with the expected output
cat $PREFIX.log | $MRT_TOOLS/extract-costs.sh > $PREFIX.out
$MRT_TOOLS/diff-nums.py $PREFIX.out $PREFIX.expected -o $PREFIX.diff

# make sure that the resulting model has no more than 2^4 = 16 distinct
# values (i.e. quantized to 4 bits, matching --quantize-bits above)
$MRT_TOOLS/check-model-unique-vals.py train/model.npz -b 4

# Exit with success code
exit 0
|
# Print every 3-element combination of `arr`, one combination per line.
def print_combinations(arr)
  arr.combination(3).to_a.each { |combo| puts combo.inspect }
end

print_combinations([1,2,3])
|
<gh_stars>0
#include <iostream>
#include <ctime>
#include "alkohole.h"
#include "towar.h"
using namespace std;
// Exercise for the Wino/Piwo (wine/beer) class hierarchy: stage 1 runs
// constructor/stream/equality checks; stages 2-5 are still commented out.
int main()
{
    //srand(unsigned(time(0)));
    srand(0); // fixed seed keeps runs deterministic for grading
    cout << endl;

    cout << "*********************** ETAP 1 (1 pkt) *********************** " << endl << endl;

    Wino w1("<NAME>", 42.90, 2013, rodzaj_wina::WYTRAWNE);
    Wino w2("<NAME>", 23.90, 2015, rodzaj_wina::POLSLODKIE);
    Wino w3("<NAME>", 66.99, 2000, rodzaj_wina::WYTRAWNE);
    cout << "nr. 1: " << w1 << endl;
    cout << "nr. 2: " << w2 << endl;
    cout << "nr. 3: " << w3 << endl << endl;
    if (w1 == w3) cout << "Wina 1 i 3 sa takie same" << endl << endl;
    else cout << "Blad operatora ==" << endl << endl;

    Piwo p1("<NAME>", 35, 5, rodzaj_piwa::JASNE, rodzaj_opakowania::BUTELKA);
    Piwo p2("<NAME>", 21, 9, rodzaj_piwa::NIEFILTROWANE, rodzaj_opakowania::BUTELKA);
    Piwo p3("<NAME>", 35, 5, rodzaj_piwa::JASNE, rodzaj_opakowania::PUSZKA);
    cout << "nr. 1: " << p1 << endl;
    cout << "nr. 2: " << p2 << endl;
    cout << "nr. 3: " << p3 << endl << endl;
    // FIX: this check previously re-compared the wines (w1 == w3) even
    // though the message is about the beers; compare p1 and p3 instead.
    if (p1 == p3) cout << "Piwa 1 i 3 sa takie same" << endl << endl;
    else cout << "Blad operatora ==" << endl << endl;

    cout << "*********************** ETAP 2 (1.5 pkt) *********************** " << endl << endl;
    /*
    Towar sklep1;
    cout << sklep1 << endl;
    sklep1.dodaj(&w1);
    sklep1.dodaj(&p1);
    sklep1.dodaj(&p2);
    sklep1.dodaj(&w2);
    sklep1.dodaj(&w3);
    sklep1.dodaj(&p3);
    cout << "Pierwsza dostawa towaru:" << endl;
    cout << sklep1;
    cout << "Wartosc towaru w sklepie: " << sklep1.wartosc() << " zl" << endl << endl;
    cout << "Losowo wybrana bytelka do wzniesienia toastu na otwarcie sklepu: " << endl;
    cout << *sklep1.sprzedaj() << endl << endl;
    cout << "W sklepie pozostalo:" << endl;
    cout << sklep1;
    cout << "Wartosc towaru: " << sklep1.wartosc() << " zl" << endl << endl;
    */

    cout << "*********************** ETAP 3 (1,5 pkt) *********************** " << endl << endl;
    /*
    sklep1.wczytaj("wino.bin");
    cout << "Dostawa win do sklepu:" << endl;
    cout << sklep1;
    cout << "Wartosc towaru: " << sklep1.wartosc() << " zl" << endl << endl;
    */

    cout << "*********************** ETAP 4 (2.0 pkt) *********************** " << endl << endl;
    /*
    list<Alkohol*> duplikaty;
    duplikaty = sklep1.usun_duplikaty();
    cout << "Wlasciciel otwiera filie swojego sklepu i przenosi tam wszystkie duplikaty trunkow.";
    cout << endl << endl;
    cout << "Duplikaty:" << endl;
    for (Alkohol* al : duplikaty) cout << *al << endl;
    cout << endl << "Pozostaly towar:" << endl;
    cout << sklep1;
    cout << "Wartosc towaru po usunieciu duplikatow: " << sklep1.wartosc() << " zl" << endl << endl;
    */

    cout << "*********************** ETAP 5 (2.0 pkt) *********************** " << endl << endl;
    /*
    list<Alkohol*> sprzedane = sklep1.sprzedaj(30);
    cout << "Pierwszy klient i od razu kupuje to co najlepsze." << endl << endl;
    cout << "Towar po sprzedazy najdrozszych:" << endl;
    cout << sklep1;
    cout << "Wartosc towaru: " << sklep1.wartosc() << " zl" << endl << endl;
    cout << "Lista sprzedanego towaru:" << endl;
    for (Alkohol* al : sprzedane) cout << *al << endl;
    */

    system("pause");
    return 0;
}
|
package utils
import (
"fmt"
"testing"
"github.com/stretchr/testify/assert"
)
func TestIsValidLabel(t *testing.T) {
testCases := []struct {
desc string
name string
expectedErr error
}{
{
desc: "valid",
name: "value",
expectedErr: nil,
},
{
desc: "invalid, name greater than 63 characters",
name: "orchestrator-orchestrator-orchestrator-orchestrator-orchestrator",
expectedErr: fmt.Errorf("length of name is greater than 63 characters"),
},
{
desc: "invalid, using non numerical character",
name: "value!",
expectedErr: fmt.Errorf("name violates kubernetes label constraint"),
},
}
for _, tC := range testCases {
t.Run(tC.desc, func(t *testing.T) {
gotErr := IsValidLabel(tC.name)
assert.Equal(t, tC.expectedErr, gotErr)
})
}
}
|
from flask import Blueprint, render_template, request, redirect, flash, url_for
from app import db
import models
# Blueprint for the API endpoints, mounted under the /api prefix.
bp = Blueprint('api', __name__, url_prefix='/api')


@bp.route('/api')
def api():
    # NOTE(review): combined with the '/api' url_prefix this endpoint is
    # served at /api/api — confirm whether '/' was intended instead.
    return "WIP, I'll put sth there one day"
|
import React from 'react';
import { RouteHandler } from 'react-router';
import Header from 'components/Header'
/**
 * Top-level layout component: renders the shared header and the active
 * route's handler inside a container div.
 */
export default React.createClass({
  render () {
    return (
      <div className="container">
        <Header />
        <RouteHandler />
      </div>
    );
  }
});
|
# First, build go1.4 using gcc, then use that go to build go>1.4
mkdir go-bootstrap && pushd $_

BOOTSTRAP_TARBALL=go1.4-bootstrap-20170531.tar.gz
# https://storage.googleapis.com/golang/go1.4-bootstrap-20170531.tar.gz.sha256
BOOTSTRAP_TARBALL_CHECKSUM=49f806f66762077861b7de7081f586995940772d29d4c45068c134441a743fa2
curl -LO https://storage.googleapis.com/golang/${BOOTSTRAP_TARBALL}
# FIX: verify the tarball's SHA-256 *before* extracting it; the original
# unpacked first, so a corrupt or tampered download was already written
# to disk by the time the verification ran.
[ $(openssl sha -sha256 "${BOOTSTRAP_TARBALL}" | awk '{print $2}') == "${BOOTSTRAP_TARBALL_CHECKSUM}" ] || exit 1
tar -xzf ${BOOTSTRAP_TARBALL}
rm -f ${BOOTSTRAP_TARBALL}

# Build the bootstrap toolchain, then build the target Go with it.
export GOROOT_BOOTSTRAP=$PWD/go
cd $GOROOT_BOOTSTRAP/src
./make.bash

pushd $SRC_DIR/src
if [[ $(uname) == 'Darwin' ]]; then
    # Tests on macOS receive SIGABRT on Travis :-/
    # All tests run fine on Mac OS X:10.9.5:13F1911 locally
    ./make.bash
elif [[ $(uname) == 'Linux' ]]; then
    ./all.bash
fi

# Don't need the bootstrap directory anymore
rm -fr ${GOROOT_BOOTSTRAP}
# Don't need the cached build objects
rm -fr ${SRC_DIR}/pkg/obj

mkdir -p ${PREFIX}/go
# Dropping the verbose option here, because Travis chokes on output >4MB
cp -r $SRC_DIR/* ${PREFIX}/go/
rm -f ${PREFIX}/go/conda_build.sh

# Right now, it's just go and gofmt, but might be more in the future!
mkdir -p ${PREFIX}/bin && pushd $_
for binary in ../go/bin/* ; do ln -s $binary ; done

# Install [de]activate scripts.
for CHANGE in "activate" "deactivate"
do
    mkdir -p "${PREFIX}/etc/conda/${CHANGE}.d"
    cp "${RECIPE_DIR}/${CHANGE}.sh" "${PREFIX}/etc/conda/${CHANGE}.d/${PKG_NAME}_${CHANGE}.sh"
done
|
#!/bin/bash
# This script parses in the command line parameters from runCust,
# maps them to the correct command line parameters for DispNet training script and launches that task
# The last line of runCust should be: bash $CONFIG_FILE --data-dir $DATA_DIR --log-dir $LOG_DIR

# Parse the command line parameters
# that runCust will give out
DATA_DIR=NONE
LOG_DIR=NONE
CONFIG_DIR=NONE
MODEL_DIR=NONE

# Parsing command line arguments:
# FIX: `[[ $# > 0 ]]` is a lexicographic string comparison; use the
# numeric test instead.
while [[ $# -gt 0 ]]
do
    key="$1"

    case $key in
        -h|--help)
        echo "Usage: run_dispnet_training_philly.sh [run_options]"
        echo "Options:"
        echo " -d|--data-dir <path> - directory path to input data (default NONE)"
        echo " -l|--log-dir <path> - directory path to save the log files (default NONE)"
        echo " -p|--config-file-dir <path> - directory path to config file directory (default NONE)"
        echo " -m|--model-dir <path> - directory path to output model file (default NONE)"
        exit 1
        ;;
        -d|--data-dir)
        DATA_DIR="$2"
        shift # pass argument
        ;;
        -p|--config-file-dir)
        CONFIG_DIR="$2"
        shift # pass argument
        ;;
        -m|--model-dir)
        MODEL_DIR="$2"
        shift # pass argument
        ;;
        -l|--log-dir)
        LOG_DIR="$2"
        shift
        ;;
        *)
        # FIX: typo "Unkown" -> "Unknown"; quote so the option is printed verbatim.
        echo "Unknown option $key"
        ;;
    esac
    shift # past argument or value
done

# Prints out the arguments that were passed into the script
echo "DATA_DIR=$DATA_DIR"
echo "LOG_DIR=$LOG_DIR"
echo "CONFIG_DIR=$CONFIG_DIR"
echo "MODEL_DIR=$MODEL_DIR"

# Run training on philly
# Add the root folder of the code to the PYTHONPATH
export PYTHONPATH=$PYTHONPATH:$CONFIG_DIR

# Run the actual job
python $CONFIG_DIR/examples/AnytimeNetwork/densenet-ann.py \
    --data_dir=$DATA_DIR \
    --log_dir=$LOG_DIR \
    --model_dir=$MODEL_DIR \
    --load=${MODEL_DIR}/checkpoint \
    --densenet_version=dense \
    -f=6 -n=12 -s=6 -g=32 --ds_name=svhn --opt_at=-1 --samloss=6 --batch_size=64
|
<gh_stars>1-10
import numpy as np
def funcy(a1, a2, a1m, a2m):
    """Least-squares slope and intercept for the line y = m*x + c.

    `a1m` and `a2m` are the precomputed means of `a1` and `a2`.
    Returns the tuple (slope, intercept).
    """
    n = len(a1)
    slope = (sum(a1 * a2) - n * a1m * a2m) / (sum(a1 ** 2) - n * (a1m ** 2))
    intercept = a2m - (slope * a1m)
    return (slope, intercept)
def errorid(a2,slop,inter):
ycap=(slop*a1)+inter
s=np.sqrt((sum((a2-ycap)**2))/len(a2))
return(s,ycap)
a1=np.array([4,9,10,14,4,7,12,22,1,17])
a2=np.array([31,58,65,73,37,44,60,91,21,84])
a1m=np.mean(a1)
a2m=np.mean(a2)
print("---------Eqaution for linear regression---------")
slop,inter=funcy(a1,a2,a1m,a2m)
print(f' y = {slop} x + {inter}')
print("---------Root of Mean Square Error---------")
l,ycap=errorid(a2,slop,inter) #I am using the metric RMSE(Root Mean Square Error) for understanding the model
print(l)
|
'use strict';
const config = require('../config/config');
const mongo = require('../common/middleware/mongo').mongo;
const Factory = require('../common/classes/factory');
const {Payment} = require('../common/classes/payment.class');
const {UsersList} = require('../common/classes/users-list.class');
const {TariffsList} = require('../common/classes/tariffs-list.class');
const subscriptionStates = require('../common/classes/subscription-state');
const {AccountingUser, AccountingInternal, OPERATION_CODE_TARIFF} = require('../common/classes/accounting.class');
/**
 * Charges one day of tariff usage to a single user. When the internal balance
 * runs out, prolongs the subscription for a whole period — attempting a
 * recurring top-up if the user's own balance is insufficient — and suspends
 * the subscription when that payment fails.
 */
async function processUser(ctx, u) {
    const user = Factory.User(ctx, u);
    const tariffsList = new TariffsList(ctx);
    const tariff = await tariffsList.GetById(user.GetTariffId());
    if (!tariff) {
        // No tariff assigned: nothing to charge, just advance the billing date.
        await user.SetLastBillingDate();
        return;
    }
    const cost = tariff.dayPrice;
    if (cost === 0) {
        // Free tariff: no accounting operations needed.
        await user.SetLastBillingDate();
        return;
    }
    const accountingInternal = new AccountingInternal(ctx);
    const balanceInternal = await accountingInternal.GetUserBalance(user.GetId(), OPERATION_CODE_TARIFF);
    if (balanceInternal < cost) {
        // Internal balance exhausted: prolong the tariff for a whole period.
        const periodCost = tariff.dayPrice * tariff.periodDays;
        const accountingUser = new AccountingUser(ctx);
        // Bug fix: the user balance must come from the user ledger; the
        // original queried accountingInternal a second time by copy-paste.
        const balanceUser = await accountingUser.GetUserBalance(user.GetId(), OPERATION_CODE_TARIFF);
        if (balanceUser < periodCost) {
            const payment = new Payment(ctx);
            if (!await payment.TryRecurringTopup(user, periodCost)) {
                await user.SetSubscriptionState(subscriptionStates.suspended);
                return;
            }
        }
        // Move the period cost from the user ledger into the internal ledger.
        await accountingUser.AddUserOperation(user.GetId(), periodCost * -1, OPERATION_CODE_TARIFF, 'tariff prolongation');
        await accountingInternal.AddUserOperation(user.GetId(), periodCost, OPERATION_CODE_TARIFF, 'tariff prolongation');
        await user.SetSubscriptionState(subscriptionStates.active);
    }
    // Deduct today's charge and advance the billing date.
    await accountingInternal.AddUserOperation(user.GetId(), -1 * cost, OPERATION_CODE_TARIFF,
        'tariff daily payment');
    await user.SetLastBillingDate();
    ctx.commitTransaction();
}
// Entry point of the billing job: builds a minimal request-like context,
// opens the Mongo connection (optionally transactional) and bills every
// active and suspended user once. Exits the process with 0 on success,
// 1 on any error.
async function start(logger) {
    try {
        // Minimal ctx stub mimicking a web-framework request context so the
        // shared classes (UsersList, accounting, Payment, ...) can be reused
        // outside an HTTP request.
        const ctx = {
            id: '',
            log: logger,
            session: {},
            query: {},
            header: {},
            body: undefined,
            status: 200,
            redirect: function () {
            },
            throw: function (e) {
                throw new Error(e)
            },
        };
        const mongoMw = mongo(config.mongoDsn, {}, config.enableTransactions);
        // Bill active users first, then suspended ones (a suspended user may
        // become active again when a recurring top-up succeeds in processUser).
        async function processBilling() {
            const usersList = new UsersList(ctx);
            const billingUsers = await usersList.GetActiveBilling();
            for (let u of billingUsers) {
                await processUser(ctx, u);
            }
            const suspendedUsers = await usersList.GetSuspendedBilling();
            for (let u of suspendedUsers) {
                await processUser(ctx, u);
            }
        }
        // The middleware attaches the Mongo session/transaction to ctx and
        // runs processBilling as the downstream handler.
        await mongoMw(ctx, processBilling);
    } catch (e) {
        logger.error(e);
        process.exit(1);
    }
    process.exit(0);
}

module.exports = {
    start: start,
};
|
<gh_stars>0
const DrawCard = require('../../../drawcard.js');

/**
 * Fire and Blood (core set 01177).
 * Action: pick a unique Targaryen card in your dead pile; Hatchlings may be
 * put into play instead of being shuffled back into the deck.
 */
class FireAndBlood extends DrawCard {
    setupCardAbilities() {
        this.action({
            title: 'Shuffle card from dead pile back into deck',
            phase: 'challenge',
            target: {
                cardCondition: card =>
                    card.controller === this.controller &&
                    card.location === 'dead pile' &&
                    card.isUnique() &&
                    card.isFaction('targaryen')
            },
            handler: context => {
                // Non-Hatchlings are always shuffled back in.
                if(!context.target.hasTrait('Hatchling')) {
                    this.shuffleCard(context.target);
                    return;
                }
                // Hatchlings get a choice: put into play or shuffle back.
                this.selectedCard = context.target;
                this.game.promptWithMenu(context.player, this, {
                    activePrompt: {
                        menuTitle: 'Put card into play?',
                        buttons: [
                            { text: 'Yes', method: 'putIntoPlay' },
                            { text: 'No', method: 'shuffle' }
                        ]
                    },
                    source: this
                });
            }
        });
    }

    putIntoPlay(player) {
        player.putIntoPlay(this.selectedCard);
        this.game.addMessage('{0} plays {1} to remove {2} from their dead pile and put it into play', player, this, this.selectedCard);
        return true;
    }

    shuffle() {
        this.shuffleCard(this.selectedCard);
        return true;
    }

    shuffleCard(card) {
        this.controller.moveCard(card, 'draw deck');
        this.controller.shuffleDrawDeck();
        this.game.addMessage('{0} plays {1} to remove {2} from their dead pile and shuffle it into their deck', this.controller, this, card);
    }
}

FireAndBlood.code = '01177';

module.exports = FireAndBlood;
|
import {getResource} from '../services/requests'; // Функция для получения данных с сервера
import calc from './calc';
import changeFormDetails from './changeFormDetails';
// Builds the order <select> elements from the server data and wires up the
// price calculator and the form serializer.
const createOrderData = (details) => {
    getResource('assets/db.json')
        .then(res => createData(res.order))
        .catch(error => console.log(error));

    // Creates the selects for the order based on the server data.
    function createData(response) {
        response.forEach(({id, value, text}) => {
            // Create a select and give it the id from the database.
            let select = document.createElement('select');
            select.id = id;
            // Bug fix: the original `for (let key in value, text)` used the
            // comma operator, so only `text` was ever iterated. Iterate the
            // keys of `text` explicitly (value/text hold the same keys).
            for (let key in text) {
                let option = document.createElement('option');
                option.value = value[key];
                option.textContent = text[key];
                select.append(option);
            }
            // Insert the select into the DOM before the promocode field.
            document.querySelector('.promocode').before(select);
        });
        // Start the calculator.
        calc('#size', '#material', '#options', '.promocode', '.calc-price', details);
        // Mirror the selections into the submitted form.
        changeFormDetails('#size', '#material', '#options', '.promocode', details);
    }
};

export default createOrderData;
|
import React from 'react';
import IconButton from '@material-ui/core/IconButton';
import CancelIcon from '@material-ui/icons/Cancel';
import CheckCircleIcon from '@material-ui/icons/CheckCircle';
const confirmationType = {
retry: 'Creating new deployment...',
abort: 'Aborting...'
};
export default class Confirm extends React.Component {
constructor(props, context) {
super(props, context);
this.state = {
class: 'fadeIn'
};
}
_handleCancel() {
this.setState({ class: 'fadeOut' });
this.props.cancel();
}
_handleConfirm() {
this.setState({ loading: true });
this.props.action();
}
render() {
return (
<div className={`${this.state.class} ${this.props.classes || ''}`} style={{ marginRight: '12px' }}>
<div className="float-right">
<span className="bold">{this.state.loading ? confirmationType[this.props.type] : `Confirm ${this.props.type} deployment?`}</span>
<IconButton id="confirmAbort" onClick={() => this._handleConfirm()}>
<CheckCircleIcon className="green" />
</IconButton>
<IconButton id="cancelAbort" onClick={() => this._handleCancel()}>
<CancelIcon className="red" />
</IconButton>
</div>
</div>
);
}
}
|
#!/bin/bash
# This script installs all the libraries to be used by Compiler Explorer

# Resolve the directory this script lives in so common.inc can be sourced
# regardless of the caller's working directory.
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
. ${SCRIPT_DIR}/common.inc
# First CLI argument; "nightly" enables trunk installs (see install_nightly).
ARG1="$1"
# Succeeds (exit status 0) only when the script was invoked with "nightly",
# i.e. trunk/nightly library versions should be installed as well.
install_nightly() {
    [[ "$ARG1" = "nightly" ]]
}
# Announce whether trunk/nightly versions will be installed this run.
if install_nightly; then
    echo "Installing trunk versions"
else
    echo "Skipping install of trunk versions"
fi

#########################
# C++

# Kvasir MPL only has a nightly flavour: track its development branch.
if install_nightly; then
    if [[ ! -d "libs/kvasir/mpl/trunk" ]]; then
        git clone -q https://github.com/kvasir-io/mpl.git libs/kvasir/mpl/trunk
        git -C libs/kvasir/mpl/trunk checkout -q development
    else
        git -C libs/kvasir/mpl/trunk pull -q origin development
    fi
fi

# A local cmake is needed by the LLVM header installs below.
if [[ ! -d cmake ]]; then
    mkdir cmake
    fetch https://cmake.org/files/v3.11/cmake-3.11.0-rc3-Linux-x86_64.tar.gz | tar zxf - --strip-components 1 -C cmake
fi
# Downloads the header-only part of the requested Boost releases into
# ${OPT}/libs/boost_<version>/boost/. Versions already present are skipped.
# Fixes: expansions quoted so paths with spaces survive word splitting, and
# `local v=$(...)` split so command failure isn't masked (shellcheck SC2155).
install_boost() {
    for VERSION in "$@"; do
        local VERSION_UNDERSCORE
        VERSION_UNDERSCORE=$(echo "${VERSION}" | tr . _)
        local DEST="${OPT}/libs/boost_${VERSION_UNDERSCORE}/boost/"
        if [[ ! -d "${DEST}" ]]; then
            mkdir -p /tmp/boost
            pushd /tmp/boost
            # NOTE(review): dl.bintray.com was sunset in 2021 — confirm this
            # mirror is still reachable or switch to the boostorg archives.
            fetch "https://dl.bintray.com/boostorg/release/${VERSION}/source/boost_${VERSION_UNDERSCORE}.tar.bz2" | tar jxf - "boost_${VERSION_UNDERSCORE}/boost"
            mkdir -p "${OPT}/libs/boost_${VERSION_UNDERSCORE}/boost"
            rsync -a "boost_${VERSION_UNDERSCORE}/boost/" "${DEST}"
            popd
            rm -rf /tmp/boost
        fi
    done
}
install_boost 1.64.0 1.65.0 1.66.0 1.67.0 1.68.0 1.69.0 1.70.0
# Installs the headers of the requested LLVM releases: downloads the source,
# runs a cmake configure pass (which generates config headers in the build
# dir) and keeps only include/. Fix: expansions quoted (paths with spaces).
install_llvm() {
    for VERSION in "$@"; do
        local DEST="${OPT}/libs/llvm/${VERSION}"
        local URL="http://releases.llvm.org/${VERSION}/llvm-${VERSION}.src.tar.xz"
        if [[ ! -d "${DEST}" ]]; then
            rm -rf /tmp/llvm
            mkdir -p /tmp/llvm
            fetch "${URL}" | tar Jxf - --strip-components=1 -C /tmp/llvm
            mkdir -p "${DEST}"
            pushd "${DEST}"
            "${OPT}/cmake/bin/cmake" /tmp/llvm 2>&1
            rsync -a /tmp/llvm/include/ include/
            popd
            rm -rf /tmp/llvm
        fi
    done
}
# Installs the headers of LLVM trunk (checked out via SVN) the same way
# install_llvm does for releases: configure with cmake, keep only include/.
install_llvm_trunk() {
    rm -rf /tmp/llvm
    mkdir -p /tmp/llvm
    svn co -q http://llvm.org/svn/llvm-project/llvm/trunk /tmp/llvm
    mkdir -p libs/llvm/trunk
    pushd libs/llvm/trunk
    ${OPT}/cmake/bin/cmake /tmp/llvm 2>&1
    rsync -a /tmp/llvm/include/ include/
    popd
    rm -rf /tmp/llvm
}

install_llvm 4.0.1 5.0.0 5.0.1 5.0.2 6.0.0 6.0.1 7.0.0 7.0.1 8.0.0
if install_nightly; then
    install_llvm_trunk
fi
# Clones URL into DIR on first use; on later runs force-syncs DIR to origin,
# discarding any local changes.
get_or_sync() {
    local DIR="$1" URL="$2"
    if [[ -d "${DIR}" ]]; then
        git -C "${DIR}" fetch -q
        git -C "${DIR}" reset -q --hard origin
    else
        git clone -q "${URL}" "${DIR}"
    fi
}
# Clones URL into DIR and checks out TAG on first use; afterwards discards
# local changes and pulls TAG (a tag or branch name) from origin again.
get_or_sync_git_tag() {
    local DIR=$1
    local URL=$2
    local TAG=$3
    if [[ ! -d "${DIR}" ]]; then
        git clone -q "${URL}" "${DIR}"
        git -C "${DIR}" checkout -q "${TAG}"
    else
        git -C "${DIR}" reset -q --hard
        git -C "${DIR}" pull -q origin "${TAG}"
    fi
}
# Checks out each TAG of the repo at URL into its own subdirectory DIR/TAG.
# Fix: arguments quoted so directory names with spaces survive word splitting.
get_or_sync_git_tags() {
    local DIR=$1
    local URL=$2
    shift 2
    for TAG in "$@"; do
        get_or_sync_git_tag "${DIR}/${TAG}" "${URL}" "${TAG}"
    done
}
# Libraries tracked at their latest upstream state (no versioning).
get_or_sync libs/cmcstl2 https://github.com/CaseyCarter/cmcstl2.git
get_or_sync libs/GSL https://github.com/Microsoft/GSL.git
get_or_sync libs/gsl-lite https://github.com/martinmoene/gsl-lite.git
get_or_sync libs/opencv https://github.com/opencv/opencv.git
get_or_sync libs/abseil https://github.com/abseil/abseil-cpp.git
get_or_sync libs/cppcoro https://github.com/lewissbaker/cppcoro.git
get_or_sync libs/ctbignum https://github.com/niekbouman/ctbignum.git
get_or_sync libs/outcome https://github.com/ned14/outcome.git
get_or_sync libs/cnl https://github.com/johnmcfarlane/cnl.git
get_or_sync libs/googletest https://github.com/google/googletest.git
get_or_sync libs/tbb https://github.com/01org/tbb.git
# Downloads the tarball at URL into DIR (stripping the archive's top-level
# directory) unless DIR already exists.
# Fix: expansions quoted so paths/URLs with special characters survive.
get_if_not_there() {
    local DIR=$1
    local URL=$2
    if [[ ! -d "${DIR}" ]]; then
        mkdir -p "${DIR}"
        fetch "${URL}" | tar zxf - --strip-components=1 -C "${DIR}"
    fi
}

# Alias for get_if_not_there, but better conveys the intention
get_git_version() {
    local DIR=$1
    local URL=$2
    get_if_not_there "${DIR}" "${URL}"
}
# Installs each requested tagged release of a GitHub repo into DIR/<tag>,
# using the release tarball. Fix: expansions quoted against word splitting.
get_github_versions() {
    local DIR=$1
    local URL=https://github.com/$2
    shift 2
    for tag in "$@"; do
        get_git_version "${DIR}/${tag}" "${URL}/archive/${tag}.tar.gz"
    done
}
# Installs tagged releases of a GitHub repo plus (when nightly) its trunk.
# QUIRK is a prefix inserted before the version in the directory name (e.g.
# eigen passes "v" so tags land in .../v3.3.4 and trunk in .../vtrunk).
# Fix: expansions quoted against word splitting.
get_github_versioned_and_trunk_with_quirk() {
    local DIR=$1
    local REPO=$2
    local URL=https://github.com/${REPO}
    local QUIRK=$3
    shift 3
    mkdir -p "${DIR}"
    if install_nightly; then
        get_or_sync "${DIR}/${QUIRK}trunk" "${URL}.git"
    fi
    for tag in "$@"; do
        get_git_version "${DIR}/${QUIRK}${tag}" "${URL}/archive/${tag}.tar.gz"
    done
}

# Same as above with an empty quirk prefix.
get_github_versioned_and_trunk() {
    local DIR=$1
    local URL=$2
    shift 2
    get_github_versioned_and_trunk_with_quirk "${DIR}" "${URL}" '' "$@"
}
# Versioned GitHub libraries (plus trunk when nightly installs are enabled).
get_github_versioned_and_trunk libs/ulib stefanocasazza/ULib v1.4.2
get_github_versioned_and_trunk libs/google-benchmark google/benchmark v1.2.0 v1.3.0 v1.4.0
get_github_versioned_and_trunk libs/rangesv3 ericniebler/range-v3 0.3.0 0.3.5 0.3.6 0.4.0
get_github_versioned_and_trunk libs/dlib davisking/dlib v19.7 v19.9 v19.10
get_github_versioned_and_trunk libs/libguarded copperspice/libguarded libguarded-1.1.0
get_github_versioned_and_trunk libs/brigand edouarda/brigand 1.3.0
get_github_versioned_and_trunk libs/fmt fmtlib/fmt 5.3.0 5.2.0 5.1.0 5.0.0 4.1.0 4.0.0
get_github_versioned_and_trunk libs/hfsm andrew-gresyk/HFSM 0.8 0.10
# Eigen tags are bare version numbers; the "v" quirk prefixes the directories.
get_github_versioned_and_trunk_with_quirk libs/eigen eigenteam/eigen-git-mirror v 3.3.4
get_github_versioned_and_trunk libs/glm g-truc/glm 0.9.8.5 0.9.9.0 0.9.9.1 0.9.9.2 0.9.9.3 0.9.9.4 0.9.9.5
get_github_versioned_and_trunk libs/catch2 catchorg/Catch2 v2.2.2 v2.2.3 v2.3.0 v2.4.0 v2.4.1 v2.4.2 v2.5.0 v2.6.0 v2.6.1 v2.7.0
get_github_versions libs/expected-lite martinmoene/expected-dark v0.0.1
get_github_versioned_and_trunk libs/expected-lite martinmoene/expected-lite v0.1.0
get_github_versioned_and_trunk libs/nlohmann_json nlohmann/json v3.6.0 v3.1.2 v2.1.1
get_github_versioned_and_trunk libs/doctest onqtam/doctest 1.2.9 2.0.0 2.0.1 2.1.0 2.2.0 2.2.1 2.2.2 2.2.3 2.3.0 2.3.1
get_github_versioned_and_trunk libs/eastl electronicarts/EASTL 3.12.01
get_github_versioned_and_trunk libs/xtl QuantStack/xtl 0.5.3 0.4.16
get_github_versioned_and_trunk libs/xsimd QuantStack/xsimd 7.0.0 6.1.4
get_github_versioned_and_trunk libs/xtensor QuantStack/xtensor 0.19.4 0.18.2 0.17.4
get_github_versioned_and_trunk libs/seastar scylladb/seastar seastar-18.08.0
get_github_versions libs/GSL Microsoft/GSL v1.0.0 v2.0.0
get_github_versions libs/vcl darealshinji/vectorclass v1.30
# Installs the requested Blaze releases (Bitbucket tarballs) into
# ${OPT}/libs/blaze/v<version>/. Fix: expansions quoted, and the three
# separate one-version calls merged into one call of the variadic function
# for consistency with install_boost/install_llvm.
install_blaze() {
    for VERSION in "$@"; do
        local DEST="${OPT}/libs/blaze/v${VERSION}/"
        if [[ ! -d "${DEST}" ]]; then
            mkdir -p /tmp/blaze
            pushd /tmp/blaze
            fetch "https://bitbucket.org/blaze-lib/blaze/downloads/blaze-${VERSION}.tar.gz" | tar zxf -
            mkdir -p "${DEST}"
            rsync -a "blaze-${VERSION}/" "${DEST}"
            popd
            rm -rf /tmp/blaze
        fi
    done
}
install_blaze 3.3 3.4 3.5
get_or_sync libs/blaze/trunk https://bitbucket.org/blaze-lib/blaze.git

get_or_sync_git_tags libs/ctre https://github.com/hanickadot/compile-time-regular-expressions.git master v2 ecma-unicode
#########################
# C

# GNU GSL ships sources that would need an actual build step; the helper is
# kept for reference but the call below remains disabled.
install_gnu_gsl_versioned_and_latest() {
    # We need to build this, I think?
    local DIR=$1
    shift
    mkdir -p $DIR
    if install_nightly; then
        get_or_sync ${DIR}/trunk https://git.savannah.gnu.org/git/gsl.git
    fi
    for tag in "$@"; do
        get_if_not_there ${DIR}/${tag} ftp://ftp.gnu.org/gnu/gsl/gsl-${tag}.tar.gz
    done
}
#install_gnu_gsl_versioned_and_latest libs/gnu-gsl 2.3 2.4
#########################
# D

# mir-glas trunk: clone master on first run, pull afterwards (nightly only).
if install_nightly; then
    if [ ! -d "${OPT}/libs/d/mir-glas-trunk" ]; then
        git clone -q https://github.com/libmir/mir-glas.git ${OPT}/libs/d/mir-glas-trunk
        git -C ${OPT}/libs/d/mir-glas-trunk checkout -q master
    else
        git -C ${OPT}/libs/d/mir-glas-trunk pull -q origin master
    fi
fi

# Installs the requested mir-glas releases into ${OPT}/libs/d/mir-glas-v<version>/.
install_mir_glas() {
    for VERSION in "$@"; do
        local DEST=${OPT}/libs/d/mir-glas-v${VERSION}/
        if [[ ! -d ${DEST} ]]; then
            mkdir -p /tmp/mir-glas
            pushd /tmp/mir-glas
            fetch https://github.com/libmir/mir-glas/archive/v${VERSION}.tar.gz | tar zxf -
            mkdir -p ${DEST}
            rsync -a mir-glas-${VERSION}/ ${DEST}
            popd
            rm -rf /tmp/mir-glas
        fi
    done
}
install_mir_glas 0.1.5 0.2.3 0.2.4

# mir-algorithm trunk: same clone-or-pull pattern as mir-glas above.
if install_nightly; then
    if [ ! -d "${OPT}/libs/d/mir-algorithm-trunk" ]; then
        git clone -q https://github.com/libmir/mir-algorithm.git ${OPT}/libs/d/mir-algorithm-trunk
        git -C ${OPT}/libs/d/mir-algorithm-trunk checkout -q master
    else
        git -C ${OPT}/libs/d/mir-algorithm-trunk pull -q origin master
    fi
fi
# Installs the requested mir-algorithm releases into
# ${OPT}/libs/d/mir-algorithm-v<version>/.
install_mir_algorithm() {
    for VERSION in "$@"; do
        local DEST=${OPT}/libs/d/mir-algorithm-v${VERSION}/
        if [[ ! -d ${DEST} ]]; then
            mkdir -p /tmp/mir-algorithm
            pushd /tmp/mir-algorithm
            fetch https://github.com/libmir/mir-algorithm/archive/v${VERSION}.tar.gz | tar zxf -
            mkdir -p ${DEST}
            rsync -a mir-algorithm-${VERSION}/ ${DEST}
            popd
            rm -rf /tmp/mir-algorithm
        fi
    done
}
install_mir_algorithm 0.5.17 0.6.13 0.6.21 0.9.5 1.0.0 1.1.0

#########################
# CUDA
get_or_sync_git_tags libs/cub https://github.com/NVlabs/cub.git 1.8.0
|
package cn.cerc.jbean.other;
import cn.cerc.jbean.core.Application;
/**
 * Central registry of the physical table names used by the system.
 * Each constant maps a logical table to its default backing table name;
 * {@link #get(String)} lets the application config override a name.
 * (Comments translated to English; modifier order normalized.)
 */
public class SystemTable {
    // Account-set (book) information table
    public static final String getBookInfo = "OurInfo";
    // Account-set parameter table
    public static final String getBookOptions = "VineOptions";
    // Application menu table
    public static final String getAppMenus = "SysFormDef";
    // Customer-specific customized menu table
    public static final String getCustomMenus = "cusmenu";
    // User-defined menu table
    public static final String getUserMenus = "UserMenu";
    // User information table
    public static final String getUserInfo = "Account";
    // User parameter table
    public static final String getUserOptions = "UserOptions";
    // User role table
    public static final String getUserRoles = "UserRoles";
    // Role permission table
    public static final String getRoleAccess = "UserAccess";
    // User device verification record table
    public static final String getDeviceVerify = "AccountVerify";
    // Security phone control table
    // (consistency fix: was "public final static", now "public static final")
    public static final String getSecurityMobile = "s_securityMobile";
    // Currently online users
    public static final String getCurrentUser = "CurrentUser";
    // Messages queued for a user to read
    public static final String getUserMessages = "message_temp";
    // Log of key user operations
    public static final String getUserLogs = "UserLogs";
    // History of application service invocations
    public static final String getAppLogs = "AppServiceLogs";
    // History of web page invocations
    public static final String getPageLogs = "WebPageLogs";
    // Online user count records
    public static final String getOnlineUsers = "onlineusers";
    // Account-set code of the platform operator
    public static final String ManageBook = "000000";
    // Multi-language dictionary: legacy version
    public static final String getLangDict = "s_LangDict";
    // Multi-language dictionary: new version
    public static final String getLanguage = "s_Language";

    /**
     * Resolves a logical table code to its configured physical name,
     * falling back to the code itself when no override is configured.
     */
    public static String get(String tableCode) {
        return Application.getAppConfig().getParam(tableCode, tableCode);
    }

    // Per-user grid column customization; recommended to store in MongoDB
    public static String getGridManager() {
        return "s_gridManager";
    }
}
|
<reponame>aloizo03/MotioNet-Android
package com.example.motionet;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Matrix;
import android.os.Build;
import android.util.Log;
import android.widget.ProgressBar;
import androidx.annotation.RequiresApi;
import org.opencv.android.Utils;
import org.opencv.core.CvException;
import org.opencv.core.Mat;
import androidx.fragment.app.Fragment;
import java.util.ArrayList;
import java.util.List;
import org.tensorflow.lite.examples.posenet.lib.KeyPoint;
import org.tensorflow.lite.examples.posenet.lib.Person;
/**
 * Post-processes the frames of a recorded video: converts OpenCV frames to
 * bitmaps sized for the PoseNet model, then extracts per-frame key points
 * and labels foot contact from consecutive ankle heights.
 *
 * Fixes relative to the original:
 *  - body-part names are matched with a String switch (equals semantics)
 *    instead of '==' reference comparison on strings;
 *  - rightFoot displacement is computed from the RIGHT ankle (was LEFT);
 *  - the right-foot branch updates the RIGHT contact labels (was LEFT);
 *  - the 17-way duplicated key-point block is factored into helpers.
 */
public class motionProcess extends Fragment {
    // PoseNet model input size.
    int modelWidth = 257, modelHeight = 257;
    private ArrayList<Mat> frames;
    private ArrayList<Bitmap> framesBitmap;
    private ArrayList<posePart> poses;
    private pose p;
    Context context;

    public motionProcess(ArrayList<Mat> framesOpenCV, Context pContext){
        this.frames = framesOpenCV;
        this.framesBitmap = new ArrayList<>();
        p = new pose();
        context = pContext;
        poses = new ArrayList<>();
    }

    /** Converts every OpenCV frame into a cropped, model-sized ARGB bitmap. */
    @RequiresApi(api = Build.VERSION_CODES.O)
    public void convertToBitMap() {
        for(int i = 0; i < frames.size(); i++){
            Mat frame = frames.get(i);
            try {
                Bitmap bmp = Bitmap.createBitmap(frame.cols(), frame.rows(), Bitmap.Config.ARGB_8888);
                Utils.matToBitmap(frame, bmp);
                Bitmap bitmap = p.cropBitmap(bmp);
                bitmap = Bitmap.createScaledBitmap(bitmap, modelWidth, modelHeight, true);
                framesBitmap.add(bitmap);
            } catch (CvException e){
                Log.d("Exception", e.getMessage());
            }
        }
    }

    public ArrayList<Mat> getFrames() {
        return frames;
    }

    public ArrayList<Bitmap> getFramesBitmap() {
        return framesBitmap;
    }

    /** Builds a position object from a detected key point. */
    private static position toPosition(KeyPoint kp) {
        position pos = new position();
        pos.setX(kp.getPosition().getX());
        pos.setY(kp.getPosition().getY());
        pos.setAngle();
        return pos;
    }

    /**
     * Stores pos in the field of pp matching the body-part name.
     * Returns true when the part is one of the tracked PoseNet parts, so the
     * caller only accumulates scores for parts that were actually stored.
     */
    private static boolean assignPart(posePart pp, String part, position pos) {
        switch (part) {
            case "NOSE": pp.setNOSE(pos); return true;
            case "LEFT_EYE": pp.setLEFT_EYE(pos); return true;
            case "RIGHT_EYE": pp.setRIGHT_EYE(pos); return true;
            case "LEFT_EAR": pp.setLEFT_EAR(pos); return true;
            case "RIGHT_EAR": pp.setRIGHT_EAR(pos); return true;
            case "LEFT_SHOULDER": pp.setLEFT_SHOULDER(pos); return true;
            case "RIGHT_SHOULDER": pp.setRIGHT_SHOULDER(pos); return true;
            case "LEFT_ELBOW": pp.setLEFT_ELBOW(pos); return true;
            case "RIGHT_ELBOW": pp.setRIGHT_ELBOW(pos); return true;
            case "LEFT_WRIST": pp.setLEFT_WRIST(pos); return true;
            case "RIGHT_WRIST": pp.setRIGHT_WRIST(pos); return true;
            case "LEFT_HIP": pp.setLEFT_HIP(pos); return true;
            case "RIGHT_HIP": pp.setRIGHT_HIP(pos); return true;
            case "LEFT_KNEE": pp.setLEFT_KNEE(pos); return true;
            case "RIGHT_KNEE": pp.setRIGHT_KNEE(pos); return true;
            case "LEFT_ANKLE": pp.setLEFT_ANKLE(pos); return true;
            case "RIGHT_ANKLE": pp.setRIGHT_ANKLE(pos); return true;
            default: return false;
        }
    }

    /**
     * Runs PoseNet on every converted frame, keeps poses whose mean key-point
     * score exceeds 0.6 and labels foot contact by comparing ankle heights
     * between consecutive kept poses.
     */
    public List<posePart> findKeyPoints(){
        List<posePart> poseParts = new ArrayList<>();
        int totalCount = 0;
        double score = 0;
        for(int i = 0; i < getFramesBitmap().size(); i++){
            Bitmap frame = framesBitmap.get(i);
            Person person = p.calculateKeyPoint(frame, context);
            List<KeyPoint> keyPoints = person.getKeyPoints();
            posePart pp = new posePart();
            double totalScore = 0;
            for (KeyPoint kp : keyPoints) {
                if (assignPart(pp, kp.getBodyPart().toString(), toPosition(kp))) {
                    totalScore += kp.getScore();
                }
            }
            // Movement threshold: half the knee-to-ankle (shank) length.
            // NOTE(review): assumes knees and ankles were detected in every
            // frame, otherwise these accesses throw NullPointerException.
            double thresholdLLeg = Math.sqrt(Math.pow(pp.LEFT_KNEE.getX() - pp.LEFT_ANKLE.getX(), 2) + Math.pow(pp.LEFT_KNEE.getY() - pp.LEFT_ANKLE.getY(), 2)) / 2;
            double thresholdRLeg = Math.sqrt(Math.pow(pp.RIGHT_KNEE.getX() - pp.RIGHT_ANKLE.getX(), 2) + Math.pow(pp.RIGHT_KNEE.getY() - pp.RIGHT_ANKLE.getY(), 2)) / 2;
            // Compare foot contact labels of each frame with the previous kept frame.
            if (totalCount == 0) {
                // First kept pose: assume both feet start on the ground.
                pp.setLeftFootContactLabel(true);
                pp.setRightFootContactLabel(true);
            } else {
                posePart ppBefore = poseParts.get(totalCount - 1);
                double leftFoot = ppBefore.getLEFT_ANKLE().getY() - pp.getLEFT_ANKLE().getY();
                // Bug fix: the original computed rightFoot from the LEFT ankle.
                double rightFoot = ppBefore.getRIGHT_ANKLE().getY() - pp.getRIGHT_ANKLE().getY();
                // Left foot.
                if (leftFoot > thresholdLLeg) {
                    // Ankle moved up by more than the threshold: foot lifted.
                    pp.setLeftFootContactLabel(false);
                } else if (leftFoot <= -thresholdLLeg) {
                    // Ankle moved down: foot (re)contacts; toggle the previous
                    // frame's label, as the original logic did.
                    pp.setLeftFootContactLabel(true);
                    ppBefore.setLeftFootContactLabel(!ppBefore.isLeftFootContactLabel());
                } else {
                    // Within threshold: carry the previous label forward.
                    pp.setLeftFootContactLabel(ppBefore.isLeftFootContactLabel());
                }
                // Right foot (bug fix: the original mutated the LEFT labels here).
                if (rightFoot > thresholdRLeg) {
                    pp.setRightFootContactLabel(false);
                } else if (rightFoot <= -thresholdRLeg) {
                    pp.setRightFootContactLabel(true);
                    ppBefore.setRightFootContactLabel(!ppBefore.isRightFootContactLabel());
                } else {
                    pp.setRightFootContactLabel(ppBefore.isRightFootContactLabel());
                }
                poseParts.set(totalCount - 1, ppBefore);
            }
            totalScore = totalScore / keyPoints.size();
            // Keep only confident poses.
            if (totalScore > 0.6) {
                totalCount += 1;
                score += totalScore;
                pp.setROOT_POSITION(pp.LEFT_HIP, pp.RIGHT_HIP);
                pp.ROOT_POSITION.setAngle();
                pp.setCHEST(pp.LEFT_SHOULDER, pp.RIGHT_SHOULDER);
                pp.CHEST.setAngle();
                poseParts.add(pp);
            }
        }
        System.out.println("Average score : " + (score / totalCount));
        System.out.println("Confident value size : " + poseParts.size());
        return poseParts;
    }
}
|
<reponame>streamglider/streamglider
//
// FeedsReader.h
// StreamGlider
//
// Created by <NAME> on 17/08/2011.
// Copyright 2011 StreamGlider, Inc. All rights reserved.
//
// This program is free software if used non-commercially: you can redistribute it and/or modify
// it under the terms of the BSD 4 Clause License as published by
// the Free Software Foundation.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// BSD 4 Clause License for more details.
//
// You should have received a copy of the BSD 4 Clause License
// along with this program. If not, see the README.md file with this program.
#import <Foundation/Foundation.h>
#import "APIDelegate.h"
#import "LoginVCDelegate.h"

// Generic reader for the StreamGlider HTTP API: issues a request for a
// path+query with a given HTTP method and reports back through APIDelegate.
// Conforms to LoginVCDelegate, presumably to re-run the request after a
// login flow when handleAuthError is enabled — confirm in the implementation.
@interface APIReader : NSObject <LoginVCDelegate, APIDelegate>

// Receiver of API results (assign, i.e. not retained — avoids retain cycles).
@property (nonatomic, assign) id<APIDelegate> delegate;
// Raw body data to send with the request (used for POSTs, presumably).
@property (nonatomic, retain) NSString *postData;
// When YES, authentication errors are handled (see LoginVCDelegate) instead
// of being reported straight to the delegate.
@property (nonatomic, assign) BOOL handleAuthError;
// View controller used for presenting UI during auth handling; not retained.
@property (nonatomic, assign) UIViewController *viewController;
// When YES, the auth token is attached to the request.
@property (nonatomic, assign) BOOL addAuthToken;
// Request target, e.g. "/feeds?page=2".
@property (nonatomic, copy) NSString *pathAndQuery;
// HTTP method string ("GET", "POST", ...).
@property (nonatomic, copy) NSString *method;
// Nested reader, presumably used when re-issuing a request — confirm.
@property (nonatomic, retain) APIReader *reader;

// Convenience overloads; the shorter forms rely on default method/auth flags.
- (void)loadAPIDataFor:(NSString*)pathAndQuery;
- (void)loadAPIDataFor:(NSString*)pathAndQuery withMethod:(NSString*)method;
- (void)loadAPIDataFor:(NSString*)pathAndQuery withMethod:(NSString*)method addAuthToken:(BOOL)addAuthToken;
- (void)loadAPIDataFor:(NSString*)pathAndQuery withMethod:(NSString*)method addAuthToken:(BOOL)addAuthToken handleAuthError:(BOOL)handleAuthError;

@end
|
<reponame>chylex/Hardcore-Ender-Expansion
package chylex.hee.gui;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.entity.player.InventoryPlayer;
import net.minecraft.inventory.Container;
import net.minecraft.inventory.ICrafting;
import net.minecraft.item.ItemStack;
import chylex.hee.gui.helpers.ContainerHelper;
import chylex.hee.system.util.MathUtil;
import chylex.hee.tileentity.base.TileEntityAbstractTable;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
/**
 * Base container for "table" tile entities. Mirrors three table values to
 * attached clients through ICrafting progress-bar updates:
 * id 0 = required stardust, id 1 = time, id 2 = stored energy (a float packed
 * into an int via Float.floatToIntBits).
 */
public abstract class ContainerAbstractTable extends Container{
    protected final TileEntityAbstractTable table;
    // Values last sent to crafters, so updates go out only when they change.
    private int prevReqStardust, prevTime;
    private float prevStoredEnergy;

    public ContainerAbstractTable(InventoryPlayer inv, TileEntityAbstractTable table){
        this.table = table;
        registerSlots();
        ContainerHelper.addPlayerInventorySlots(this, inv, 0, 0);
    }

    /** Subclasses register their own slots before the player inventory slots. */
    protected abstract void registerSlots();

    @Override
    public void addCraftingToCrafters(ICrafting crafter){
        super.addCraftingToCrafters(crafter);
        // Push the full current state to a newly attached crafter.
        crafter.sendProgressBarUpdate(this, 0, table.getRequiredStardust());
        crafter.sendProgressBarUpdate(this, 1, table.getTime());
        crafter.sendProgressBarUpdate(this, 2, Float.floatToIntBits(table.getStoredEnergy()));
    }

    @Override
    public void detectAndSendChanges(){
        super.detectAndSendChanges();
        for(int i = 0; i < crafters.size(); i++){
            ICrafting crafter = (ICrafting)crafters.get(i);
            // Send only the fields that changed since the last sync.
            if (prevReqStardust != table.getRequiredStardust())crafter.sendProgressBarUpdate(this, 0, table.getRequiredStardust());
            if (prevTime != table.getTime())crafter.sendProgressBarUpdate(this, 1, table.getTime());
            if (!MathUtil.floatEquals(prevStoredEnergy, table.getStoredEnergy()))crafter.sendProgressBarUpdate(this, 2, Float.floatToIntBits(table.getStoredEnergy()));
        }
        prevReqStardust = table.getRequiredStardust();
        prevTime = table.getTime();
        prevStoredEnergy = table.getStoredEnergy();
    }

    @Override
    public ItemStack transferStackInSlot(EntityPlayer player, int slotId){
        // Shift-click handling delegated to the shared helper.
        return ContainerHelper.transferStack(this, this::mergeItemStack, table.getSizeInventory(), slotId); // TODO test
    }

    @Override
    @SideOnly(Side.CLIENT)
    public void updateProgressBar(int id, int value){
        // Client-side receiver for the ids sent above (see class comment).
        if (id == 0)table.setRequiredStardustClient(value);
        else if (id == 1)table.setTimeClient(value);
        else if (id == 2)table.setStoredEnergyClient(Float.intBitsToFloat(value));
    }

    @Override
    public final boolean canInteractWith(EntityPlayer player){
        return table.isUseableByPlayer(player);
    }
}
|
<gh_stars>0
package ch.raiffeisen.openbank.offer.controller.api;
import java.util.Date;
import org.springframework.hateoas.ResourceSupport;
import org.springframework.hateoas.core.Relation;
import ch.raiffeisen.openbank.common.controller.api.Amount;
import ch.raiffeisen.openbank.common.controller.api.Fee;
import ch.raiffeisen.openbank.offer.persistency.model.OfferType;
@Relation(value = "offer", collectionRelation = "offers")
public class OfferResource extends ResourceSupport {
private String accountId;
private String offerId;
private OfferType offerType;
private String description;
private Date startDateTime;
private Date endDateTime;
private Amount amount;
private Fee fee;
private String rate;
private String term;
private String url;
public String getAccountId() {
return accountId;
}
public void setAccountId(String accountId) {
this.accountId = accountId;
}
public String getOfferId() {
return offerId;
}
public void setOfferId(String offerId) {
this.offerId = offerId;
}
public OfferType getOfferType() {
return offerType;
}
public void setOfferType(OfferType offerType) {
this.offerType = offerType;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public Date getStartDateTime() {
return startDateTime;
}
public void setStartDateTime(Date startDateTime) {
this.startDateTime = startDateTime;
}
public Date getEndDateTime() {
return endDateTime;
}
public void setEndDateTime(Date endDateTime) {
this.endDateTime = endDateTime;
}
public Amount getAmount() {
return amount;
}
public void setAmount(Amount amount) {
this.amount = amount;
}
public Fee getFee() {
return fee;
}
public void setFee(Fee fee) {
this.fee = fee;
}
public String getRate() {
return rate;
}
public void setRate(String rate) {
this.rate = rate;
}
public String getTerm() {
return term;
}
public void setTerm(String term) {
this.term = term;
}
public String getUrl() {
return url;
}
public void setUrl(String url) {
this.url = url;
}
}
|
#!/bin/bash
# Builds pixz-1.0.7 with wllvm, extracts its bitcode, runs the static-analysis
# pipeline, and produces an ASan-instrumented binary.
#
# Fixes vs. previous version: fail-fast is enabled from the start (it was only
# turned on halfway through, so setup errors were silently ignored), variable
# expansions are quoted, and the duplicated ROOT_DIR computation is hoisted
# out of the OS branch.
set -eu

# For Mac
if [ "$(uname)" = "Darwin" ]; then
    # greadlink (GNU readlink) provides -f on macOS; install coreutils if missing.
    if ! [ -x "$(command -v greadlink)" ]; then
        brew install coreutils
    fi
    BIN_PATH=$(greadlink -f "$0")
# For Linux
else
    BIN_PATH=$(readlink -f "$0")
fi
# Repo root is four directory levels above this script.
ROOT_DIR=$(dirname "$(dirname "$(dirname "$(dirname "$BIN_PATH")")")")
echo "$ROOT_DIR"

export ROOT_DIR=${ROOT_DIR}
export PATH=${ROOT_DIR}/clang+llvm/bin:$PATH
export LD_LIBRARY_PATH=${ROOT_DIR}/clang+llvm/lib${LD_LIBRARY_PATH:+:$LD_LIBRARY_PATH}
echo "Everything's fine!"

# Trace the actual build steps.
set -x
cd "$ROOT_DIR/evaluation/NEW/pixz-1.0.7"
# Fetch and unpack the source tarball only once.
if ! [ -d "${ROOT_DIR}/evaluation/NEW/pixz-1.0.7/pixz-1.0.7" ]; then
    wget https://github.com/vasi/pixz/releases/download/v1.0.7/pixz-1.0.7.tar.gz
    tar xvf ./pixz-1.0.7.tar.gz
    rm -rf ./pixz-1.0.7.tar.gz
fi
./cleanDIR.sh
cd pixz-1.0.7
CC=wllvm CXX=wllvm++ CFLAGS="-g" CXXFLAGS="-g" ./configure
make
cd src
extract-bc ./pixz
"$ROOT_DIR/tool/staticAnalysis/staticAnalysis.sh" pixz
export Con_PATH=$ROOT_DIR/evaluation/NEW/pixz-1.0.7/pixz-1.0.7/src/ConConfig.pixz
"$ROOT_DIR/tool/staticAnalysis/DBDS-INSTRU/dbds-clang-fast++" -pthread -Wall -Wno-unknown-pragmas -g -fsanitize=address -o pixz pixz.bc -lm -larchive -llzma -lm
cd "$ROOT_DIR/evaluation/NEW/pixz-1.0.7"
# test
# $ROOT_DIR/tool/DBDS/run_PDS.py -d 3 ./pixz-1.0.7/src/pixz -c -k -p 3 test.tar
|
import config from './config'
var hljs = require('highlight.js')
var path = require('path');

// Minimal HTML escaper for the highlight fallback below.
// BUG FIX: the previous code called `md.utils.escapeHtml`, but no `md`
// variable exists in this file, so any code block whose language could not
// be highlighted raised a ReferenceError instead of rendering escaped text.
function escapeHtml(str) {
  return String(str)
    .replace(/&/g, '&amp;')
    .replace(/</g, '&lt;')
    .replace(/>/g, '&gt;')
    .replace(/"/g, '&quot;');
}

// Nuxt configuration
export default {
  mode: 'universal',
  server: {
    host: config.host, // default: localhost
    port: config.port // server port
  },
  /*
  ** Headers of the page
  */
  head: {
    title: 'umy-ui开发文档 - 为开发者准备的基于 Vue 2.0 的桌面端组件库,完美解决表格万级数据渲染卡顿问题',
    meta: [
      { charset: 'utf-8' },
      { name: 'viewport', content: 'width=device-width, initial-scale=1' },
      { hid: 'description', name: 'description', content: 'umyui, umy-ui, umy ui, ui, UI库, vue ui库, 解决element表格卡顿' }
    ],
    link: [
      { rel: 'icon', type: 'image/x-icon', href: '/favicon.ico' }
    ]
  },
  /*
  ** Customize the progress-bar color
  */
  loading: { color: '#fff' },
  /*
  ** Global CSS
  */
  css: [
    'element-ui/lib/theme-chalk/index.css',
    'themes/default',
    'themes/hightlight',
    'themes/publicLess',
    'themes/element-style/index',
    // ../lib/theme-chalk/index.css // testing build
    '../theme/index' // development build
  ],
  /*
  ** Plugins to load before mounting the App
  */
  plugins: [
    { src: '~/plugins/ElementUI', ssr: true }, // ssr:true renders on the server; false is client-only
    { src: '~/plugins/UmyUi', ssr: false }, // umyui
    { src: '~/plugins/permission', ssr: false } // permission / auth interceptor
  ],
  /*
  ** Nuxt.js modules
  */
  modules: [
    '@nuxtjs/axios',
    '@nuxtjs/markdownit'
  ],
  markdownit: {
    preset: 'default',
    linkify: true,
    breaks: true,
    use: [
      'markdown-it-div',
      'markdown-it-attrs'
    ],
    // Syntax-highlight fenced code blocks; fall back to escaped plain text.
    highlight: function (str, lang) {
      if (lang && hljs.getLanguage(lang)) {
        try {
          return '<pre class="hljs"><code>' +
            hljs.highlight(lang, str, true).value +
            '</code></pre>';
        } catch (__) {}
      }
      return '<pre class="hljs"><code>' + escapeHtml(str) + '</code></pre>';
    }
  },
  axios: config.axios,
  /*
  ** Build configuration
  */
  build: {
    cssSourceMap: false, // disable CSS source maps
    extractCSS: true, // extract common CSS for SSR builds; reduces bundle size
    vendor: ['axios'],
    transpile: [/^element-ui/],
    loader: [
      {
        test: /\.sass$/,
        loader: 'style-loader!css-loader!sass-loader',
      }
    ],
    // Shared style variable files
    styleResources: {
      scss: './themes/common.scss'
    },
    extend(config, ctx) {
      Object.assign(config.resolve.alias, {
        'umy-ui': path.resolve(__dirname, '../')
      })
    }
  }
}
|
<gh_stars>1-10
from sidekick import lazy
import arcade
from .base import GameWindow
class HasScrollingCameraMixin(GameWindow):
    """
    A basic game window that has a scrolling camera.

    The camera follows a focus region (see :meth:`get_viewport_focus`) and
    only moves when the focus drifts past the configured margins.
    """

    #: The ratio of movement for background/foreground.
    #: ratio = 0 => no move, ratio = 1 => sync with the foreground
    parallax_ratio = 0.1

    #: Tolerance of the reference point for the camera. It moves camera if
    #: the reference point (usually the player) moves beyond those margins.
    #: Measured in pixels.
    viewport_margin_horizontal = 200
    viewport_margin_vertical = 120

    #: x, y coordinates for the start of viewport area
    viewport_horizontal_start = 0
    viewport_vertical_start = 0

    #: Automatically computed viewport end coordinates
    @property
    def viewport_horizontal_end(self):
        return self.viewport_horizontal_start + self.width

    @property
    def viewport_vertical_end(self):
        return self.viewport_vertical_start + self.height

    #: Min/max coordinates of the viewport in both directions
    scene_horizontal_start = 0
    scene_horizontal_end = lazy(lambda _: _.width)
    scene_vertical_start = 0
    scene_vertical_end = lazy(lambda _: _.height)

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._viewport_focus = (
            self.viewport_horizontal_start,
            self.viewport_vertical_start,
            self.viewport_horizontal_end,
            self.viewport_vertical_end,
        )

    def move_with_parallax(self, obj, parallax=None, left=0, bottom=0):
        """
        Move object relative to viewport using parallax effect.

        Args:
            obj:
                Displaced object
            parallax:
                Ratio between [0, 1] of parallax. If not given, uses the
                default parallax.
            left:
            bottom:
                Initial displacements of obj in respect with the current
                viewport.
        """
        parallax = self.parallax_ratio if parallax is None else parallax
        viewport_x = self.viewport_horizontal_start
        viewport_y = self.viewport_vertical_start
        # NOTE(review): obj[0] looks like the first sprite of a sprite list
        # and obj.move shifts the whole list -- confirm against callers.
        dx = obj[0].left - left - viewport_x * (1 - parallax)
        dy = obj[0].bottom - bottom - viewport_y * (1 - parallax)
        obj.move(round(-dx), round(-dy))

    def move_with_viewport(self, obj, left=0, bottom=0):
        """
        Move an object fixed with the background.
        """
        self.move_with_parallax(obj, parallax=1.0, left=left, bottom=bottom)

    #
    # Register base implementations for class hooks
    #
    def on_viewport_changed(self):
        """
        Hook that is executed when viewport is changed
        """

    def get_viewport_focus(self):
        """
        Return a bounding box of (x_min, y_min, x_max, y_max) with the region
        that the viewport should try to focus.
        """
        return self.width, self.height, self.width, self.height

    #
    # Override base class methods
    #
    def update_elements(self, dt):
        super().update_elements(dt)
        self.update_viewport()

    def update_viewport(self):
        """
        Update viewport to include the focused viewport area.
        """
        xmin, ymin, xmax, ymax = self.get_viewport_focus()
        changed = False
        dx = self.viewport_margin_horizontal
        dy = self.viewport_margin_vertical
        v_xmin = self.viewport_horizontal_start
        v_xmax = self.viewport_horizontal_end
        v_ymin = self.viewport_vertical_start
        v_ymax = self.viewport_vertical_end

        # Check if player changed the viewport
        if xmin < v_xmin + dx:
            self.viewport_horizontal_start = \
                max(self.scene_horizontal_start, xmin - dx)
            changed = True
        if xmax > v_xmax - dx:
            self.viewport_horizontal_start = \
                min(self.scene_horizontal_end - self.width,
                    xmax + dx - self.width)
            changed = True
        if ymin < v_ymin + dy:
            self.viewport_vertical_start = \
                max(self.scene_vertical_start, ymin - dy)
            changed = True
        if ymax > v_ymax - dy:
            # BUG FIX: the upper clamp previously used ``self.width`` here,
            # letting the camera scroll past the top of the scene whenever
            # the window width differed from its height.
            self.viewport_vertical_start = \
                min(self.scene_vertical_end - self.height,
                    ymax + dy - self.height)
            changed = True

        if changed:
            self.on_viewport_changed()
            arcade.set_viewport(round(self.viewport_horizontal_start),
                                round(self.viewport_horizontal_end),
                                round(self.viewport_vertical_start),
                                round(self.viewport_vertical_end))
|
<reponame>vaniot-s/sentry<filename>src/sentry/static/sentry/app/views/settings/components/tag.tsx
import React from 'react';
import styled from '@emotion/styled';
import InlineSvg from 'app/components/inlineSvg';
import {Theme} from 'app/utils/theme';
import space from 'app/styles/space';
type Props = React.HTMLAttributes<HTMLDivElement> & {
priority?: keyof Theme['badge'] | keyof Theme['alert'];
size?: string;
icon?: string | React.ReactNode;
border?: boolean;
inline?: boolean;
};
type StyleFuncProps = Props & {theme: Theme};
/**
* Get priority from alerts or badge styles
*/
/**
 * Resolve the themed color set for the tag's priority, preferring the alert
 * palette and falling back to the badge palette. Returns null when no
 * priority is set or neither palette has a matching entry.
 */
const getPriority = (p: StyleFuncProps) => {
  if (!p.priority) {
    return null;
  }
  const fromAlert = p.theme.alert[p.priority];
  return fromAlert ?? p.theme.badge[p.priority] ?? null;
};
// Inline tags get a left margin so they sit apart from preceding text.
const getMarginLeft = (p: StyleFuncProps) => {
  if (!p.inline) {
    return '';
  }
  const margin = p.size === 'small' ? '0.25em' : '0.5em';
  return `margin-left: ${margin};`;
};
// Optional 1px border, colored by the priority palette with a gray fallback.
const getBorder = (p: StyleFuncProps) => {
  if (!p.border) {
    return '';
  }
  const color = getPriority(p)?.border ?? p.theme.gray400;
  return `border: 1px solid ${color};`;
};
/**
 * Pill-style tag. Renders children (with an optional leading icon) inside a
 * styled <div>; colors come from the theme's alert/badge palettes via
 * `priority`, while `size`, `border` and `inline` tweak the presentation.
 */
const Tag = styled(
  ({
    children,
    icon,
    // Style-only props are destructured off so they don't leak onto the DOM node.
    inline: _inline,
    priority: _priority,
    size: _size,
    border: _border,
    ...props
  }: Props) => (
    <div {...props}>
      {icon && (
        <IconWrapper>
          {React.isValidElement(icon) ? (
            React.cloneElement(icon, {size: 'xs'})
          ) : typeof icon === 'string' ? (
            <InlineSvg src={icon} size="12px" />
          ) : null}
        </IconWrapper>
      )}
      {children}
    </div>
  )
)`
  display: inline-flex;
  box-sizing: border-box;
  padding: ${p => (p.size === 'small' ? '0.1em 0.4em 0.2em' : '0.35em 0.8em 0.4em')};
  font-size: ${p => p.theme.fontSizeExtraSmall};
  line-height: 1;
  color: ${p => (p.priority ? '#fff' : p.theme.gray800)};
  text-align: center;
  white-space: nowrap;
  vertical-align: middle;
  align-items: center;
  border-radius: ${p => (p.size === 'small' ? '0.25em' : '2em')};
  text-transform: lowercase;
  font-weight: ${p => (p.size === 'small' ? 'bold' : 'normal')};
  background: ${p => getPriority(p)?.background ?? p.theme.gray300};
  ${p => getBorder(p)};
  ${p => getMarginLeft(p)};
`;
// Spacing wrapper for the optional leading icon inside a Tag.
const IconWrapper = styled('span')`
  margin-right: ${space(0.5)};
`;
export default Tag;
|
<reponame>jamiels/askde
package controllers.askde;
import javax.inject.Inject;
import com.amazon.speech.json.SpeechletRequestEnvelope;
import com.amazon.speech.speechlet.IntentRequest;
import com.amazon.speech.speechlet.LaunchRequest;
import com.amazon.speech.speechlet.SessionEndedRequest;
import com.amazon.speech.speechlet.SessionStartedRequest;
import com.amazon.speech.speechlet.SpeechletResponse;
import controllers.raven.alexa.BaseAlexaController;
import play.Logger;
import services.askde.AskDESkillService;
import util.App;
import util.App;
/**
 * Alexa skill controller for AskDE. Receives the speechlet lifecycle
 * callbacks and delegates intent/launch handling to the injected
 * {@link AskDESkillService}.
 */
public class AskDESkillController extends BaseAlexaController {

    // Skill service that implements the actual response logic.
    @Inject AskDESkillService dess;

    public AskDESkillController() {
        super();
        // Ensure required system properties are set before handling requests.
        App.prepareSystemProperties();
    }

    /** Routes a resolved intent to the skill service. */
    @Override
    public SpeechletResponse onIntent(SpeechletRequestEnvelope<IntentRequest> requestEnvelope) {
        Logger.info("onIntent requestId={}, sessionId={}", requestEnvelope.getRequest().getRequestId(),
                requestEnvelope.getSession().getSessionId());
        Logger.info("Fired onIntent");
        return dess.invoke(requestEnvelope);
    }

    /** Returns the welcome message when the skill is opened without an intent. */
    @Override
    public SpeechletResponse onLaunch(SpeechletRequestEnvelope<LaunchRequest> requestEnvelope) {
        Logger.info("Fired onLaunch");
        return dess.getWelcomeMessage(requestEnvelope);
    }

    /** Session teardown hook; currently log-only. */
    @Override
    public void onSessionEnded(SpeechletRequestEnvelope<SessionEndedRequest> arg0) {
        Logger.info("Fired onSessionEnded");
    }

    /** Session setup hook; currently log-only. */
    @Override
    public void onSessionStarted(SpeechletRequestEnvelope<SessionStartedRequest> arg0) {
        Logger.info("Fired onSessionStarted");
    }
}
|
import './lesson-12.scss';
const widgets = document.querySelectorAll('.lighter');
// function expression
// Wires up one "lighter" widget: a power toggle button plus a set of lights.
// Only one light may be active at a time, and only while powered on.
const lighter = (htmlElement) => {
  const lights = htmlElement.querySelectorAll('.light');
  const toggleBtn = htmlElement.querySelector('.btn-toggle');
  let powered = toggleBtn.classList.contains('active');

  // Switch every light off.
  const switchAllOff = () => {
    for (const light of lights) {
      light.classList.remove('active');
    }
  };

  // Initial button label reflects the current power state.
  toggleBtn.textContent = powered ? 'ON' : 'OFF';

  toggleBtn.onclick = () => {
    toggleBtn.classList.toggle('active');
    powered = !powered;
    if (powered) {
      toggleBtn.textContent = 'ON';
    } else {
      // Powering down also extinguishes all lights.
      switchAllOff();
      toggleBtn.textContent = 'OFF';
    }
  };

  // Clicking a light makes it the single active one (when powered).
  for (const light of lights) {
    light.onclick = () => {
      if (powered) {
        switchAllOff();
        light.classList.add('active');
      }
    };
  }
};
widgets.forEach((el) => {
lighter(el);
});
|
//
// Copyright (c) 2017 <NAME> (<EMAIL>)
// Copyright (c) 2015 <NAME>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//
#ifndef TelegramBot_INCLUDED
#define TelegramBot_INCLUDED
#include <tgbot/TgTypeParser.h>
#include "HttpsClient.h"
#include "HttpClientHandler.h"
#include "HttpReqArg.h"
class TelegramBotHandler;
// Asynchronous Telegram Bot API client built on asio. Long-polls getUpdates
// and dispatches results to an optional TelegramBotHandler.
class TelegramBot : public HttpClientHandler
{
public:
    // Text formatting mode for outgoing messages (Bot API parse_mode).
    enum ParseMode
    {
        None,
        Markdown,
        Html
    };

    TelegramBot(asio::io_service& ioService, TelegramBotHandler* handler = 0);
    virtual ~TelegramBot();

    // Begin / end the update polling loop.
    void start();
    void stop();

    // Configuration setters (see inline definitions below).
    void setEventHandler(TelegramBotHandler* handler);
    void setApiUrl(const std::string& apiUrl);
    void setToken(const std::string& token);
    void setReconnectDelay(int64_t delay);

    // Bot API calls. Results are delivered asynchronously via the handler.
    void getMe();
    void sendMessage(int64_t chatId, const std::string& text, ParseMode parseMode = ParseMode::None, bool disableWebPagePreview = false, bool disableNotification = false, int32_t replyToMessageId = 0, const TgBot::GenericReply::Ptr& replyMarkup = TgBot::GenericReply::Ptr());
    void sendLocation(int64_t chatId, double latitude, double longitude, bool disableNotification = false, int32_t replyToMessageId = 0, const TgBot::GenericReply::Ptr& replyMarkup = TgBot::GenericReply::Ptr());
    void sendVenue(int64_t chatId, double latitude, double longitude, const std::string& title, const std::string& address, const std::string& foursquareId = std::string(), bool disableNotification = false, int32_t replyToMessageId = 0, const TgBot::GenericReply::Ptr& replyMarkup = TgBot::GenericReply::Ptr());
    void sendContact(int64_t chatId, const std::string& phoneNumber, const std::string& firstName, const std::string& lastName = std::string(), bool disableNotification = false, int32_t replyToMessageId = 0, const TgBot::GenericReply::Ptr& replyMarkup = TgBot::GenericReply::Ptr());

private:
    // Tags correlate asynchronous HTTP responses with the request that
    // produced them.
    enum Tags
    {
        TAG_NONE = 0,
        TAG_GET_UPDATES = 1,
        TAG_GET_ME = 2,
        TAG_SEND_MESSAGE = 3,
        TAG_SEND_LOCATION = 4,
        TAG_SEND_VENUE = 5,
        TAG_SEND_CONTACT = 6
    };

    void getUpdates(int32_t limit, int32_t timeout);
    void makeRequest(const std::string& method, const std::vector<HttpReqArg>& arguments, unsigned int tag = 0, bool longPoll = false);
    void handleUpdate(const TgBot::Update::Ptr& update);

    // HttpClientHandler callbacks.
    void handleHttpClientError(const system::error_code& error);
    void handleHttpClientIdle();
    void handleHttpClientResponse(const HttpRequest& request, const std::string& response);
    void handleTimerEvent(const system::error_code& error);

    asio::deadline_timer _timer;    // drives reconnect delays
    HttpsClient _client;
    std::string _apiUrl;
    std::string _token;
    int32_t _lastUpdateId;          // offset for the next getUpdates call
    int64_t _timerDelay;
    bool _enabled;
    TelegramBotHandler* _handler;   // not owned; may be null
};
// Inline configuration setters. None of these take effect on an in-flight
// request; call them before start().

inline void TelegramBot::setEventHandler(TelegramBotHandler* handler)
{
    _handler = handler;
}

inline void TelegramBot::setApiUrl(const std::string& apiUrl)
{
    _apiUrl = apiUrl;
}

inline void TelegramBot::setToken(const std::string& token)
{
    _token = token;
}

inline void TelegramBot::setReconnectDelay(int64_t delay)
{
    _timerDelay = delay;
}
#endif // TelegramBot_INCLUDED
|
<reponame>Abhigyan001/games_info
# Handles creation of articles by the signed-in user.
class ArticlesController < ApplicationController
  before_action :require_user

  # GET /articles/new — blank article plus the category list for the form.
  def new
    @article = Article.new
    @categories = Category.all
  end

  # POST /articles — builds the article for the current user and attaches
  # the selected categories.
  def create
    @article = current_user.articles.build(article_params)
    if @article.save
      # Guard the lookup: Category.find(nil) raises RecordNotFound, which
      # previously produced a 500 *after* the article had been persisted
      # whenever the form was submitted with no category selected.
      @article.categories << Category.find(category_ids) if category_ids.present?
      flash[:success] = 'Your article was created successfully'
      redirect_to root_path
    else
      flash.now[:danger] = 'Ups, something went wrong, please check the errors'
      @categories = Category.all
      render :new
    end
  end

  private

  # Strong-parameter whitelist for articles.
  def article_params
    params.require(:article).permit(:title, :text, :image)
  end

  # Raw category id(s) from the form; may be nil when nothing was chosen.
  def category_ids
    params[:selected_id]
  end
end
|
#include "thread_util.h"
#include <sstream>
#include <string>
#include <unistd.h>
using namespace std;
pthread_mutex_t GetterThread::mutex = PTHREAD_MUTEX_INITIALIZER;
// Reads one '\n'-terminated line from the getter's stream into its buffer.
// Runs on a dedicated pthread; `p` is the owning GetterThread*.
void * GetterThread::GetMessage(void * p)
{
    GetterThread * getter = (GetterThread*)(p);
    stringstream inputStream;

    // BUG FIX: fgetc() returns an int so that EOF (-1) is distinguishable
    // from a valid 0xFF byte. The previous code stored it in a plain char,
    // which mis-detects 0xFF as EOF (signed char) or never sees EOF at all
    // (unsigned char), hanging the loop.
    int c = fgetc(getter->stream);
    while (c != '\n' && c != EOF)
    {
        inputStream << (char)c;
        c = fgetc(getter->stream);
    }

    if (c == EOF)
    {
        // Preserve the original sentinel: a buffer containing only the
        // truncated EOF value signals end-of-stream to the reader. The
        // write is now mutex-protected, consistent with the normal path.
        pthread_mutex_lock(&mutex);
        getter->buffer = "";
        getter->buffer += (char)c;
        pthread_mutex_unlock(&mutex);
        // NOTE(review): `finished` is intentionally left untouched here,
        // matching the original code — confirm callers detect EOF via the
        // buffer contents rather than the flag.
        return NULL;
    }

    pthread_mutex_lock(&mutex);
    getter->buffer = inputStream.str();
    pthread_mutex_unlock(&mutex);
    getter->finished = true;
    return NULL;
}
// Sleeps for the timer's configured count, then flags completion. Runs on a
// dedicated pthread; `p` is the owning TimerThread*.
void * TimerThread::Timeout(void * p)
{
    TimerThread * timer = (TimerThread*)(p);
    // usleep takes microseconds; `count` is presumably stored in that unit —
    // confirm against thread_util.h.
    usleep(timer->count);
    timer->finished = true;
    return NULL;
}
|
import React, {useState, useEffect} from 'react';
const App = () => {
const [data, setData] = useState(null);
const [text, setText] = useState('');
useEffect(() => {
const savedData = localStorage.getItem('data');
if (savedData) {
setData(JSON.parse(savedData));
}
}, []);
const handleChange = (e) => {
setText(e.target.value);
};
const handleSave = () => {
const newData = {...data, [text]: text};
setData(newData);
localStorage.setItem('data', JSON.stringify(newData));
};
return (
<div>
<input type="text" value={text} onChange={handleChange} />
<button onClick={handleSave}>Save</button>
</div>
);
};
export default App;
|
/// A single stock item tracked by the inventory.
struct Product {
    name: String,
    stock_quantity: u32,
}

/// Flat collection of products; all operations below are linear scans by name.
struct Inventory {
    products: Vec<Product>,
}
impl Inventory {
    /// Append a new product with the given name and stock level.
    fn add_product(&mut self, name: String, stock_quantity: u32) {
        let product = Product { name, stock_quantity };
        self.products.push(product);
    }

    /// Drop every product whose name matches `name`.
    fn remove_product(&mut self, name: &str) {
        self.products.retain(|p| p.name != name);
    }

    /// Set the stock level of the first product named `name`; no-op when absent.
    fn update_stock_quantity(&mut self, name: &str, new_quantity: u32) {
        for product in self.products.iter_mut() {
            if product.name == name {
                product.stock_quantity = new_quantity;
                break;
            }
        }
    }

    /// Print one line per product with its current stock level.
    fn display_inventory(&self) {
        self.products.iter().for_each(|p| {
            println!("Product: {}, Stock Quantity: {}", p.name, p.stock_quantity);
        });
    }
}
/// Demo driver: exercises add/update/remove, printing the inventory after
/// each mutation.
fn main() {
    let mut inventory = Inventory { products: Vec::new() };
    inventory.add_product("Apple".to_string(), 50);
    inventory.add_product("Banana".to_string(), 100);
    inventory.display_inventory();
    inventory.update_stock_quantity("Apple", 60);
    inventory.display_inventory();
    inventory.remove_product("Banana");
    inventory.display_inventory();
}
|
import gql from 'graphql-tag';
// GraphQL query: fetches whether the current subscription is active.
export default gql`
  query getSubscription {
    subscription {
      isActive
    }
  }
`;
|
import React from 'react';
import Input from '../src/components/Input';
// Minimal harness component that renders <Input> with its success state on.
class TestInput extends React.Component {
  static displayName = "@TestInput";

  render() {
    return (
      <Input
        success={true}
      >
      </Input>
    );
  }
}

// Style map is currently empty; kept for parity with sibling test components.
const _styles = {
};

export default TestInput;
|
#include <immintrin.h>
void matrix_multiply_simd(const double* A, const double* B, double* C, int m, int n, int p) {
for (int i = 0; i < m; ++i) {
for (int j = 0; j < p; ++j) {
__m256d sum_avx2 = _mm256_setzero_pd(); // Initialize the sum for AVX2
__m512d sum_avx512 = _mm512_setzero_pd(); // Initialize the sum for AVX512
for (int k = 0; k < n; k += 4) {
__m256d a_avx2 = _mm256_load_pd(&A[i * n + k]); // Load 4 elements from A using AVX2
__m256d b_avx2 = _mm256_load_pd(&B[k * p + j]); // Load 4 elements from B using AVX2
sum_avx2 = _mm256_fmadd_pd(a_avx2, b_avx2, sum_avx2); // Perform fused multiply-add using AVX2
}
for (int k = 0; k < n; k += 8) {
__m512d a_avx512 = _mm512_load_pd(&A[i * n + k]); // Load 8 elements from A using AVX512
__m512d b_avx512 = _mm512_load_pd(&B[k * p + j]); // Load 8 elements from B using AVX512
sum_avx512 = _mm512_fmadd_pd(a_avx512, b_avx512, sum_avx512); // Perform fused multiply-add using AVX512
}
// Store the results back to the output matrix C
_mm256_store_pd(&C[i * p + j], sum_avx2); // Store the AVX2 result
_mm512_store_pd(&C[i * p + j], sum_avx512); // Store the AVX512 result
}
}
}
|
#!/bin/bash
# Copyright 2022 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Fail fast: -e (errexit), -u (nounset), -o pipefail — same trio as the
# long-form set -o lines, in the compact spelling.
set -euo pipefail

# Run from the repository root regardless of the caller's cwd.
REPO_ROOT=$(dirname "${BASH_SOURCE[0]}")/..
cd "${REPO_ROOT}" || exit 1

echo "*** Verifying CAPV API conversions***"
make verify-conversions
|
<gh_stars>1-10
INSERT INTO previous_document (name_en, name) VALUES
(
'Junior Specialist Diploma ; E16 067991;dated from 30/06/ 2016; issued by : College of Electronic Devices of Ivano-Frankivsk National Technical University Oil and Gas',
'Диплом молодшого спеціаліста ; E16 067991; 30.06.2016; Ким видано:Коледж електронних приладів Івано-Франківського національного технічного університету нафти і газу'),
('Junior Specialist Diploma; E16 068008; dated from 30/06/ 2016; Issued by : College of Electronic Devices of Ivano-Frankivsk National Technical Oil and Gas University', 'Диплом молодшого спеціаліста ; E16 068008; 30.06.2016; Ким видано:Коледж електронних приладів Івано-Франківського національного технічного університету нафти і газу'),
('Junior Specialist Diploma; E16 067987; dated from 30/06/ 2016; Issued by : College of Electronic Devices of Ivano-Frankivsk National Technical Oil and Gas University', 'Диплом молодшого спеціаліста ; E16 067987; 30.06.2016; Ким видано:Коледж електронних приладів Івано-Франківського національного технічного університету нафти і газу'),
('Junior Specialist Diploma; E16 067997; dated from 30, 2016; Issued by : College of Electronic Devices of Ivano-Frankivsk National Technical Oil and Gas University', 'Диплом молодшого спеціаліста ; E16 067997; 30.06.2016; Ким видано:Коледж електронних приладів Івано-Франківського національного технічного університету нафти і газу'),
('Junior Specialist Diploma; E16 067985; dated from 30/06/ 2016; Issued by : College of Electronic Devices of Ivano-Frankivsk National Technical Oil and Gas University', 'Диплом молодшого спеціаліста ; E16 067985; 30.06.2016; Ким видано:Коледж електронних приладів Івано-Франківського національного технічного університету нафти і газу'),
('Junior Specialist Diploma ; E15 008123; dated from 30/06/ 2015; Issued by : College of Electronic Devices of Ivano-Frankivsk National Technical Oil and Gas University', 'Диплом молодшого спеціаліста ; E15 008123; 30.06.2015; Ким видано:Коледж електронних приладів Івано-Франківського національного технічного університету нафти і газу'),
('Junior Specialist Diploma ; E16 116443; dated from 07/05/2016; issued by Ivano-Frankivsk financial and commercial cooperative Stepana Granata college', 'Диплом молодшого спеціаліста ; E16 116443; 05.07.2016; Ким видано:Івано-Франківський фінансово-комерційний кооперативний коледж імені С. Граната'),
('Junior Specialist Diploma ; E16 068011; dated from 30/06/ 2016; Issued by : College of Electronic Devices of Ivano-Frankivsk National Technical Oil and Gas University', 'Диплом молодшого спеціаліста ; E16 068011; 30.06.2016; Ким видано:Коледж електронних приладів Івано-Франківського національного технічного університету нафти і газу'),
('Junior Specialist Diploma with honors ); E16 085671; dated from 30/06/2016; issued by Kalush Polytechnic College', 'Диплом молодшого спеціаліста (З відзнакою) ; E16 085671; 30.06.2016; Ким видано:Державний вищий навчальний заклад "Калуський політехнічний коледж"'),
('Junior Specialist Diploma with honors ); E16 085671; dated from 30/06/2016; issued by Kalush Polytechnic College', 'Диплом молодшого спеціаліста ; E16 067993; 30.06.2016; Ким видано:Коледж електронних приладів Івано-Франківського національного технічного університету нафти і газу'),
('Junior Specialist Diploma ; E16 037912; dated from 25/06/2016; Issued by Ivano-Frankivsk national medical university', 'Диплом молодшого спеціаліста ; E16 037912; 25.06.2016; Ким видано:Державний вищий навчальний заклад "Івано-Франківський національний медичний університет"'),
('Junior Specialist Diploma; E16 068004; dated from 30/06/2016; issued by College of Electronic devices of Ivano-Frankivsk national technical oil and gas university', 'Диплом молодшого спеціаліста ; E16 068004; 30.06.2016; Ким видано:Коледж електронних приладів Івано-Франківського національного технічного університету нафти і газу'),
('Junior Specialist Diploma; E16 068004; dated from 30/06/2016; issued by College of Electronic devices of Ivano-Frankivsk national technical oil and gas university', 'Диплом молодшого спеціаліста (З відзнакою) ; E16 068003; 30.06.2016; Ким видано:Коледж електронних приладів Івано-Франківського національного технічного університету нафти і газу'),
('Junior Specialist Diploma; E16 068006; dated from 30/06/ 2016; issued by College of Electronic devices of Ivano-Frankivsk national technical oil and gas university', 'Диплом молодшого спеціаліста ; E16 068006; 30.06.2016; Ким видано:Коледж електронних приладів Івано-Франківського національного технічного університету нафти і газу'),
('Junior Specialist Diploma; E16 067998; dated from 30/06/ 2016; issued by College of Electronic devices of Ivano-Frankivsk national technical oil and gas university', 'Диплом молодшого спеціаліста ; E16 067998; 30.06.2016; Ким видано:Коледж електронних приладів Івано-Франківського національного технічного університету нафти і газу'),
('Junior Specialist Diploma ; E16 067989; dated from 30/06/2016; issued by College of Electronic devices of Ivano-Frankivsk national technical oil and gas university', 'Диплом молодшого спеціаліста ; E16 067989; 30.06.2016; Ким видано:Коледж електронних приладів Івано-Франківського національного технічного університету нафти і газу'),
('Junior Specialist Diploma; E16 068030; dated from 30/06/2016; issued by College of Electronic Devices of Ivano-Frankivsk national technical oil and gas university', 'Диплом молодшого спеціаліста ; E16 068030; 30.06.2016; Ким видано:Коледж електронних приладів Івано-Франківського національного технічного університету нафти і газу'),
('Junior Specialist Diploma; E16 067986; dated from 30/06/2016; issued by College of Electronic devices of Ivano-Frankivsk national technical oil and gas university', 'Диплом молодшого спеціаліста ; E16 067986; 30.06.2016; Ким видано:Коледж електронних приладів Івано-Франківського національного технічного університету нафти і газу'),
('Junior Specialist Diploma ; E16 068001; dated from 30/06/2016; issued by College of Electronic devices of Ivano-Frankivsk national technical oil and gas university', 'Диплом молодшого спеціаліста ; E16 068001; 30.06.2016; Ким видано:Коледж електронних приладів Івано-Франківського національного технічного університету нафти і газу'),
('Junior Specialist Diploma; E16 068007; dated from 30/06/ 2016; Kim issued by College of Electronic devices of Ivano-Frankivsk national technical oil and gas university', 'Диплом молодшого спеціаліста ; E16 068007; 30.06.2016; Ким видано:Коледж електронних приладів Івано-Франківського національного технічного університету нафти і газу'),
('Junior Specialist Diploma; E16 068007; dated from 30/06/ 2016; Kim issued by College of Electronic devices of Ivano-Frankivsk national technical oil and gas university', 'Диплом молодшого спеціаліста ; E16 068022; 30.06.2016; Ким видано:Коледж електронних приладів Івано-Франківського національного технічного університету нафти і газу'),
('Junior Specialist Diploma ; E16 116429; dated from 07/05/2016; issued by Ivano-Frankivsk financial and commercial cooperative Stepana Granata College', 'Диплом молодшого спеціаліста ; E16 116429; 05.07.2016; Ким видано:Івано-Франківський фінансово-комерційний кооперативний коледж імені С. Граната'),
('Junior Specialist Diploma; E16 116447; dated from 07/05/2016; issued: Ivano-Frankivsk financial and commercial cooperative Stepana Granata college', 'Диплом молодшого спеціаліста ; E16 116447; 05.07.2016; Ким видано:Івано-Франківський фінансово-комерційний кооперативний коледж імені С. Граната'),
('Junior Specialist Diploma ; E16 068017; dated from 30/06/2016; issued by College of Electronic Devices Ivano-Frankivsk national technical oil and gas university', 'Диплом молодшого спеціаліста ; E16 068017; 30.06.2016; Ким видано:Коледж електронних приладів Івано-Франківського національного технічного університету нафти і газу'),
('Junior Specialist Diploma ; E16 068017; dated from 30/06/2016; issued by College of Electronic Devices Ivano-Frankivsk national technical oil and gas university', 'Диплом молодшого спеціаліста ; E16 068023; 30.06.2016; Ким видано:Коледж електронних приладів Івано-Франківського національного технічного університету нафти і газу'),
('Junior Specialist Diploma ; E16 068017; dated from 30/06/2016; issued by College of Electronic Devices Ivano-Frankivsk national technical oil and gas university', 'Диплом молодшого спеціаліста ; E16 068014; 30.06.2016; Ким видано:Коледж електронних приладів Івано-Франківського національного технічного університету нафти і газу'),
('Junior Specialist Diploma ; E16 116444; dated from 07/05/2016; issued by Ivano-Frankivsk financial and commercial cooperative college Stepana Granata', 'Диплом молодшого спеціаліста ; E16 116444; 05.07.2016; Ким видано:Івано-Франківський фінансово-комерційний кооперативний коледж імені С. Граната'),
('Junior Specialist Diploma; E16 068005; dated from 30/06/ 2016; issued by College of electronic devices Ivano-Frankivsk national technical oil and gas university', 'Диплом молодшого спеціаліста ; E16 068005; 30.06.2016; Ким видано:Коледж електронних приладів Івано-Франківського національного технічного університету нафти і газу'),
('Junior Specialist Diploma ; E16 128826; dated from 07.07.2016; issued by Separated structural unit - Kolomyia Polytechnic College of Lviv Polytechnic National University', 'Диплом молодшого спеціаліста ; E16 128826; 07.07.2016; <NAME>о:Відокремлений структурний підрозділ - Коломийський політехнічний коледж Національного університету "Львівська політехніка"'),
('Junior Specialist Diploma; E16 068009; dated from 30/06/ 2016; issued by College of electronic devices Ivano-Frankivsk national technical oil and gas university', 'Диплом молодшого спеціаліста ; E16 068009; 30.06.2016; <NAME>о:Коледж електронних приладів Івано-Франківського національного технічного університету нафти і газу'),
('Junior Specialist Diploma ; VA 43304787; dated from 01/06/2012; issued by Ivano-Frankivsk financial and commercial cooperative Stepana Granata college', 'Диплом молодшого спеціаліста ; BA 43304787; 01.06.2012; Ким видано:Івано-Франківським фінанасово-комерційним кооперативним коледжем ім. <NAME>'),
('Junior Specialist Diploma; E16 068020; dated from 30/06/ 2016; issued by College electronic devices Ivano-Frankivsk national technical oil and gas university', 'Диплом молодшого спеціаліста ; E16 068020; 30.06.2016; Ким видано:Коледж електронних приладів Івано-Франківського національного технічного університету нафти і газу'),
('Junior Specialist Diploma; E16 068020; dated from 30/06/ 2016; issued by College electronic devices Ivano-Frankivsk national technical oil and gas university', 'Диплом молодшого спеціаліста ; E16 068031; 30.06.2016; Ким видано:Коледж електронних приладів Івано-Франківського національного технічного університету нафти і газу'),
('Junior Specialist Diploma; E16 125245; dated from 30/06/2016; Issued by Husyatyn college, Ternopil national technical Ivan Pulij University', 'Диплом молодшого спеціаліста ; E16 125245; 30.06.2016; Ким видано:Гусятинський коледж Тернопільського національного технічного університету імені <NAME>'),
('Junior Specialist Diploma; E16 068016; dated from 30/06/2016; issued by College of electronic devices Ivano-Frankivsk national technical oil and gas university', 'Диплом молодшого спеціаліста ; E16 068016; 30.06.2016; Ким видано:Коледж електронних приладів Івано-Франківського національного технічного університету нафти і газу'),
('Junior Specialist Diploma ; E16 116445; dated from 07/05/2016; issued by Ivano-Frankivsk financial and commercial cooperative Stepana Granata college ', 'Диплом молодшого спеціаліста ; E16 116445; 05.07.2016; Ким видано:Івано-Франківський фінансово-комерційний кооперативний коледж імені С. Граната'),
('Junior Specialist Diploma ; E16 068018; dated from 30/06/2016; issued by College of electronic devices Ivano-Frankivsk national technical oil and gas university', 'Диплом молодшого спеціаліста ; E16 068018; 30.06.2016; Ким видано:Коледж електронних приладів Івано-Франківського національного технічного університету нафти і газу'),
('Junior Specialist Diploma ; E16 067992; dated from 30/06/2016; issued by College of electronic devices Ivano-Frankivsk national technical oil and gas university', 'Диплом молодшого спеціаліста ; E16 067992; 30.06.2016; Ким видано:Коледж електронних приладів Івано-Франківського національного технічного університету нафти і газу'),
('Junior Specialist Diploma; E16 067994; dated from 30/06/ 2016; issued by College of electronic devices Ivano-Frankivsk national technical oil and gas university', 'Диплом молодшого спеціаліста ; E16 067994; 30.06.2016; Ким видано:Коледж електронних приладів Івано-Франківського національного технічного університету нафти і газу'),
('Junior Specialist Diploma; E16 068000; dated from 30/06/2016; issued by Collegeof electronic devices Ivano-Frankivsk national technical oil and gas university', 'Диплом молодшого спеціаліста ; E16 068000; 30.06.2016; Ким видано:Коледж електронних приладів Івано-Франківського національного технічного університету нафти і газу'),
('Junior Specialist Diploma; E16 068000; dated from 30/06/2016; issued by Collegeof electronic devices Ivano-Frankivsk national technical oil and gas university', 'Диплом молодшого спеціаліста ; E16 067984; 30.06.2016; Ким видано:Коледж електронних приладів Івано-Франківського національного технічного університету нафти і газу'),
('Junior Specialist Diploma ; VA 47525868; dsted from 4/07/ 2014; issued by Ivano-Frankivsk financial and commercial cooperative Stepana Granata college', 'Диплом молодшого спеціаліста ; BA 47525868; 04.07.2014; Ким видано:Івано-Франківський фінансово-комерційний кооперативний коледж імені С. Граната'),
('Junior Specialist Diploma ; VA 47525868; dsted from 4/07/ 2014; issued by Ivano-Frankivsk financial and commercial cooperative Stepana Granata college', 'Диплом молодшого спеціаліста ; E16 067996; 30.06.2016; Ким видано:Коледж електронних приладів Івано-Франківського національного технічного університету нафти і газу'),
('Junior Specialist Diploma ; E16 068021 30/06/ 2016; Issued by College of Electronic devices of Ivano-Frankivsk National Technical Oil and Gas University', 'Диплом молодшого спеціаліста ; E16 068021; 30.06.2016; Ким видано:Коледж електронних приладів Івано-Франківського національного технічного університету нафти і газу'),
('Junior Specialist Diploma ; E16 068021 30/06/ 2016; Issued by College of Electronic devices of Ivano-Frankivsk National Technical Oil and Gas University', 'Диплом молодшого спеціаліста ; E16 116423; 05.07.2016; Ким видано:Івано-Франківський фінансово-комерційний кооперативний коледж імені С. Граната'),
('Junior Specialist Diploma ; E16 068021 30/06/ 2016; Issued by College of Electronic devices of Ivano-Frankivsk National Technical Oil and Gas University', 'Диплом молодшого спеціаліста ; E16 128833; 07.07.2016; Ким видано:Відокремлений структурний підрозділ - Коломийський політехнічний коледж Національного університету "Львівська політехніка"'),
('Junior Specialist Diploma ; E16 068002; dated from 30/06/ 2016; Issued by College of Electronic devices', 'Диплом молодшого спеціаліста ; E16 068002; 30.06.2016; Ким видано:Коледж електронних приладів Івано-Франківського національного технічного університету нафти і газу'),
('Junior Specialist Diploma ; E16 068012; dated from 30/06/2016; Issued by College of Electronic devices of Ivano-Frankivsk National Technical Oil and Gas University', 'Диплом молодшого спеціаліста ; E16 068012; 30.06.2016; Ким видано:Коледж електронних приладів Івано-Франківського національного технічного університету нафти і газу'),
('Junior Specialist Diploma ; E16 116422; dated from 07/05/2016; issued by Ivano-Frankivsk financial and commercial cooperative Stepana Granata college', 'Диплом молодшого спеціаліста ; E16 116422; 05.07.2016; Ким видано:Івано-Франківський фінансово-комерційний кооперативний коледж імені С. Граната'),
('Junior Specialist Diploma ; E16 068026; dated from 30/06/ 2016; Issued by College of Electronic devices of Ivano-Frankivsk National Technical Oil and Gas University', 'Диплом молодшого спеціаліста ; E16 068026; 30.06.2016; Ким видано:Коледж електронних приладів Івано-Франківського національного технічного університету нафти і газу'),
('Junior Specialist Diploma; KV 43304328; dated from 05/06/2012; issued by Information Systems and Technologies College of Kyiv National Economic Vadym Hetman University', 'Диплом молодшого спеціаліста ; KB 43304328; 05.06.2012; Ким видано:Коледж інфомаційних систем і технологій Державного вищого навчального закладу "Київський національний економічний університет імені Вадима Гетьмана"'),
('Junior Specialist Diploma ; E15 008162; dated from 30/05/2015; issued by College of electronic devices Ivano-Frankivsk national technical oil and gas university', 'Диплом молодшого спеціаліста ; E15 008162; 30.06.2015; Ким видано:Коледж електронних приладів Івано-Франківського національного технічного університету нафти і газу'),
('Secondary School Leaving Certificate with honors VA 46836078; dated from 31.05.2014 issued by Mykytynci Secondary School of I-III degrees of Ivano-Frankivsk Council', 'Атестат про повну загальну середню освіту (Золота медаль) ; BA 46836078; 31.05.2014; Ким видано:Микитинецька загальноосвітня школа I-III ступенів Івано-Франківської міської ради Івано-Франківської області'),
('Secondary School Leaving Certificate VA 46253904 issued by Ivano-Frankivsk specialized school of I-III degrees № 5 with in-depth study of German language', 'Атестат про повну загальну середню освіту ; BA 46253904; 31.05.2014; Ким видано:Івано-Франківська спеціалізована школа I-III ступенів №5 з поглибленим вивченням німецької мови Івано-Франківської міської ради Івано-Франківської області'),
('Secondary School Leaving Certificate TE 46792135 issued by Zolotyj Potic school of I-III degrees', 'Атестат про повну загальну середню освіту ; TE 46792135; 31.05.2014; Ким видано:Золотопотіцька загальноосвітня школа I-III ступенів'),
('Junior Specialist Diploma E E15 111754 issued by Ivano-Frankivsk Professional Ivano-Frankivsk Higher professional school of equipment service', 'Диплом молодшого спеціаліста ; E15 111754; 30.06.2015; Ким видано:Державний професійно-технічний навчальний заклад "Івано-Франківське вище професійне училище сервісного обслуговування техніки"'),
('Secondary School Leaving Certificate VA 46253909 dated from 31/05/2014 issued by Ivano-Frankovsk Secondary School №5 with in depth study of German', 'Атестат про повну загальну середню освіту ; BA 46253909; 31.05.2014; Ким видано:Івано-Франківська спеціалізована школа I-III ступенів №5 з поглибленим вивченням німецької мови Івано-Франківської міської ради Івано-Франківської області'),
('Secondary School Leaving Certificate of a comprehensive general knowledge of the world; BA 46259578; Dated from 31/05/2014; issued by Ivano-Frankivsk Physical and Technical Boarding School if Ivano-Frankivsk regional Council', 'Атестат про повну загальну середню освіту ; BA 46259578; 31.05.2014; Ким видано:Івано-Франківський фізико-технічний ліцей-інтернат Івано-Франківської обласної ради'),
('Secondary School Leaving Certificate (Gold Medal); BA 46836575; dated from 31/05/ 2014; issued by Secondary school of I-III degrees of Kosiv district council in Ivano-Frankivsk region', 'Атестат про повну загальну середню освіту (Золота медаль) ; BA 46836575; 31.05.2014; Ким видано:Яблунівська загальноосвітня школа I-III ступенів Косівської районної ради Івано-Франківської області'),
('Secondary School Leaving Certificate VA 46836429(Gold Medal); dated from 31/05/ 2014; issued by Galych district council in Ivano-Frankivsk', 'Атестат про повну загальну середню освіту (Золота медаль) ; BA 46836429; 31.05.2014; Ким видано:Галицька гімназія Галицької районної ради Івано-Франківської області'),
('Secondary School Leaving Certificate; AK 46250661; dated from 31/05/ 2014; issued by Bilyn Secondary school of I-ІІІ degrees of Rakhiv district council', 'Атестат про повну загальну середню освіту ; AK 46250661; 31.05.2014; Ким видано:Білинська загальноосвітня школа I-IІI ступенів Рахівської районної ради Закарпатської області'),
('Junior Specialist Diploma E E15 045271 dated from 03.07.2015 issued by Ivano-Frankivsk Professional Financial and commercial cooperative Stepana Granata College', 'Диплом молодшого спеціаліста ; E15 045271; 03.07.2015; Ким видано:Івано-Франківський фінансово-комерційний кооперативний коледж імені С. Граната'),
('Secondary School Leaving Certificate VA 462553579; dated from 31/05/ 2014; issued by Ivano-Frankivsk specialized Secondary School №11 with in depth study of English language', 'Атестат про повну загальну середню освіту ; BA 46253579; 31.05.2014; Ким видано:Івано-Франківська спеціалізована школа I-III ступенів №11 з поглибленим вивченням англійської мови Івано-Франківської міської ради Івано-Франківської області'),
('Secondary School Leaving Certificate VA 46259597; dated from 31/05/ 2014; Issued by Ivano-Frankivsk Physical and Technical Boarding School', 'Атестат про повну загальну середню освіту ; BA 46259597; 31.05.2014; Ким видано:Івано-Франківський фізико-технічний ліцей-інтернат Івано-Франківської обласної ради'),
('Secondary School Leaving Certificate with honors VA 46837003; dated from 31/05/ 2014; Issued by Ivano-Frankivsk Physical and Technical Boarding Bohorodchany School', 'Атестат про повну загальну середню освіту (Срібна медаль) ; BA 46837003; 31.05.2014; Ким видано:Іваниківська загальноосвітня школа I-III ступенів Богородчанської районної ради Івано-Франківської області'),
('Secondary school Leaving Certificate VA 46258104; dated from 31.05.2014 issued by Viljshanytsia Secondary School of I-III degrees', 'Атестат про повну загальну середню освіту ; BA 46258104; 31.05.2014; Ким видано:Вільшаницька загальноосвітня школа I-III ступенів Тисменицької районної ради Івано-Франківської області'),
('Secondary School Leaving Certificate VA 35863399; dated from 23.06.2009 issued by Secondary School №22', 'Атестат про повну загальну середню освіту ; BA 35863399; 23.06.2009; Ким видано:Івано-Франківською ЗОШ І-ІІІ ступенів №22'),
('Secondary School Leaving Certificate VA 35863399; dated from 23.06.2009 issued by Secondary School №22', 'Атестат про повну загальну середню освіту ; BA 46253582; 31.05.2014; Ким видано:Івано-Франківська спеціалізована школа I-III ступенів №11 з поглибленим вивченням англійської мови Івано-Франківської міської ради Івано-Франківської області'),
('Secondary School Leaving Certificate VA 46254508; dated from 31.05.2014 issued by Molodkiv Secondary School', 'Атестат про повну загальну середню освіту ; BA 46254508; 31.05.2014; Ким видано:Молодківська загальноосвітня школа I-III ступенів Надвірнянської районної ради Івано-Франківської області'),
('Secondary School Leaving Certificate VA 46251661; dated from 31.05.2014 issued by Solotvyn educational Complex Secondary School of I-III degrees-lyceum', 'Атестат про повну загальну середню освіту ; BA 46251661; 31.05.2014; Ким видано:Солотвинський навчально-виховний комплекс "загальноосвітня школа I-III ступенів - ліцей" Богородчанської районної ради Івано-Франківської області'),
('Secondary School Leaving Certificate VA 46254129 dated from 31/05/2014 issued by Nadvirna lyceum in Ivano-Frankivsk region', 'Атестат про повну загальну середню освіту ; BA 46254129; 31.05.2014; Ким видано:Надвірнянський ліцей Надвірнянської районної ради Івано-Франківської області'),
('Secondary School Leaving Certificate VA 46254076; dated from 31/05/ 2014; Ivano-Frankivsk Natural sciences and mathematical', 'Атестат про повну загальну середню освіту ; BA 46254076; 31.05.2014; Ким видано:Івано-Франківський природничо-математичний ліцей Івано-Франківської міської ради Івано-Франківської області'),
('Secondary School Leaving Certificate VA 46251667; dated from 31/05/ 2014; Issued by: Solotvyn educational complex "secondary school of I-III degrees- lyceum "Bogorodchany district council in Ivano-Frankivsk region', 'Атестат про повну загальну середню освіту ; BA 46251667; 31.05.2014; Ким видано:Солотвинський навчально-виховний комплекс "загальноосвітня школа I-III ступенів - ліцей" Богородчанської районної ради Івано-Франківської області'),
('Secondary School Leaving Certificate (Gold Medal); VA 46836071; dated from 31/05/ 2014; Issued by: Ivano-Frankivsk secondary school of I-III degrees №22', 'Атестат про повну загальну середню освіту (Золота медаль) ; BA 46836071; 31.05.2014; Ким видано:Івано-Франківська загальноосвітня школа I-III ступенів №22 Івано-Франківської міської ради Івано-Франківської області'),
('Secondary School Leaving Certificate (Gold Medal); VA 46836071; dated from 31/05/ 2014; Issued by: Ivano-Frankivsk secondary school of I-III degrees №22', 'Атестат про повну загальну середню освіту ; BA 46255312; 31.05.2014; Ким видано:Ліснохлібичинська загальноосвітня школа I-III ступенів Коломийської районної ради Івано-Франківської області'),
('Secondary School Leaving Certificate; VA 18811294; dated from 21/06/ 2002; issued by Ivano-Frankivsk Secondary School №12', 'Атестат про повну загальну середню освіту ; BA 18811294; 21.06.2002; Ким видано:Загальноосвітня школа І-ІІІ ступенів №12 м.Івано-Франківськ'),
('Junior Specialist Diploma E15 045277; dated from 03.07.2015 issued by Ivano-Frankivsk financial and commercial cooperative Stepana Granata college', 'Диплом молодшого спеціаліста ; E15 045277; 03.07.2015; Ким видано:Івано-Франківський фінансово-комерційний кооперативний коледж імені С. Граната'),
('Secondary School Leaving Certificate; VA 46836450 dated from 31/05/2014; issued by Nastashyn Secondary school of I-III degrees №2 Galych district', 'Атестат про повну загальну середню освіту (Золота медаль) ; BA 46836450; 31.05.2014; Ким видано:Насташинська загальноосвітня школа I-III ступенів Галицької районної ради Івано-Франківської області'),
('Secondary School Leaving Certificate; VA 46256936 dated from 31/05/2014; issued by Pereginsk secondary school of I-III degrees №2 Ivano-Frankivsk Region', 'Атестат про повну загальну середню освіту ; BA 46256936; 31.05.2014; Ким видано:Перегінська загальноосвітня школа I-III ступенів №2 Рожнятівської районної ради Івано-Франківської області'),
('Junior Specialist Diploma ; E15 045280; dated from 03/07/ 2015; issued by Ivano-Frankivsk financial and commercial cooperative Stepana Granata college', 'Диплом молодшого спеціаліста ; E15 045280; 03.07.2015; Ким видано:Івано-Франківський фінансово-комерційний кооперативний коледж і<NAME>'),
('Junior Specialist Diploma ; E15 008201; dated from 30/05/ 2015; issued by College of Electronic devices of Ivano-Frankivsk national technical university oil and gas', 'Диплом молодшого спеціаліста ; E15 008201; 30.06.2015; Ким видано:Коледж електронних приладів Івано-Франківського національного технічного університету нафти і газу'),
(
'Secondary School Leaving Certificate; VA 46253336; dated from 31/05/2014; issued by Ivano-Frankivsk secondary school of I-III degrees №10',
'Атестат про повну загальну середню освіту ; BA 46253336; 31.05.2014; Ким видано:Івано-Франківська загальноосвітня школа I-III ступенів №10 Івано-Франківської міської ради Івано-Франківської області'),
(
'Secondary School Leaving Certificate; VA 46251954; dated from 31/05/ 2014; issued by Gorodenka high school of І-ІІІ degrees №1',
'Атестат про повну загальну середню освіту ; BA 46251954; 31.05.2014; Ким видано:Городенківська загальноосвітня школа І-ІІІ ступенів №1'),
(
'Secondary School Leaving Certificate; VA 46257367; dated from 31/05/ 2014; issued by Nizhnostrutyn secondary school of I-III degrees of Rozhnyatov district council in Ivano-Frankivsk region',
'Атестат про повну загальну середню освіту ; BA 46257367; 31.05.2014; Ким видано:Нижньострутинська загальноосвітня школа I-III ступенів Рожнятівської районної ради Івано-Франківської області'),
(
'Junior Specialist Diploma; E15 045261; dated from 3/07/ 2015; issued by Ivano-Frankivsk financial and commercial cooperative Stepana Granata college',
'Диплом молодшого спеціаліста ; E15 045261; 03.07.2015; Ким видано:Івано-Франківський фінансово-комерційний кооперативний коледж імені С. Граната'),
(
'Junior Specialist Diploma; E15 045281; dated from 3/07/ 2015; issued by Ivano-Frankivsk financial and commercial cooperative Stepana Granata college',
'Диплом молодшого спеціаліста ; E15 045281; 03.07.2015; Ким видано:Івано-Франківський фінансово-комерційний кооперативний коледж імені С. Граната'),
(
'Secondary School Leaving Certificate RN 46608727 issued by Vashkivtsi Secondary school of I-III ступенів Chernivtsi Region',
'Атестат про повну загальну середню освіту ; PH 46608727; 31.05.2014; Ким видано:Вашківецька загальноосвітня школа I-III ступенів Вашківецької міської ради Вижницького району Чернівецької області'),
(
'Secondary School Leaving Certificate VA 46251608 issued by Bohorodchany Secondary school of I-III ступенів №2 Ivano-Frankivsk Region',
'Атестат про повну загальну середню освіту ; BA 46251608; 31.05.2014; Ким видано:Богородчанська загальноосвітня школа I-III ступенів №2 Богородчанської районної ради Івано-Франківської області'),
('Secondary School Leaving Certificate AK 37720857 issued by Mukachevo Higher professional School number 3',
'Атестат про повну загальну середню освіту ; AK 37720857; 12.02.2010; Ким видано:Вище професійне училище №3 м. Мукачево');
|
// Copyright (C) MongoDB, Inc. 2017-present.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
package command
import (
"context"
"github.com/rentiansheng/bk_bsonbson"
"github.com/rentiansheng/bk_bsonmongo/writeconcern"
"github.com/rentiansheng/bk_bson/x/bsonx"
"github.com/rentiansheng/bk_bsonx/mongo/driver/session"
"github.com/rentiansheng/bk_bsonx/network/description"
"github.com/rentiansheng/bk_bsonx/network/result"
"github.com/rentiansheng/bk_bsonx/network/wiremessage"
)
// Update represents the update command.
//
// The update command updates a set of documents with the database.
type Update struct {
	ContinueOnError bool                         // keep processing remaining documents after a write error
	Clock           *session.ClusterClock        // cluster clock for gossiping cluster time
	NS              Namespace                    // target database/collection
	Docs            []bsonx.Doc                  // update statements, one document per update
	Opts            []bsonx.Elem                 // command options; some are re-applied per document (see encodeBatch)
	WriteConcern    *writeconcern.WriteConcern   // write concern attached to each batch
	Session         *session.Client              // session the command runs under

	batches []*WriteBatch // populated by encode; one entry per size-limited batch
	result  result.Update // populated by decode
	err     error         // deferred encode/decode error, surfaced by Result/Err
}
// Encode will encode this command into a wire message for the given server description.
func (u *Update) Encode(desc description.SelectedServer) ([]wiremessage.WireMessage, error) {
	if err := u.encode(desc); err != nil {
		return nil, err
	}
	return batchesToWireMessage(u.batches, desc)
}
// encode splits u.Docs into batches that fit the server's batch-count and
// document-size limits, encodes each one, and appends the results to u.batches.
func (u *Update) encode(desc description.SelectedServer) error {
	groups, err := splitBatches(u.Docs, int(desc.MaxBatchCount), int(desc.MaxDocumentSize))
	if err != nil {
		return err
	}
	for _, group := range groups {
		batch, err := u.encodeBatch(group, desc)
		if err != nil {
			return err
		}
		u.batches = append(u.batches, batch)
	}
	return nil
}
// encodeBatch builds a single WriteBatch for one set of update documents.
// Per-document options (upsert, collation, arrayFilters) are appended to a
// copy of each document; all other options go on the command itself.
func (u *Update) encodeBatch(docs []bsonx.Doc, desc description.SelectedServer) (*WriteBatch, error) {
	// Work on copies so appending options never mutates the caller's documents.
	dupDocs := make([]bsonx.Doc, 0, len(docs))
	for _, d := range docs {
		dupDocs = append(dupDocs, d.Copy())
	}

	var cmdOpts []bsonx.Elem
	for _, opt := range u.Opts {
		switch opt.Key {
		case "upsert", "collation", "arrayFilters":
			// These options are encoded on each individual document.
			for i := range dupDocs {
				dupDocs[i] = append(dupDocs[i], opt)
			}
		default:
			cmdOpts = append(cmdOpts, opt)
		}
	}

	cmd, err := encodeBatch(dupDocs, cmdOpts, UpdateCommand, u.NS.Collection)
	if err != nil {
		return nil, err
	}

	return &WriteBatch{
		&Write{
			Clock:        u.Clock,
			DB:           u.NS.DB,
			Command:      cmd,
			WriteConcern: u.WriteConcern,
			Session:      u.Session,
		},
		len(docs),
	}, nil
}
// Decode will decode the wire message using the provided server description. Errors during decoding
// are deferred until either the Result or Err methods are called.
func (u *Update) Decode(desc description.SelectedServer, wm wiremessage.WireMessage) *Update {
	rdr, err := (&Write{}).Decode(desc, wm).Result()
	if err == nil {
		return u.decode(desc, rdr)
	}
	u.err = err
	return u
}
// decode unmarshals the server's raw reply into u.result, recording any
// unmarshalling failure on u.err for later retrieval via Result or Err.
func (u *Update) decode(desc description.SelectedServer, reply bson.Raw) *Update {
	u.err = bson.Unmarshal(reply, &u.result)
	return u
}
// Result returns the result of a decoded wire message and server description.
func (u *Update) Result() (result.Update, error) {
	if u.err == nil {
		return u.result, nil
	}
	return result.Update{}, u.err
}
// Err returns the error set on this command.
func (u *Update) Err() error {
	return u.err
}
// RoundTrip handles the execution of this command using the provided wiremessage.ReadWriter.
func (u *Update) RoundTrip(
	ctx context.Context,
	desc description.SelectedServer,
	rw wiremessage.ReadWriter,
) (result.Update, error) {
	// Lazily encode if Encode was never called on this command.
	if u.batches == nil {
		if err := u.encode(desc); err != nil {
			return result.Update{}, err
		}
	}

	res, leftover, err := roundTripBatches(
		ctx, desc, rw,
		u.batches,
		u.ContinueOnError,
		u.Session,
		UpdateCommand,
	)

	// If there are leftover batches, save them for retry.
	if leftover != nil {
		u.batches = leftover
	}

	if err != nil {
		return result.Update{}, err
	}

	return res.(result.Update), nil
}
|
/*
 * Logic Circuit to check if a binary number is even or odd.
 *
 * Outputs:
 *   D1: True if number is even.
 *   D2: True if number is odd.
 */
// XOR of the two low input bits.
Xor(a=in[0],b=in[1],c=in1);
// XOR of the two high input bits.
Xor(d=in[2],e=in[3],f=in2);
// D1 = XOR of all four bits; D2 = its complement via XNOR.
// NOTE(review): this computes the bit-parity of in[0..3], while evenness of
// the numeric value depends only on in[0] — confirm the intended semantics.
// NOTE(review): the pin names (a,b,c / d,e,f / g,h,i,j) don't follow the
// conventional Xor(a=,b=,out=) interface — verify against the chip definitions.
Xor(g=in1,h=in2,out[0]=D1);
Xnor(i=in1,j=in2,out[1]=D2);
|
// Return the sum of two numbers.
function add(x, y) {
  return x + y;
}
// Return the difference of two numbers.
function subtract(x, y) {
  return x - y;
}
// Return the product of two numbers.
function multiply(x, y) {
  return x * y;
}
// Return the quotient of two numbers. Standard JavaScript division
// semantics apply: dividing a nonzero number by 0 yields Infinity.
function divide(x, y) {
  return x / y;
}
/**
 * Applies a binary arithmetic operator to two operands.
 *
 * @param {number} a - Left operand.
 * @param {string} operator - One of '+', '-', '*', '/'.
 * @param {number} b - Right operand.
 * @returns {number} The result of applying the operator.
 * @throws {Error} If the operator is not one of the four supported.
 */
function calculator(a, operator, b) {
    switch(operator) {
        case '+':
            return add(a, b);
        case '-':
            return subtract(a, b);
        case '*':
            return multiply(a, b);
        case '/':
            return divide(a, b);
        default:
            // Previously an unknown operator silently returned undefined;
            // fail loudly instead so caller bugs surface immediately.
            throw new Error('Unsupported operator: ' + operator);
    }
}
// Demonstrate the calculator with a simple addition.
const result = calculator(10, '+', 5);
console.log(result); // 15
|
#!/bin/bash
# Compare the runtime specVersion running on the devnet with the latest
# aleph-node source on GitHub; when the source is newer, download the
# pre-built "aleph-runtime" artifact and submit a runtime update.
#
# Requires: curl, jq, git, unzip, a ./_netrc file with GitHub API
# credentials, and the RUNTIME_PHRASE environment variable holding the
# sudo phrase used by ./send_runtime.
set -e
NETRC_CREDS="./_netrc"        # netrc file used to authenticate against the GitHub API
RUNTIME_TOOL="./send_runtime" # helper binary that sends the new runtime to the chain
SUDO_PHRASE=${RUNTIME_PHRASE} # sudo phrase, taken from the environment
RPC_ADDR="rpc.dev.azero.dev"
WS_ADDR="ws.dev.azero.dev"
echo -n $(date +"%d-%b-%y %T") " Checking runtime version on devnet: "
# NOTE(review): if the RPC call fails or returns null, OLD_VER becomes "null"
# and the arithmetic comparisons below abort via `set -e` — confirm that is
# the desired failure mode.
OLD_VER=$(curl -sS -H "Content-Type: application/json" -d '{"id":1, "jsonrpc":"2.0", "method": "state_getRuntimeVersion"}' $RPC_ADDR | jq .result.specVersion)
echo "$OLD_VER"
git clone -q https://github.com/Cardinal-Cryptography/aleph-node.git aleph-node
echo -n $(date +"%d-%b-%y %T") " Checking runtime version in latest source: "
NEW_VER=$(grep "spec_version:" aleph-node/bin/runtime/src/lib.rs | grep -o '[0-9]*')
echo "$NEW_VER"
if (( "$NEW_VER" == "$OLD_VER" )); then
    echo $(date +"%d-%b-%y %T") " No update needed"
    exit 0
fi
if (( "$NEW_VER" > "$OLD_VER" )); then
    echo -n $(date +"%d-%b-%y %T") " Fetching latest runtime from github..."
    # Pick the newest artifact named "aleph-runtime" from the Actions API.
    ALEPH_RUNTIME_URL=$(curl -sS -H "Accept: application/vnd.github.v3+json" https://api.github.com/repos/Cardinal-Cryptography/aleph-node/actions/artifacts | jq '.artifacts' | jq -r '.[] | select(.name=="aleph-runtime") | .archive_download_url' | head -n 1)
    # Quote the URL and netrc path: unquoted expansions are subject to
    # word-splitting and globbing.
    curl -sS --netrc-file "$NETRC_CREDS" -L -o aleph-runtime.zip "$ALEPH_RUNTIME_URL"
    echo "completed"
    mkdir runtime
    unzip aleph-runtime.zip -d runtime
    NEW_RUNTIME=runtime/$(ls runtime)
    echo -n $(date +"%d-%b-%y %T") " Sending runtime update... "
    $RUNTIME_TOOL --url "$WS_ADDR" --sudo-phrase "$SUDO_PHRASE" "$NEW_RUNTIME"
    echo "completed"
    echo -n $(date +"%d-%b-%y %T") " Checking new runtime version on devnet: "
    UPD_VER=$(curl -sS -H "Content-Type: application/json" -d '{"id":1, "jsonrpc":"2.0", "method": "state_getRuntimeVersion"}' $RPC_ADDR | jq .result.specVersion)
    echo "$UPD_VER"
    if (( $NEW_VER != $UPD_VER )); then
        echo $(date +"%d-%b-%y %T") " ERROR: runtime update failed"
        exit 1
    fi
    echo $(date +"%d-%b-%y %T") " SUCCESS: runtime updated"
fi
|
#=================================================
# SET ALL CONSTANTS
#=================================================
# App instance name injected by the YunoHost packaging helpers.
app=$YNH_APP_INSTANCE_NAME
# Database name and user both reuse the instance name.
dbname=$app
dbuser=$app
# Installation prefix for the app binary and its configuration.
final_path="/opt/$app"
# Home directory holding the repositories and user data.
DATADIR="/home/$app"
REPO_PATH="$DATADIR/repositories"
DATA_PATH="$DATADIR/data"
# Detect the system architecture to download the right tarball
# NOTE: `uname -m` is more accurate and universal than `arch`
# See https://en.wikipedia.org/wiki/Uname
# NOTE(review): the first `grep 64` branch also matches "aarch64", so 64-bit
# ARM machines would be classified as "x86-64" — confirm whether arm64
# hardware is supported and which tarball name it should map to.
if [ -n "$(uname -m | grep 64)" ]; then
architecture="x86-64"
elif [ -n "$(uname -m | grep 86)" ]; then
architecture="i386"
elif [ -n "$(uname -m | grep armv7)" ]; then
architecture="armv7"
elif [ -n "$(uname -m | grep arm)" ]; then
architecture="arm"
else
ynh_die --message "Unable to detect your achitecture, please open a bug describing \
your hardware and the result of the command \"uname -m\"." 1
fi
#=================================================
# DEFINE ALL COMMON FONCTIONS
#=================================================
# Create every directory the app needs: data, config, repositories,
# avatars, attachments and the log directory.
create_dir() {
    local dir
    for dir in \
        "$final_path/data" \
        "$final_path/custom/conf" \
        "$REPO_PATH" \
        "$DATA_PATH/avatars" \
        "$DATA_PATH/attachments" \
        "/var/log/$app"
    do
        mkdir -p "$dir"
    done
}
# Install the nginx configuration, uncommenting the sub-path-only block
# when the app is served from a sub-path rather than the web root.
config_nginx() {
    if [ "$path_url" != "/" ]; then
        ynh_replace_string --match_string "^#sub_path_only" --replace_string "" --target_file "../conf/nginx.conf"
    fi
    ynh_add_nginx_config
}
# Render app.ini from the packaged template: copy it into place, substitute
# the __PLACEHOLDER__ tokens for URL, database, paths and ports, then store
# its checksum so later upgrades can detect manual edits.
config_gitea() {
	# Read the SSH port from sshd_config so clone URLs advertise it correctly.
	# NOTE(review): `grep -o "\d+"` prints every digit run it finds; if
	# sshd_config contains several matching lines this variable becomes
	# multi-line — confirm a single Port directive is guaranteed.
	ssh_port=$(grep -P "Port\s+\d+" /etc/ssh/sshd_config | grep -P -o "\d+")
	# Preserve a backup if the current app.ini was modified by hand.
	ynh_backup_if_checksum_is_different --file "$final_path/custom/conf/app.ini"
	cp ../conf/app.ini "$final_path/custom/conf"
	# Gitea's shell access requires a real login shell for the app user.
	usermod -s /bin/bash $app
	# __URL__ includes the sub-path (without trailing slash) unless the app
	# is installed at the web root.
	if [ "$path_url" = "/" ]
	then
		ynh_replace_string --match_string __URL__ --replace_string "$domain" --target_file "$final_path/custom/conf/app.ini"
	else
		ynh_replace_string --match_string __URL__ --replace_string "$domain${path_url%/}" --target_file "$final_path/custom/conf/app.ini"
	fi
	ynh_replace_string --match_string __REPOS_PATH__ --replace_string "$REPO_PATH" --target_file "$final_path/custom/conf/app.ini"
	ynh_replace_string --match_string __DB_PASSWORD__ --replace_string "$dbpass" --target_file "$final_path/custom/conf/app.ini"
	ynh_replace_string --match_string __DB_USER__ --replace_string "$dbuser" --target_file "$final_path/custom/conf/app.ini"
	ynh_replace_string --match_string __DOMAIN__ --replace_string "$domain" --target_file "$final_path/custom/conf/app.ini"
	ynh_replace_string --match_string __KEY__ --replace_string "$key" --target_file "$final_path/custom/conf/app.ini"
	ynh_replace_string --match_string __DATA_PATH__ --replace_string "$DATA_PATH" --target_file "$final_path/custom/conf/app.ini"
	ynh_replace_string --match_string __PORT__ --replace_string $port --target_file "$final_path/custom/conf/app.ini"
	ynh_replace_string --match_string __APP__ --replace_string $app --target_file "$final_path/custom/conf/app.ini"
	ynh_replace_string --match_string __SSH_PORT__ --replace_string $ssh_port --target_file "$final_path/custom/conf/app.ini"
	# Record the freshly-rendered file's checksum for future upgrade checks.
	ynh_store_file_checksum --file "$final_path/custom/conf/app.ini"
}
# Give the app user ownership of its install, home and log directories,
# then tighten the access modes (no world access anywhere).
set_permission() {
    local path
    for path in "$final_path" "/home/$app" "/var/log/$app"; do
        chown -R $app:$app "$path"
    done
    chmod u=rwX,g=rX,o= "$final_path"
    chmod u=rwx,g=rx,o= "$final_path/gitea"
    chmod u=rwx,g=rx,o= "$final_path/custom/conf/app.ini"
    chmod u=rwX,g=rX,o= "/home/$app"
    chmod u=rwX,g=rX,o= "/var/log/$app"
}
# Grant anonymous visitors access to the app when it was installed as public.
set_access_settings() {
    if [ "$is_public" == '1' ]; then
        ynh_permission_update --permission "main" --add "visitors"
    fi
}
|
/***********************************************************************************************************************
* OpenStudio(R), Copyright (c) 2008-2021, Alliance for Sustainable Energy, LLC, and other contributors. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
* following conditions are met:
*
* (1) Redistributions of source code must retain the above copyright notice, this list of conditions and the following
* disclaimer.
*
* (2) Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided with the distribution.
*
* (3) Neither the name of the copyright holder nor the names of any contributors may be used to endorse or promote products
* derived from this software without specific prior written permission from the respective party.
*
* (4) Other than as required in clauses (1) and (2), distributions in any form of modifications or other derivative works
* may not use the "OpenStudio" trademark, "OS", "os", or any other confusingly similar designation without specific prior
* written permission from Alliance for Sustainable Energy, LLC.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND ANY CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER(S), ANY CONTRIBUTORS, THE UNITED STATES GOVERNMENT, OR THE UNITED
* STATES DEPARTMENT OF ENERGY, NOR ANY OF THEIR EMPLOYEES, BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
***********************************************************************************************************************/
#include <gtest/gtest.h>
#include "EnergyPlusFixture.hpp"
#include "../ForwardTranslator.hpp"
#include "../../model/AvailabilityManagerHybridVentilation.hpp"
#include "../../model/Model.hpp"
#include "../../model/AirLoopHVAC.hpp"
#include "../../model/ScheduleConstant.hpp"
#include "../../model/AirTerminalSingleDuctConstantVolumeNoReheat.hpp"
#include "../../model/ThermalZone.hpp"
#include "../../model/ScheduleConstant.hpp"
#include "../../model/Schedule.hpp"
#include "../../model/CurveLinear.hpp"
#include "../../model/Curve.hpp"
#include "../../model/ZoneVentilationDesignFlowRate.hpp"
#include <utilities/idd/AvailabilityManager_HybridVentilation_FieldEnums.hxx>
#include <utilities/idd/IddEnums.hxx>
using namespace openstudio::energyplus;
using namespace openstudio::model;
using namespace openstudio;
// Builds an AvailabilityManagerHybridVentilation with every field populated,
// forward-translates the model, and verifies each field of the resulting
// AvailabilityManager:HybridVentilation IDF object round-trips correctly.
TEST_F(EnergyPlusFixture, ForwardTranslator_AvailabilityManagerHybridVentilation) {
  Model m;
  AvailabilityManagerHybridVentilation avm(m);
  avm.setName("My AvailabilityManagerHybridVentilation");
  // Attach the AVM to an air loop so the translator emits an HVAC Air Loop Name.
  AirLoopHVAC a(m);
  a.addAvailabilityManager(avm);
  ASSERT_TRUE(avm.loop());
  EXPECT_EQ(a, avm.loop().get());
  // Controlled Zone: Optional Object
  ThermalZone z(m);
  z.setName("Zone 1");
  Schedule alwaysOn = m.alwaysOnDiscreteSchedule();
  AirTerminalSingleDuctConstantVolumeNoReheat atu(m, alwaysOn);
  EXPECT_TRUE(a.addBranchForZone(z, atu));
  EXPECT_TRUE(avm.setControlledZone(z));
  ASSERT_TRUE(avm.controlledZone());
  EXPECT_EQ(z, avm.controlledZone().get());
  // Ventilation Control Mode Schedule: Required Object
  ScheduleConstant ventilation_control_sch(m);
  ventilation_control_sch.setName("Ventilation Control Mode Schedule");
  EXPECT_TRUE(avm.setVentilationControlModeSchedule(ventilation_control_sch));
  EXPECT_EQ(ventilation_control_sch, avm.ventilationControlModeSchedule());
  // Use Weather File Rain Indicators: Required Boolean
  EXPECT_TRUE(avm.setUseWeatherFileRainIndicators(true));
  EXPECT_TRUE(avm.useWeatherFileRainIndicators());
  // Maximum Wind Speed: Required Double
  EXPECT_TRUE(avm.setMaximumWindSpeed(40.0));
  EXPECT_EQ(40.0, avm.maximumWindSpeed());
  // Minimum Outdoor Temperature: Required Double
  EXPECT_TRUE(avm.setMinimumOutdoorTemperature(15));
  EXPECT_EQ(15, avm.minimumOutdoorTemperature());
  // Maximum Outdoor Temperature: Required Double
  EXPECT_TRUE(avm.setMaximumOutdoorTemperature(35.0));
  EXPECT_EQ(35.0, avm.maximumOutdoorTemperature());
  // Minimum Outdoor Enthalpy: Required Double
  EXPECT_TRUE(avm.setMinimumOutdoorEnthalpy(20000.0));
  EXPECT_EQ(20000.0, avm.minimumOutdoorEnthalpy());
  // Maximum Outdoor Enthalpy: Required Double
  EXPECT_TRUE(avm.setMaximumOutdoorEnthalpy(30000.0));
  EXPECT_EQ(30000.0, avm.maximumOutdoorEnthalpy());
  // Minimum Outdoor Dewpoint: Required Double
  EXPECT_TRUE(avm.setMinimumOutdoorDewpoint(17.0));
  EXPECT_EQ(17.0, avm.minimumOutdoorDewpoint());
  // Maximum Outdoor Dewpoint: Required Double
  EXPECT_TRUE(avm.setMaximumOutdoorDewpoint(30.0));
  EXPECT_EQ(30.0, avm.maximumOutdoorDewpoint());
  // Minimum Outdoor Ventilation Air Schedule: Optional Object but set in Ctor and non optional
  ScheduleConstant min_oa_sch(m);
  min_oa_sch.setName("Min OA Schedule");
  EXPECT_TRUE(avm.setMinimumOutdoorVentilationAirSchedule(min_oa_sch));
  EXPECT_EQ(min_oa_sch, avm.minimumOutdoorVentilationAirSchedule());
  // Opening Factor Function of Wind Speed Curve: Optional Object
  CurveLinear opening_factor_curve(m);
  opening_factor_curve.setName("Opening Factor Function of Wind Speed Curve");
  EXPECT_TRUE(avm.setOpeningFactorFunctionofWindSpeedCurve(opening_factor_curve));
  ASSERT_TRUE(avm.openingFactorFunctionofWindSpeedCurve());
  // AirflowNetwork Control Type Schedule: Optional Object
  ScheduleConstant afn_control_sch(m);
  afn_control_sch.setName("AirflowNetwork Control Type Schedule");
  EXPECT_TRUE(avm.setAirflowNetworkControlTypeSchedule(afn_control_sch));
  ASSERT_TRUE(avm.airflowNetworkControlTypeSchedule());
  // Simple Airflow Control Type Schedule: Optional Object
  ScheduleConstant simple_control_sch(m);
  simple_control_sch.setName("Simple Airflow Control Type Schedule");
  EXPECT_TRUE(avm.setSimpleAirflowControlTypeSchedule(simple_control_sch));
  ASSERT_TRUE(avm.simpleAirflowControlTypeSchedule());
  EXPECT_EQ(simple_control_sch, avm.simpleAirflowControlTypeSchedule().get());
  // ZoneVentilation Object: Optional Object
  ZoneVentilationDesignFlowRate zv(m);
  EXPECT_TRUE(avm.setZoneVentilationObject(zv));
  ASSERT_TRUE(avm.zoneVentilationObject());
  EXPECT_EQ(zv, avm.zoneVentilationObject().get());
  // Minimum HVAC Operation Time: Required Double
  EXPECT_TRUE(avm.setMinimumHVACOperationTime(0.1));
  EXPECT_EQ(0.1, avm.minimumHVACOperationTime());
  // Minimum Ventilation Time: Required Double
  EXPECT_TRUE(avm.setMinimumVentilationTime(0.2));
  EXPECT_EQ(0.2, avm.minimumVentilationTime());
  // ForwardTranslate: exactly one AVM object should come out of the model.
  ForwardTranslator forwardTranslator;
  Workspace w = forwardTranslator.translateModel(m);
  WorkspaceObjectVector idfObjs(w.getObjectsByType(IddObjectType::AvailabilityManager_HybridVentilation));
  EXPECT_EQ(1u, idfObjs.size());
  WorkspaceObject idf_avm(idfObjs[0]);
  EXPECT_EQ(avm.nameString(), idf_avm.nameString());
  // HVAC Air Loop Name
  EXPECT_EQ(a.nameString(), idf_avm.getString(AvailabilityManager_HybridVentilationFields::HVACAirLoopName).get());
  EXPECT_EQ(z.nameString(), idf_avm.getString(AvailabilityManager_HybridVentilationFields::ControlZoneName).get());
  EXPECT_EQ(ventilation_control_sch.nameString(),
            idf_avm.getString(AvailabilityManager_HybridVentilationFields::VentilationControlModeScheduleName).get());
  EXPECT_EQ("Yes", idf_avm.getString(AvailabilityManager_HybridVentilationFields::UseWeatherFileRainIndicators).get());
  EXPECT_EQ(40.0, idf_avm.getDouble(AvailabilityManager_HybridVentilationFields::MaximumWindSpeed).get());
  EXPECT_EQ(15.0, idf_avm.getDouble(AvailabilityManager_HybridVentilationFields::MinimumOutdoorTemperature).get());
  EXPECT_EQ(35.0, idf_avm.getDouble(AvailabilityManager_HybridVentilationFields::MaximumOutdoorTemperature).get());
  EXPECT_EQ(20000.0, idf_avm.getDouble(AvailabilityManager_HybridVentilationFields::MinimumOutdoorEnthalpy).get());
  EXPECT_EQ(30000.0, idf_avm.getDouble(AvailabilityManager_HybridVentilationFields::MaximumOutdoorEnthalpy).get());
  EXPECT_EQ(17.0, idf_avm.getDouble(AvailabilityManager_HybridVentilationFields::MinimumOutdoorDewpoint).get());
  EXPECT_EQ(30.0, idf_avm.getDouble(AvailabilityManager_HybridVentilationFields::MaximumOutdoorDewpoint).get());
  EXPECT_EQ(min_oa_sch.nameString(), idf_avm.getString(AvailabilityManager_HybridVentilationFields::MinimumOutdoorVentilationAirScheduleName).get());
  EXPECT_EQ(opening_factor_curve.nameString(),
            idf_avm.getString(AvailabilityManager_HybridVentilationFields::OpeningFactorFunctionofWindSpeedCurveName).get());
  EXPECT_EQ(afn_control_sch.nameString(),
            idf_avm.getString(AvailabilityManager_HybridVentilationFields::AirflowNetworkControlTypeScheduleName).get());
  EXPECT_EQ(simple_control_sch.nameString(),
            idf_avm.getString(AvailabilityManager_HybridVentilationFields::SimpleAirflowControlTypeScheduleName).get());
  EXPECT_EQ(zv.nameString(), idf_avm.getString(AvailabilityManager_HybridVentilationFields::ZoneVentilationObjectName).get());
  EXPECT_EQ(0.1, idf_avm.getDouble(AvailabilityManager_HybridVentilationFields::MinimumHVACOperationTime).get());
  EXPECT_EQ(0.2, idf_avm.getDouble(AvailabilityManager_HybridVentilationFields::MinimumVentilationTime).get());
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.