text stringlengths 1 1.05M |
|---|
def insertion_sort(arr):
    """Sort ``arr`` in place in ascending order using insertion sort.

    Stable; O(n^2) comparisons in the worst case, O(n) when the input is
    already sorted. Returns ``None`` (the list is mutated in place).
    """
    for index in range(1, len(arr)):
        value = arr[index]
        # Shift every element of the sorted prefix that is greater than
        # ``value`` one slot to the right, then drop ``value`` into the gap.
        pos = index
        while pos > 0 and arr[pos - 1] > value:
            arr[pos] = arr[pos - 1]
            pos -= 1
        arr[pos] = value
import React, { Component } from 'react';
import { Button, Icon, Tab, Checkbox, Message } from 'semantic-ui-react';
import uuid from 'uuid';
import moment from 'moment';
import advisor from '../../../api/diagnostic/advisor/index';
import collection from '../../../api/diagnostic/collection/index';
import sentry from '../../../api/sentry/index';
import status from '../../../api/status/index';
import Spinner from '../../ui/scaffold/Spinner/Spinner';
import Advisor from '../advisor/Advisor';
import ConfigurationDiff from '../ConfigurationDiff/ConfigurationDiff';
import hoc from '../../higherOrderComponents';
import JSONDownload from '../../data/download/JSONDownload';
// Initial state of the "share diagnostics with Neo4j" checkbox.
const UPLOAD_DIAGNOSTICS_BY_DEFAULT = false;
class GeneratePackage extends Component {
state = {
key: uuid.v4(),
message: null,
error: null,
loading: false,
userIsAdmin: false,
upload: UPLOAD_DIAGNOSTICS_BY_DEFAULT,
headers: [
{ label: 'domain', key: 'domain' },
{ label: 'node', key: 'node' },
{ label: 'key', key: 'key' },
{ label: 'value', key: 'value' },
],
};
generatePackage = () => {
this.setState({
message: status.message('Generating package', 'Please wait while data is gathered'),
loading: true,
});
const fail = err => {
sentry.error(err);
this.setState({
diagnosticData: null,
dataGenerated: null,
loading: false,
error: status.message('Failed to generate package', `${err}`),
});
};
try {
return collection.runDiagnostics(window.halinContext)
.then(data => {
this.setState({
loading: false,
diagnosticData: data,
dataGenerated: moment().format('YYYY-MM-DD-HH-mm-ss'),
message: null,
error: null,
});
if (this.state.upload) {
return this.uploadDiagnostics(data);
}
})
.catch(err => fail(err));
} catch (err) {
fail(err);
}
sentry.info('Generating diagnostic package');
};
renderDiagnosticAdvice() {
if (!this.state.diagnosticData) {
return '';
}
const panes = [
{
menuItem: 'Advisor',
render: () =>
<Tab.Pane>
<Advisor
key={uuid.v4()}
data={advisor.generateRecommendations(this.state.diagnosticData)}
/>
</Tab.Pane>,
},
];
if (window.halinContext.isCluster()) {
panes.push({
menuItem: 'Configuration Diff',
render: () =>
<Tab.Pane>
<ConfigurationDiff data={this.state.diagnosticData} />
</Tab.Pane>
});
}
panes.push({
menuItem: 'Package Viewer',
render: () =>
<Tab.Pane>
<div className='PackageViewer' style={{textAlign:'left'}}>
<pre>{JSON.stringify(this.state.diagnosticData.halin, null, 2)}</pre>
</div>
</Tab.Pane>
})
return (<Tab menu={{ borderless: true, attached: false, tabular: false }} panes={panes} />);
}
componentDidMount() {
if (window.halinContext.getCurrentUser().roles.indexOf('admin') === -1) {
return this.setState({ userIsAdmin: false });
}
return this.setState({ userIsAdmin: true });
}
toggleUpload(event, data) {
// sentry.fine(event, data);
this.setState({
upload: data.checked,
});
}
uploadDiagnostics(pkg) {
const url = 'https://api.halin.graphapp.io/reporter-dev-report/';
return fetch(url, {
method: 'post',
mode: 'no-cors',
cache: 'no-cache',
headers: {
'Accept': 'application/json',
'Content-Type': 'application/json',
},
body: JSON.stringify(pkg),
})
.then(resp => sentry.fine('Reported diagnostic package', resp))
.catch(err => sentry.error('Failed to upload', err));
}
render() {
let message = status.formatStatusMessage(this);
return (
<div className='GeneratePackage'>
<div style={{ marginBottom: '15px' }}>
<Message compact success>
<div>
<Checkbox
checked={this.state.upload}
onClick={(event, data) => this.toggleUpload(event, data)}
label={{ children: 'Help improve Halin by sharing diagnostics with Neo4j' }}
>
</Checkbox>
</div>
</Message>
</div>
<Button primary disabled={this.state.loading}
onClick={this.generatePackage}>
<Icon name='cogs'/>
Run Diagnostics!
</Button>
{ this.state.diagnosticData ? (
<JSONDownload
data={this.state.diagnosticData}
title="Download Diagnostics"
filename={`neo4j-diagnostics-${this.state.dataGenerated}.json`}
/>
) : '' }
<div style={{
marginTop: '15px',
marginLeft: '50px',
marginRight: '50px',
marginBottom: '15px',
}}>
{ message }
</div>
{
this.state.loading ? <Spinner active={this.state.loading} /> : ''
}
{ this.renderDiagnosticAdvice() }
</div>
);
}
}
export default hoc.adminOnlyComponent(GeneratePackage, null, false); |
/* Blockquote button for hallo.js
 * Adapted from https://gist.github.com/SalahAdDin/347e4fab78a64eaadd5c
 */
(function() {
(function($) {
// Register an "IKS.blockquotebutton" jQuery UI widget; hallo.js discovers
// widgets in the IKS namespace and calls populateToolbar on each.
return $.widget("IKS.blockquotebutton", {
options: {
uuid: '',
editable: null
},
// Called by hallo.js with the toolbar element: appends a Blockquote
// button and wires up its click handler.
populateToolbar: function(toolbar) {
var button, widget;
// Capture the widget instance for use inside the click closure.
widget = this;
button = $('<span></span>');
button.hallobutton({
uuid: this.options.uuid,
editable: this.options.editable,
label: 'Blockquote',
icon: 'icon-openquote',
command: null
});
toolbar.append(button);
button.on('click', function() {
// Find the top-level block (direct child of .richtext) containing the
// selection end, wrap it in a <blockquote>, and mark the editable
// dirty so the change is persisted.
var node = widget.options.editable.getSelection();
var parent = $(node.endContainer).parentsUntil('.richtext').last();
$(parent).wrap('<blockquote></blockquote>');
widget.options.editable.setModified();
});
}
});
})(jQuery);
}).call(this); |
package com.xq.tmall.entity;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.Locale;
/**
* 订单实体类
*
*/
public class ProductOrder {
    // Field names keep their original snake_case because the generated
    // getter/setter names derived from them are part of the public interface.
    private Integer productOrder_id;                       // order ID
    private String productOrder_code;                      // order serial number
    private Address productOrder_address;                  // order address (region)
    private String productOrder_detail_address;            // detailed street address
    private String productOrder_post;                      // postal code
    private String productOrder_receiver;                  // receiver name
    private String productOrder_mobile;                    // receiver phone number
    private Date productOrder_pay_date;                    // payment date
    private Date productOrder_delivery_date;               // delivery date
    private Date productOrder_confirm_date;                // confirmation date
    private Byte productOrder_status;                      // order status code
    private User productOrder_user;                        // user owning this order
    private List<ProductOrderItem> productOrderItemList;   // order line items

    /** Timestamp pattern shared by all date-formatting getters. */
    private static final String DATE_PATTERN = "yyyy-MM-dd HH:mm:ss";

    /**
     * Formats {@code date} using {@link #DATE_PATTERN}, or returns
     * {@code null} when the date is unset. A fresh SimpleDateFormat is
     * created per call because the class is not thread-safe.
     */
    private static String formatDate(Date date) {
        return date == null ? null : new SimpleDateFormat(DATE_PATTERN, Locale.UK).format(date);
    }

    @Override
    public String toString() {
        return "ProductOrder{" +
                "productOrder_id=" + productOrder_id +
                ", productOrder_code='" + productOrder_code + '\'' +
                ", productOrder_address=" + productOrder_address +
                ", productOrder_detail_address='" + productOrder_detail_address + '\'' +
                ", productOrder_post='" + productOrder_post + '\'' +
                ", productOrder_receiver='" + productOrder_receiver + '\'' +
                ", productOrder_mobile='" + productOrder_mobile + '\'' +
                ", productOrder_pay_date=" + productOrder_pay_date +
                ", productOrder_delivery_date=" + productOrder_delivery_date +
                ", productOrder_confirm_date=" + productOrder_confirm_date +
                ", productOrder_status=" + productOrder_status +
                ", productOrder_user=" + productOrder_user +
                ", productOrderItemList=" + productOrderItemList +
                '}';
    }

    public ProductOrder() {
    }

    /** Convenience constructor for orders without delivery/confirmation data. */
    public ProductOrder(Integer productOrder_id, String productOrder_code, Address productOrder_address, String productOrder_detail_address, String productOrder_post, String productOrder_receiver, String productOrder_mobile, Date productOrder_pay_date, Byte productOrder_status, User productOrder_user) {
        this.productOrder_id = productOrder_id;
        this.productOrder_code = productOrder_code;
        this.productOrder_address = productOrder_address;
        this.productOrder_detail_address = productOrder_detail_address;
        this.productOrder_post = productOrder_post;
        this.productOrder_receiver = productOrder_receiver;
        this.productOrder_mobile = productOrder_mobile;
        this.productOrder_pay_date = productOrder_pay_date;
        this.productOrder_status = productOrder_status;
        this.productOrder_user = productOrder_user;
    }

    /** Full constructor covering every field. */
    public ProductOrder(Integer productOrder_id, String productOrder_code, Address productOrder_address, String productOrder_detail_address, String productOrder_post, String productOrder_receiver, String productOrder_mobile, Date productOrder_pay_date, Date productOrder_delivery_date, Date productOrder_confirm_date, Byte productOrder_status, User productOrder_user, List<ProductOrderItem> productOrderItemList) {
        this.productOrder_id = productOrder_id;
        this.productOrder_code = productOrder_code;
        this.productOrder_address = productOrder_address;
        this.productOrder_detail_address = productOrder_detail_address;
        this.productOrder_post = productOrder_post;
        this.productOrder_receiver = productOrder_receiver;
        this.productOrder_mobile = productOrder_mobile;
        this.productOrder_pay_date = productOrder_pay_date;
        this.productOrder_delivery_date = productOrder_delivery_date;
        this.productOrder_confirm_date = productOrder_confirm_date;
        this.productOrder_status = productOrder_status;
        this.productOrder_user = productOrder_user;
        this.productOrderItemList = productOrderItemList;
    }

    public Integer getProductOrder_id() {
        return productOrder_id;
    }

    public ProductOrder setProductOrder_id(Integer productOrder_id) {
        this.productOrder_id = productOrder_id;
        return this;
    }

    public String getProductOrder_code() {
        return productOrder_code;
    }

    public ProductOrder setProductOrder_code(String productOrder_code) {
        this.productOrder_code = productOrder_code;
        return this;
    }

    public Address getProductOrder_address() {
        return productOrder_address;
    }

    public ProductOrder setProductOrder_address(Address productOrder_address) {
        this.productOrder_address = productOrder_address;
        return this;
    }

    public String getProductOrder_detail_address() {
        return productOrder_detail_address;
    }

    public ProductOrder setProductOrder_detail_address(String productOrder_detail_address) {
        this.productOrder_detail_address = productOrder_detail_address;
        return this;
    }

    public String getProductOrder_post() {
        return productOrder_post;
    }

    public ProductOrder setProductOrder_post(String productOrder_post) {
        this.productOrder_post = productOrder_post;
        return this;
    }

    public String getProductOrder_receiver() {
        return productOrder_receiver;
    }

    public ProductOrder setProductOrder_receiver(String productOrder_receiver) {
        this.productOrder_receiver = productOrder_receiver;
        return this;
    }

    public String getProductOrder_mobile() {
        return productOrder_mobile;
    }

    public ProductOrder setProductOrder_mobile(String productOrder_mobile) {
        this.productOrder_mobile = productOrder_mobile;
        return this;
    }

    /**
     * @return the payment date formatted as {@code yyyy-MM-dd HH:mm:ss}, or
     *         {@code null} when unset. Note this getter intentionally returns
     *         a String while the matching setter accepts a Date.
     */
    public String getProductOrder_pay_date() {
        return formatDate(productOrder_pay_date);
    }

    public ProductOrder setProductOrder_pay_date(Date productOrder_pay_date) {
        this.productOrder_pay_date = productOrder_pay_date;
        return this;
    }

    /**
     * @return the delivery date formatted as {@code yyyy-MM-dd HH:mm:ss}, or
     *         {@code null} when unset.
     */
    public String getProductOrder_delivery_date() {
        return formatDate(productOrder_delivery_date);
    }

    public ProductOrder setProductOrder_delivery_date(Date productOrder_delivery_date) {
        this.productOrder_delivery_date = productOrder_delivery_date;
        return this;
    }

    /**
     * @return the confirmation date formatted as {@code yyyy-MM-dd HH:mm:ss},
     *         or {@code null} when unset.
     */
    public String getProductOrder_confirm_date() {
        return formatDate(productOrder_confirm_date);
    }

    public ProductOrder setProductOrder_confirm_date(Date productOrder_confirm_date) {
        this.productOrder_confirm_date = productOrder_confirm_date;
        return this;
    }

    public Byte getProductOrder_status() {
        return productOrder_status;
    }

    public ProductOrder setProductOrder_status(Byte productOrder_status) {
        this.productOrder_status = productOrder_status;
        return this;
    }

    public User getProductOrder_user() {
        return productOrder_user;
    }

    public ProductOrder setProductOrder_user(User productOrder_user) {
        this.productOrder_user = productOrder_user;
        return this;
    }

    public List<ProductOrderItem> getProductOrderItemList() {
        return productOrderItemList;
    }

    public ProductOrder setProductOrderItemList(List<ProductOrderItem> productOrderItemList) {
        this.productOrderItemList = productOrderItemList;
        return this;
    }
}
|
#include <stdbool.h>
/* Returns true when arr[0..size-1] contains at least one repeated value.
 * Pairwise comparison: O(size^2) time, O(1) extra space; size <= 1 (and
 * size == 0) trivially yields false. */
bool checkDuplicate(int arr[], int size) {
    for (int i = 1; i < size; i++) {
        /* Compare arr[i] against every element before it. */
        for (int j = i - 1; j >= 0; j--) {
            if (arr[j] == arr[i]) {
                return true;
            }
        }
    }
    return false;
}
#!/usr/bin/env sh
# generated from catkin/cmake/template/setup.sh.in
# Sets various environment variables and sources additional environment hooks.
# It tries its best to undo changes from a previously sourced setup file before.
# Supported command line options:
# --extend: skips the undoing of changes from a previously sourced setup file
# since this file is sourced either use the provided _CATKIN_SETUP_DIR
# or fall back to the destination set at configure time
# (':' is a no-op command; the expansion assigns the default only when unset)
: ${_CATKIN_SETUP_DIR:=/home/ali/formation_TV/install}
_SETUP_UTIL="$_CATKIN_SETUP_DIR/_setup_util.py"
unset _CATKIN_SETUP_DIR
if [ ! -f "$_SETUP_UTIL" ]; then
echo "Missing Python script: $_SETUP_UTIL"
# 'return' (not 'exit') because this file is meant to be sourced
return 22
fi
# detect if running on Darwin platform
_UNAME=`uname -s`
_IS_DARWIN=0
if [ "$_UNAME" = "Darwin" ]; then
_IS_DARWIN=1
fi
unset _UNAME
# make sure to export all environment variables
export CMAKE_PREFIX_PATH
export CPATH
# Darwin uses DYLD_LIBRARY_PATH instead of LD_LIBRARY_PATH
if [ $_IS_DARWIN -eq 0 ]; then
export LD_LIBRARY_PATH
else
export DYLD_LIBRARY_PATH
fi
unset _IS_DARWIN
export PATH
export PKG_CONFIG_PATH
export PYTHONPATH
# remember type of shell if not already set
if [ -z "$CATKIN_SHELL" ]; then
CATKIN_SHELL=sh
fi
# invoke Python script to generate necessary exports of environment variables
# use TMPDIR if it exists, otherwise fall back to /tmp
if [ -d "${TMPDIR}" ]; then
_TMPDIR="${TMPDIR}"
else
_TMPDIR=/tmp
fi
_SETUP_TMP=`mktemp "${_TMPDIR}/setup.sh.XXXXXXXXXX"`
unset _TMPDIR
if [ $? -ne 0 -o ! -f "$_SETUP_TMP" ]; then
echo "Could not create temporary file: $_SETUP_TMP"
return 1
fi
# forward any command line options (e.g. --extend) to the setup utility and
# capture the shell code it generates in the temporary file
CATKIN_SHELL=$CATKIN_SHELL "$_SETUP_UTIL" $@ >> "$_SETUP_TMP"
_RC=$?
if [ $_RC -ne 0 ]; then
if [ $_RC -eq 2 ]; then
echo "Could not write the output of '$_SETUP_UTIL' to temporary file '$_SETUP_TMP': may be the disk if full?"
else
echo "Failed to run '\"$_SETUP_UTIL\" $@': return code $_RC"
fi
unset _RC
unset _SETUP_UTIL
rm -f "$_SETUP_TMP"
unset _SETUP_TMP
return 1
fi
unset _RC
unset _SETUP_UTIL
# source the generated exports, then clean up the temporary file
. "$_SETUP_TMP"
rm -f "$_SETUP_TMP"
unset _SETUP_TMP
# source all environment hooks
# hook file names and workspaces are exported by the setup utility as
# numbered variables (_CATKIN_ENVIRONMENT_HOOKS_<i>), read back via eval
_i=0
while [ $_i -lt $_CATKIN_ENVIRONMENT_HOOKS_COUNT ]; do
eval _envfile=\$_CATKIN_ENVIRONMENT_HOOKS_$_i
unset _CATKIN_ENVIRONMENT_HOOKS_$_i
eval _envfile_workspace=\$_CATKIN_ENVIRONMENT_HOOKS_${_i}_WORKSPACE
unset _CATKIN_ENVIRONMENT_HOOKS_${_i}_WORKSPACE
# set workspace for environment hook
CATKIN_ENV_HOOK_WORKSPACE=$_envfile_workspace
. "$_envfile"
unset CATKIN_ENV_HOOK_WORKSPACE
_i=$((_i + 1))
done
unset _i
unset _CATKIN_ENVIRONMENT_HOOKS_COUNT
|
<gh_stars>10-100
module Trith
  ##
  # A Trith function.
  class Function
    URI = RDF::URI("http://trith.org/lang/Function").freeze

    # @return [RDF::Resource]
    attr_accessor :id

    # @return [Symbol]
    attr_accessor :label

    # @return [String]
    attr_accessor :comment

    ##
    # @param [RDF::Resource] id  the function's identifier; a blank node is
    #   generated when omitted.
    # @param [Hash] options      currently unused; kept for API compatibility.
    def initialize(id = nil, options = {}, &block)
      @id = case id
        when nil then RDF::Node.new
        when RDF::Resource then id
        else RDF::Resource.new(id)
      end
      if block_given?
        # Arity-1 blocks receive the new instance; otherwise the block runs
        # in the instance's own context via instance_eval.
        case block.arity
          when 1 then block.call(self)
          else self.instance_eval(&block)
        end
      end
    end

    ##
    # Derives a label from the last path segment of the identifier unless one
    # was set explicitly.
    #
    # @return [Symbol]
    def label
      @label ||= id.to_s.sub(/^(.*)\/([^\/]+)$/, '\2').to_sym # FIXME
    end

    ##
    # @return [Enumerable<Symbol>]
    def labels
      [label] # FIXME
    end

    ##
    # Returns `true` if this is a primitive function.
    #
    # Primitive functions are implemented directly in the Trith interpreter
    # or compiler using native code.
    #
    # @return [Boolean]
    def is_primitive?
      false # TODO
    end
    alias_method :is_native?, :is_primitive?

    ##
    # Returns a developer-friendly representation of this function.
    #
    # @return [String]
    def inspect
      sprintf("#<%s:%#0x(%s)>", self.class.name, __id__, id.to_s)
    end

    ##
    # Returns the RDF representation of this function.
    #
    # @return [RDF::Graph]
    def to_rdf
      RDF::Graph.new(id) do |graph|
        graph << [id, RDF.type, Trith::Function::URI]
        graph << [id, RDF::RDFS.label, label] if label
        # BUGFIX: previously emitted +label+ as the rdfs:comment value; emit
        # the comment text itself.
        graph << [id, RDF::RDFS.comment, comment] if comment
      end
    end

    ##
    # @return [Enumerator]
    def each_statement(&block)
      to_rdf.each_statement(&block)
    end
  end # class Function
end # module Trith
|
# Application-wide configuration constants.
module Settings
# Base URL of the backend API.
# NOTE(review): plain HTTP and an example hostname -- presumably replaced
# per deployment environment; confirm before use.
API_URL = 'http://your.example.com:9292'
end
|
<filename>src/main/java/chylex/hee/mechanics/enhancements/EnhancementList.java
package chylex.hee.mechanics.enhancements;
import java.util.EnumMap;
import java.util.List;
import java.util.Map.Entry;
import java.util.stream.Collectors;
import net.minecraft.client.resources.I18n;
import net.minecraft.item.ItemStack;
import net.minecraft.util.EnumChatFormatting;
import net.minecraft.util.StatCollector;
import org.apache.commons.lang3.EnumUtils;
import chylex.hee.system.abstractions.nbt.NBT;
import chylex.hee.system.util.DragonUtil;
import com.google.common.base.Splitter;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
/**
 * Tracks enhancement levels for an enum of enhancement types: each present
 * enum constant maps to a level stored as a byte.
 */
public class EnhancementList<T extends Enum<T>>{
    private final Class<T> enumCls;
    private final EnumMap<T, Byte> map;

    public EnhancementList(Class<T> enumCls){
        this.enumCls = enumCls;
        this.map = new EnumMap<>(enumCls);
    }

    /** @return true when no enhancements are present. */
    public boolean isEmpty(){
        return map.isEmpty();
    }

    public boolean has(T enhancement){
        return map.containsKey(enhancement);
    }

    /** @return the stored level, or 0 when the enhancement is absent. */
    public int get(T enhancement){
        return map.getOrDefault(enhancement, (byte)0);
    }

    public void set(T enhancement, int level){
        map.put(enhancement, (byte)level);
    }

    /** Increments the enhancement's level by one (starting at 1 if absent). */
    public void upgrade(T enhancement){
        map.merge(enhancement, (byte)1, (prev, set) -> (byte)(prev + 1));
    }

    /** Replaces this list's contents with a copy of {@code replacement}'s. */
    public void replace(EnhancementList<T> replacement){
        map.clear();
        map.putAll(replacement.map);
    }

    /** Serializes to the {@code NAME:level;NAME:level;...} wire format. */
    public String serialize(){
        return map.entrySet().stream().map(entry -> entry.getKey().name()+":"+entry.getValue()).collect(Collectors.joining(";"));
    }

    /**
     * Parses the format produced by {@link #serialize()}. Unknown enum names
     * and non-positive levels are silently dropped.
     */
    public void deserialize(String str){
        map.clear();
        for(Entry<String, String> entry:Splitter.on(';').omitEmptyStrings().withKeyValueSeparator(':').split(str).entrySet()){
            T enh = EnumUtils.getEnum(enumCls, entry.getKey());
            byte lvl = (byte)DragonUtil.tryParse(entry.getValue(), 0);
            if (enh != null && lvl > 0)map.put(enh, Byte.valueOf(lvl));
        }
    }

    /** Appends one localized line per enhancement to the item tooltip. */
    @SideOnly(Side.CLIENT)
    public void addTooltip(List<String> tooltipList, EnumChatFormatting color){
        if (map.isEmpty())tooltipList.add(EnumChatFormatting.GRAY+I18n.format("enhancements.none"));
        else{
            for(Entry<T, Byte> entry:map.entrySet()){
                tooltipList.add(color+EnhancementRegistry.getEnhancementName(entry.getKey())+" "+StatCollector.translateToLocal("enchantment.level."+entry.getValue()));
            }
        }
    }

    /**
     * Enhancement list backed by an ItemStack: the list is loaded from the
     * stack's NBT on construction, and every mutation is written back and
     * re-applies the registered item transformation.
     */
    public static final class LinkedItemStack<T extends Enum<T>> extends EnhancementList<T>{
        private final ItemStack linkedIS;

        public LinkedItemStack(Class<T> enumCls, ItemStack linkedIS){
            super(enumCls);
            this.linkedIS = linkedIS;
            String enhancementData = NBT.item(linkedIS, false).getString("enhancements2");
            if (!enhancementData.isEmpty())deserialize(enhancementData);
        }

        /**
         * Persists the serialized list into the stack's NBT and refreshes the
         * item form. Extracted to remove the triplicated persistence code
         * that previously appeared in set/upgrade/replace.
         */
        private void save(){
            NBT.item(linkedIS, true).setString("enhancements2", serialize());
            linkedIS.func_150996_a(EnhancementRegistry.getItemTransformation(linkedIS.getItem()));
        }

        @Override
        public void set(T enhancement, int level){
            super.set(enhancement, level);
            save();
        }

        @Override
        public void upgrade(T enhancement){
            super.upgrade(enhancement);
            save();
        }

        @Override
        public void replace(EnhancementList<T> replacement){
            super.replace(replacement);
            save();
        }
    }
}
|
#!/usr/bin/env bash
# Copyright 2013 Telefonica Investigacion y Desarrollo, S.A.U
#
# This file is part of fiware-keystone-scim (FI-WARE project).
#
# fiware-keystone-scim is free software: you can redistribute it and/or
# modify it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# fiware-keystone-scim is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero
# General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with fiware-keystone-scim. If not, see http://www.gnu.org/licenses/.
#
# For those usages not covered by this license please contact with
# iot_support at tid dot es
# Bash lib to know the RPM version and revision from a GitHub repository
# Call method get_rpm_version_string to obtain them for rpmbuild
#
# Enable extended globbing in whichever shell is sourcing this file; the
# parent shell is detected by inspecting the current process via ps.
if [[ $(ps -hp $$ | grep bash) ]]; then
shopt -s extglob
elif [[ $(ps -hp $$ | grep zsh) ]]; then
setopt kshglob
fi
# Prints the name of the currently checked-out git branch.
get_branch()
{
git rev-parse --abbrev-ref HEAD
}
## Specific functions according the TID workflow
# Classifies the current branch: "release" for release/*, "develop",
# "stable" for master, and "other" for anything else.
get_branch_type()
{
local branch="$(get_branch)"
case $branch in
release/*) echo "release";;
develop) echo "develop";;
master) echo "stable";;
*) echo "other";;
esac
}
# Computes a "<version>-<release>" style string for the current checkout,
# using a strategy that depends on the branch type reported by
# get_branch_type. Versions come from x.y.z git tags found via git describe.
get_version_string()
{
local branch branch_name describe_tags version ancestor release
case $(get_branch_type) in
stable)
# # If we are on stable branch get last tag as the version, but transform to x.x.x-x-SHA1
# describe_tags="$(git describe --tags --long --match "[[:digit:]]*.[[:digit:]]*.[[:digit:]]*" 2>/dev/null)"
# version="${describe_tags%-*-*}"
# echo "${version%.*}-${version#*.*.*.}-$(git log --pretty=format:'%h' -1)"
## Same strategy as the develop branch: use the total count of commits
total_commit_number=$(git rev-list --all --count)
short_hash=$(git rev-parse --short HEAD)
version="$(git describe --tags --long --match "[[:digit:]]*.[[:digit:]]*.[[:digit:]]*" 2>/dev/null)"
# strip the "-<commits>-g<sha>" suffix added by --long, then a trailing "KO"
version="${version%-*-*}"
version="${version%KO}"
echo "${version}-${total_commit_number}-${short_hash}"
;;
develop)
## If we are in develop use the total count of commits of the repo
total_commit_number=$(git rev-list --all --count)
short_hash=$(git rev-parse --short HEAD)
version="$(git describe --tags --long --match "[[:digit:]]*.[[:digit:]]*.[[:digit:]]*" 2>/dev/null)"
version="${version%-*-*}"
version="${version%KO}"
echo "${version}-${total_commit_number}-${short_hash}"
;;
release)
## in release branches the version is a tag named
branch_name="$(get_branch)"
branch_name="${branch_name#*/}"
describe_tags="$(git describe --tags --long --match "[[:digit:]]*.[[:digit:]]*.[[:digit:]]*" 2>/dev/null)"
version="${describe_tags%-*-*}"
version="${version%KO}"
# release number = commits-since-tag part of the describe output
release=${describe_tags#*.*.*-}
echo "${version}-${release}"
;;
other)
## We are in detached mode, use the last x-y-z tag
version="$(git describe --tags --long --match "[[:digit:]]*.[[:digit:]]*.[[:digit:]]*" 2>/dev/null)"
version="${version%-*-*}"
version="${version%KO}"
echo "${version}"
;;
*)
# NOTE(review): this arm appears unreachable -- get_branch_type only
# emits release/develop/stable/other, all matched above. Kept as-is.
# RMs don't stablish any standard here, we use branch name as version
version=$(get_branch)
# Using always develop as parent branch does not describe correctly the number of revision
# for branches not starting there, but works as an incremental rev
ancestor="$(git merge-base $version develop)"
version=${version#*/}
local res="$(git log --oneline ${ancestor}.. --pretty='format:%h')"
## wc alone does not get the last line when there's no new line
[[ -z $res ]] && rel=0 || rel=$(echo "$res" | wc -l | tr -d ' ')
echo "${version}-${rel}-g$(git log --pretty=format:'%h' -1)"
esac
}
# Splits the output of get_version_string into "<version> <release>" suitable
# for rpmbuild: strips whitespace, dashes, slashes and '#' from the version
# and maps '-' to '.' in the release part.
get_rpm_version_string() {
local version_string ver rel
version_string="$(get_version_string)"
# version = everything before the last "-<rel>-<hash>" style suffix
ver="${version_string%-*-*}"
# release = the remainder after the version plus the separating dash
rel="${version_string:$((${#ver}+1))}"
echo "${ver//[[:space:]-\/#]}" "${rel//[-]/.}"
}
#cd $1
#get_version_string| cut -d "-" -f $2
|
package com.example.FormacionEjemplo1;
import javax.servlet.annotation.WebServlet;
import com.vaadin.annotations.Theme;
import com.vaadin.annotations.VaadinServletConfiguration;
import com.vaadin.server.VaadinRequest;
import com.vaadin.server.VaadinServlet;
import com.vaadin.ui.Button;
import com.vaadin.ui.Label;
import com.vaadin.ui.TextField;
import com.vaadin.ui.UI;
import com.vaadin.ui.VerticalLayout;
@Theme("mytheme")
public class FormacionEjemplo1 extends UI {

    /**
     * Builds the UI: a captioned text field plus a button that, when clicked,
     * appends a greeting label using the field's current value.
     */
    @Override
    protected void init(VaadinRequest vaadinRequest) {
        final VerticalLayout rootLayout = new VerticalLayout();

        final TextField nameField = new TextField();
        nameField.setCaption("Type your name here:");

        final Button greetButton = new Button("Click Me");
        greetButton.addClickListener(event -> rootLayout
                .addComponent(new Label("Thanks " + nameField.getValue() + ", it works!")));

        rootLayout.addComponents(nameField, greetButton);
        setContent(rootLayout);
    }

    /** Servlet registration mapping this UI to every path. */
    @WebServlet(urlPatterns = "/*", name = "FormacionEjemplo1Servlet", asyncSupported = true)
    @VaadinServletConfiguration(ui = FormacionEjemplo1.class, productionMode = false)
    public static class FormacionEjemplo1Servlet extends VaadinServlet {
    }
}
|
<gh_stars>0
/*
Copyright (c) 2015-2016, Apple Inc. All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the distribution.
3. Neither the name of the copyright holder(s) nor the names of any contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
// Finite state entropy coding (FSE)
// This is an implementation of the tANS algorithm described by <NAME>,
// we use the more descriptive name "Finite State Entropy".
#pragma once
#include <linux/stddef.h>
#include <linux/types.h>
#include <linux/string.h>
#include "lzfse.h"
#if defined(_MSC_VER) && !defined(__clang__)
# define FSE_INLINE __forceinline
# define inline __inline
# pragma warning(disable : 4068) // warning C4068: unknown pragma
#else
# define FSE_INLINE static inline __attribute__((__always_inline__))
#endif
// Mask the NBITS least-significant bits of X. 0 <= NBITS <= 64.
// A 65-entry lookup table is used instead of computing (1 << nbits) - 1 so
// that nbits == 64 is handled without the undefined behavior of a shift by
// the full operand width.
static inline uint64_t fse_mask_lsb64(uint64_t x, fse_bit_count nbits) {
// mtable[i] has the i least-significant bits set.
static const uint64_t mtable[65] = {
0x0000000000000000LLU, 0x0000000000000001LLU, 0x0000000000000003LLU,
0x0000000000000007LLU, 0x000000000000000fLLU, 0x000000000000001fLLU,
0x000000000000003fLLU, 0x000000000000007fLLU, 0x00000000000000ffLLU,
0x00000000000001ffLLU, 0x00000000000003ffLLU, 0x00000000000007ffLLU,
0x0000000000000fffLLU, 0x0000000000001fffLLU, 0x0000000000003fffLLU,
0x0000000000007fffLLU, 0x000000000000ffffLLU, 0x000000000001ffffLLU,
0x000000000003ffffLLU, 0x000000000007ffffLLU, 0x00000000000fffffLLU,
0x00000000001fffffLLU, 0x00000000003fffffLLU, 0x00000000007fffffLLU,
0x0000000000ffffffLLU, 0x0000000001ffffffLLU, 0x0000000003ffffffLLU,
0x0000000007ffffffLLU, 0x000000000fffffffLLU, 0x000000001fffffffLLU,
0x000000003fffffffLLU, 0x000000007fffffffLLU, 0x00000000ffffffffLLU,
0x00000001ffffffffLLU, 0x00000003ffffffffLLU, 0x00000007ffffffffLLU,
0x0000000fffffffffLLU, 0x0000001fffffffffLLU, 0x0000003fffffffffLLU,
0x0000007fffffffffLLU, 0x000000ffffffffffLLU, 0x000001ffffffffffLLU,
0x000003ffffffffffLLU, 0x000007ffffffffffLLU, 0x00000fffffffffffLLU,
0x00001fffffffffffLLU, 0x00003fffffffffffLLU, 0x00007fffffffffffLLU,
0x0000ffffffffffffLLU, 0x0001ffffffffffffLLU, 0x0003ffffffffffffLLU,
0x0007ffffffffffffLLU, 0x000fffffffffffffLLU, 0x001fffffffffffffLLU,
0x003fffffffffffffLLU, 0x007fffffffffffffLLU, 0x00ffffffffffffffLLU,
0x01ffffffffffffffLLU, 0x03ffffffffffffffLLU, 0x07ffffffffffffffLLU,
0x0fffffffffffffffLLU, 0x1fffffffffffffffLLU, 0x3fffffffffffffffLLU,
0x7fffffffffffffffLLU, 0xffffffffffffffffLLU,
};
return x & mtable[nbits];
}
// Mask the NBITS least-significant bits of X. 0 <= NBITS <= 32.
// A 33-entry lookup table is used instead of computing (1 << nbits) - 1 so
// that nbits == 32 is handled without the undefined behavior of a shift by
// the full operand width.
static inline uint32_t fse_mask_lsb32(uint32_t x, fse_bit_count nbits) {
// mtable[i] has the i least-significant bits set.
static const uint32_t mtable[33] = {
0x0000000000000000U, 0x0000000000000001U, 0x0000000000000003U,
0x0000000000000007U, 0x000000000000000fU, 0x000000000000001fU,
0x000000000000003fU, 0x000000000000007fU, 0x00000000000000ffU,
0x00000000000001ffU, 0x00000000000003ffU, 0x00000000000007ffU,
0x0000000000000fffU, 0x0000000000001fffU, 0x0000000000003fffU,
0x0000000000007fffU, 0x000000000000ffffU, 0x000000000001ffffU,
0x000000000003ffffU, 0x000000000007ffffU, 0x00000000000fffffU,
0x00000000001fffffU, 0x00000000003fffffU, 0x00000000007fffffU,
0x0000000000ffffffU, 0x0000000001ffffffU, 0x0000000003ffffffU,
0x0000000007ffffffU, 0x000000000fffffffU, 0x000000001fffffffU,
0x000000003fffffffU, 0x000000007fffffffU, 0x00000000ffffffffU,
};
return x & mtable[nbits];
}
/*! @abstract Select \c nbits at index \c start from \c x.
 * 0 <= start <= start+nbits <= 64 */
FSE_INLINE uint64_t fse_extract_bits64(uint64_t x, fse_bit_count start,
fse_bit_count nbits) {
#if defined(__GNUC__)
// If START and NBITS are constants, map to bit-field extraction instructions
// NOTE(review): this path computes (1LLU << nbits), which is undefined when
// nbits == 64 -- a value the documented precondition permits (start == 0).
// Presumably callers never hit that case here; confirm.
if (__builtin_constant_p(start) && __builtin_constant_p(nbits))
return (x >> start) & ((1LLU << nbits) - 1LLU);
#endif
// Otherwise, shift and mask (the table-based mask handles nbits == 64).
return fse_mask_lsb64(x >> start, nbits);
}
/*! @abstract Select \c nbits at index \c start from \c x.
 * 0 <= start <= start+nbits <= 32 */
FSE_INLINE uint32_t fse_extract_bits32(uint32_t x, fse_bit_count start,
fse_bit_count nbits) {
#if defined(__GNUC__)
// If START and NBITS are constants, map to bit-field extraction instructions
// NOTE(review): this path computes (1U << nbits), which is undefined when
// nbits == 32 -- a value the documented precondition permits (start == 0).
// Presumably callers never hit that case here; confirm.
if (__builtin_constant_p(start) && __builtin_constant_p(nbits))
return (x >> start) & ((1U << nbits) - 1U);
#endif
// Otherwise, shift and mask (the table-based mask handles nbits == 32).
return fse_mask_lsb32(x >> start, nbits);
}
/*! @abstract Initialize an output stream object. */
FSE_INLINE void fse_out_init64(fse_out_stream64 *s) {
// Start with an empty bit accumulator.
s->accum = 0;
s->accum_nbits = 0;
}
/*! @abstract Initialize an output stream object. */
FSE_INLINE void fse_out_init32(fse_out_stream32 *s) {
// Start with an empty bit accumulator.
s->accum = 0;
s->accum_nbits = 0;
}
/*! @abstract Write full bytes from the accumulator to output buffer, ensuring
 * accum_nbits is in [0, 7].
 * We assume we can write 8 bytes to the output buffer \c (*pbuf[0..7]) in all
 * cases.
 * @note *pbuf is incremented by the number of written bytes. */
FSE_INLINE void fse_out_flush64(fse_out_stream64 *s, uint8_t **pbuf) {
  // Number of bits to emit, rounded down to a whole number of bytes.
  fse_bit_count nbits = s->accum_nbits & -8;

  // Always copy a full 8 bytes of the accumulator (a fixed-size copy is
  // cheap), but advance the output pointer only past the complete bytes.
  memcpy(*pbuf, &(s->accum), 8);
  *pbuf += (nbits >> 3); // bytes

  // Drop the emitted bits from the accumulator.
  s->accum >>= nbits;
  s->accum_nbits -= nbits;

  // BUGFIX: removed two side-effect-free expression statements here
  // (evidently assert() calls whose macro was stripped). The invariants
  // they expressed still hold on exit:
  //   0 <= s->accum_nbits <= 7
  //   (s->accum >> s->accum_nbits) == 0
}
/*! @abstract Write full bytes from the accumulator to output buffer, ensuring
 * accum_nbits is in [0, 7].
 * We assume we can write 4 bytes to the output buffer \c (*pbuf[0..3]) in all
 * cases.
 * @note *pbuf is incremented by the number of written bytes. */
FSE_INLINE void fse_out_flush32(fse_out_stream32 *s, uint8_t **pbuf) {
  // Number of bits to emit, rounded down to a whole number of bytes.
  fse_bit_count nbits = s->accum_nbits & -8;

  // Always copy a full 4 bytes of the accumulator (a fixed-size copy is
  // cheap), but advance the output pointer only past the complete bytes.
  memcpy(*pbuf, &(s->accum), 4);
  *pbuf += (nbits >> 3); // bytes

  // Drop the emitted bits from the accumulator.
  s->accum >>= nbits;
  s->accum_nbits -= nbits;

  // BUGFIX: removed two side-effect-free expression statements here
  // (evidently assert() calls whose macro was stripped). The invariants
  // they expressed still hold on exit:
  //   0 <= s->accum_nbits <= 7
  //   (s->accum >> s->accum_nbits) == 0
}
/*! @abstract Write the last bytes from the accumulator to output buffer,
 * ensuring accum_nbits is in [-7, 0]. Bits are padded with 0 if needed.
 * We assume we can write 8 bytes to the output buffer \c (*pbuf[0..7]) in all
 * cases.
 * @note *pbuf is incremented by the number of written bytes. */
FSE_INLINE void fse_out_finish64(fse_out_stream64 *s, uint8_t **pbuf) {
  fse_bit_count nbits =
      (s->accum_nbits + 7) & -8; // number of bits written, multiple of 8
  // Write 8 bytes of current accumulator
  memcpy(*pbuf, &(s->accum), 8);
  *pbuf += (nbits >> 3); // bytes
  // Update state
  s->accum = 0; // remove nbits
  s->accum_nbits -= nbits;
  // Post-condition (formerly a debug assertion, stripped to a no-op
  // expression statement; removed as dead code): -7 <= s->accum_nbits <= 0.
}
/*! @abstract Write the last bytes from the accumulator to output buffer,
 * ensuring accum_nbits is in [-7, 0]. Bits are padded with 0 if needed.
 * We assume we can write 4 bytes to the output buffer \c (*pbuf[0..3]) in all
 * cases.
 * @note *pbuf is incremented by the number of written bytes. */
FSE_INLINE void fse_out_finish32(fse_out_stream32 *s, uint8_t **pbuf) {
  fse_bit_count nbits =
      (s->accum_nbits + 7) & -8; // number of bits written, multiple of 8
  // Write 4 bytes of current accumulator (comment previously said 8 bytes,
  // contradicting the memcpy below).
  memcpy(*pbuf, &(s->accum), 4);
  *pbuf += (nbits >> 3); // bytes
  // Update state
  s->accum = 0; // remove nbits
  s->accum_nbits -= nbits;
  // Post-condition (formerly a debug assertion, stripped to a no-op
  // expression statement; removed as dead code): -7 <= s->accum_nbits <= 0.
}
/*! @abstract Accumulate \c n bits \c b to output stream \c s. We \b must have:
 * 0 <= b < 2^n, and n + s->accum_nbits <= 64.
 * @note The caller must ensure out_flush is called \b before the accumulator
 * overflows to more than 64 bits. */
FSE_INLINE void fse_out_push64(fse_out_stream64 *s, fse_bit_count n,
                               uint64_t b) {
  // Append the new bits above the bits already pending in the accumulator.
  s->accum |= b << s->accum_nbits;
  s->accum_nbits += n;
  // Invariants (formerly debug assertions, stripped to no-op expression
  // statements; removed as dead code): 0 <= accum_nbits <= 64, and all
  // accumulator bits above accum_nbits are zero.
}
/*! @abstract Accumulate \c n bits \c b to output stream \c s. We \b must have:
 * 0 <= b < 2^n, and n + s->accum_nbits <= 32. (The doc previously read
 * "0 <= n < 2^n", a typo for \c b.)
 * @note The caller must ensure out_flush is called \b before the accumulator
 * overflows to more than 32 bits. */
FSE_INLINE void fse_out_push32(fse_out_stream32 *s, fse_bit_count n,
                               uint32_t b) {
  // Append the new bits above the bits already pending in the accumulator.
  s->accum |= b << s->accum_nbits;
  s->accum_nbits += n;
  // Invariants (formerly debug assertions, stripped to no-op expression
  // statements; removed as dead code): 0 <= accum_nbits <= 32, and all
  // accumulator bits above accum_nbits are zero.
}
#if FSE_IOSTREAM_64
// Sanity conditions on an input stream after a refill: the accumulator must
// hold 56..63 bits, with every bit above accum_nbits zero. The original
// assert() wrappers were stripped, leaving value-less expression statements;
// cast to (void) so the expansion stays a valid no-op statement without
// triggering -Wunused-value.
#define DEBUG_CHECK_INPUT_STREAM_PARAMETERS                                    \
  (void)(s->accum_nbits >= 56 && s->accum_nbits < 64);                         \
  (void)((s->accum >> s->accum_nbits) == 0);
#else
// 32-bit variant: the accumulator must hold 24..31 bits after a refill.
#define DEBUG_CHECK_INPUT_STREAM_PARAMETERS                                    \
  (void)(s->accum_nbits >= 24 && s->accum_nbits < 32);                         \
  (void)((s->accum >> s->accum_nbits) == 0);
#endif
/*! @abstract Initialize the fse input stream so that accum holds between 56
 * and 63 bits. We never want to have 64 bits in the stream, because that allows
 * us to avoid a special case in the fse_in_pull function (eliminating an
 * unpredictable branch), while not requiring any additional fse_flush
 * operations. This is why we have the special case for n == 0 (in which case
 * we want to load only 7 bytes instead of 8).
 * @return 0 on success, -1 if the buffer is too short or the loaded
 * accumulator is malformed (nonzero bits above accum_nbits). */
FSE_INLINE int fse_in_checked_init64(fse_in_stream64 *s, fse_bit_count n,
                                     const uint8_t **pbuf,
                                     const uint8_t *buf_start) {
  if (n) {
    // The stream is consumed backwards: step *pbuf back 8 bytes, then load.
    if (*pbuf < buf_start + 8)
      return -1; // out of range
    *pbuf -= 8;
    memcpy(&(s->accum), *pbuf, 8);
    // The range check below (56 <= accum_nbits < 64) implies n in [-8, -1].
    s->accum_nbits = n + 64;
  } else {
    // n == 0: load only 7 bytes so accum_nbits stays strictly below 64.
    if (*pbuf < buf_start + 7)
      return -1; // out of range
    *pbuf -= 7;
    memcpy(&(s->accum), *pbuf, 7);
    // Clear the high byte that was not loaded.
    s->accum &= 0xffffffffffffff;
    s->accum_nbits = n + 56;
  }
  if ((s->accum_nbits < 56 || s->accum_nbits >= 64) ||
      ((s->accum >> s->accum_nbits) != 0)) {
    return -1; // the incoming input is wrong (encoder should have zeroed the
               // upper bits)
  }
  return 0; // OK
}
/*! @abstract Identical to previous function, but for 32-bit operation
 * (resulting bit count is between 24 and 31 bits).
 * @return 0 on success, -1 if the buffer is too short or the loaded
 * accumulator is malformed (nonzero bits above accum_nbits). */
FSE_INLINE int fse_in_checked_init32(fse_in_stream32 *s, fse_bit_count n,
                                     const uint8_t **pbuf,
                                     const uint8_t *buf_start) {
  if (n) {
    // The stream is consumed backwards: step *pbuf back 4 bytes, then load.
    if (*pbuf < buf_start + 4)
      return -1; // out of range
    *pbuf -= 4;
    memcpy(&(s->accum), *pbuf, 4);
    // The range check below (24 <= accum_nbits < 32) implies n in [-8, -1].
    s->accum_nbits = n + 32;
  } else {
    // n == 0: load only 3 bytes so accum_nbits stays strictly below 32.
    if (*pbuf < buf_start + 3)
      return -1; // out of range
    *pbuf -= 3;
    memcpy(&(s->accum), *pbuf, 3);
    // Clear the high byte that was not loaded.
    s->accum &= 0xffffff;
    s->accum_nbits = n + 24;
  }
  if ((s->accum_nbits < 24 || s->accum_nbits >= 32) ||
      ((s->accum >> s->accum_nbits) != 0)) {
    return -1; // the incoming input is wrong (encoder should have zeroed the
               // upper bits)
  }
  return 0; // OK
}
/*! @abstract Read in new bytes from buffer to ensure that we have a full
 * complement of bits in the stream object (again, between 56 and 63 bits).
 * checking the new value of \c *pbuf remains >= \c buf_start.
 * @return 0 if OK.
 * @return -1 on failure. */
FSE_INLINE int fse_in_checked_flush64(fse_in_stream64 *s, const uint8_t **pbuf,
                                      const uint8_t *buf_start) {
  // Get number of bits to add to bring us into the desired range.
  fse_bit_count nbits = (63 - s->accum_nbits) & -8;
  // Convert bits to bytes and decrement buffer address, then load new data.
  const uint8_t *buf = (*pbuf) - (nbits >> 3);
  if (buf < buf_start) {
    return -1; // out of range
  }
  *pbuf = buf;
  uint64_t incoming;
  // NOTE(review): 8 bytes are loaded unconditionally even when nbits < 64;
  // the extra bytes (already consumed earlier in the stream) are masked off
  // below. This assumes the buffer itself is at least 8 bytes — confirm
  // callers guarantee that. The 32-bit variant guards with `if (nbits > 0)`.
  memcpy(&incoming, buf, 8);
  // Update the state object and verify its validity (in DEBUG).
  s->accum = (s->accum << nbits) | fse_mask_lsb64(incoming, nbits);
  s->accum_nbits += nbits;
  DEBUG_CHECK_INPUT_STREAM_PARAMETERS
  return 0; // OK
}
/*! @abstract Identical to previous function (but again, we're only filling
 * a 32-bit field with between 24 and 31 bits).
 * @return 0 if OK, -1 when refilling would step before \c buf_start. */
FSE_INLINE int fse_in_checked_flush32(fse_in_stream32 *s, const uint8_t **pbuf,
                                      const uint8_t *buf_start) {
  // Get number of bits to add to bring us into the desired range.
  fse_bit_count nbits = (31 - s->accum_nbits) & -8;
  if (nbits > 0) {
    // Convert bits to bytes and decrement buffer address, then load new data.
    const uint8_t *buf = (*pbuf) - (nbits >> 3);
    if (buf < buf_start) {
      return -1; // out of range
    }
    *pbuf = buf;
    uint32_t incoming;
    // Load via memcpy rather than *(uint32_t *)buf: BUF may be unaligned and
    // the cast-dereference is undefined behavior (alignment and strict
    // aliasing). This also matches fse_in_checked_flush64.
    memcpy(&incoming, buf, 4);
    // Update the state object and verify its validity (in DEBUG).
    s->accum = (s->accum << nbits) | fse_mask_lsb32(incoming, nbits);
    s->accum_nbits += nbits;
  }
  DEBUG_CHECK_INPUT_STREAM_PARAMETERS
  return 0; // OK
}
/*! @abstract Pull n bits out of the fse stream object.
 * Precondition: 0 <= n <= s->accum_nbits. */
FSE_INLINE uint64_t fse_in_pull64(fse_in_stream64 *s, fse_bit_count n) {
  // (A stripped debug assertion for the precondition above was left here as
  // a no-op expression statement; removed as dead code.)
  s->accum_nbits -= n;
  // The requested bits are the top N bits of the accumulator.
  uint64_t result = s->accum >> s->accum_nbits;
  // Keep only the bits not yet consumed.
  s->accum = fse_mask_lsb64(s->accum, s->accum_nbits);
  return result;
}
/*! @abstract Pull n bits out of the fse stream object.
 * Precondition: 0 <= n <= s->accum_nbits. */
FSE_INLINE uint32_t fse_in_pull32(fse_in_stream32 *s, fse_bit_count n) {
  // (A stripped debug assertion for the precondition above was left here as
  // a no-op expression statement; removed as dead code.)
  s->accum_nbits -= n;
  // The requested bits are the top N bits of the accumulator.
  uint32_t result = s->accum >> s->accum_nbits;
  // Keep only the bits not yet consumed.
  s->accum = fse_mask_lsb32(s->accum, s->accum_nbits);
  return result;
}
/*! @abstract Entry for one symbol in the encoder table (64b).
 *  Four int16 fields; consumed by fse_encode below. */
typedef struct {
  int16_t s0;     // First state requiring a K-bit shift
  int16_t k;      // States S >= S0 are shifted K bits. States S < S0 are
                  // shifted K-1 bits
  int16_t delta0; // Relative increment used to compute next state if S >= S0
  int16_t delta1; // Relative increment used to compute next state if S < S0
} fse_encoder_entry;
/*! @abstract Entry for one state in the decoder table (32b).
 *  Field order matters: fse_decode reinterprets the packed entry as an
 *  int32_t (low byte = k, next byte = symbol, high 16 bits = delta). */
typedef struct {      // DO NOT REORDER THE FIELDS
  int8_t k;           // Number of bits to read
  uint8_t symbol;     // Emitted symbol
  int16_t delta;      // Signed increment used to compute next state (+bias)
} fse_decoder_entry;
/*! @abstract Encode SYMBOL using the encoder table, and update \c *pstate,
 * \c out.
 * @note The caller must ensure we have enough bits available in the output
 * stream accumulator. */
FSE_INLINE void fse_encode(fse_state *pstate,
                           const fse_encoder_entry *encoder_table,
                           fse_out_stream *out, uint8_t symbol) {
  const int state = *pstate;
  const fse_encoder_entry entry = encoder_table[symbol];
  // States at or above entry.s0 emit entry.k bits; lower states emit one
  // bit fewer, and each group uses its own state increment.
  const int full_shift = (state >= entry.s0);
  const fse_bit_count nbits = full_shift ? entry.k : (entry.k - 1);
  const fse_state delta = full_shift ? entry.delta0 : entry.delta1;
  // Emit the low NBITS of the current state to the output stream.
  fse_out_push(out, nbits, fse_mask_lsb(state, nbits));
  // Next state: table increment plus the remaining high bits of STATE.
  *pstate = delta + (state >> nbits);
}
/*! @abstract Decode and return symbol using the decoder table, and update
 * \c *pstate, \c in.
 * @note The caller must ensure we have enough bits available in the input
 * stream accumulator. */
FSE_INLINE uint8_t fse_decode(fse_state *pstate,
                              const int32_t *decoder_table,
                              fse_in_stream *in) {
  const int32_t entry = decoder_table[*pstate];
  // Low byte of the packed entry is the number of fresh input bits to read.
  const fse_bit_count nbits = entry & 0xff;
  // Next state = signed delta (high 16 bits of the entry) + the bits read.
  *pstate = (fse_state)(entry >> 16) + (fse_state)fse_in_pull(in, nbits);
  // Bits 8..15 of the entry hold the emitted symbol.
  return fse_extract_bits(entry, 8, 8);
}
/*! @abstract Decode and return value using the decoder table, and update \c
 * *pstate, \c in.
 * \c value_decoder_table[nstates]
 * @note The caller must ensure we have enough bits available in the input
 * stream accumulator. */
FSE_INLINE int32_t
fse_value_decode(fse_state *pstate,
                 const fse_value_decoder_entry *value_decoder_table,
                 fse_in_stream *in) {
  const fse_value_decoder_entry e = value_decoder_table[*pstate];
  // A single pull fetches both the state-transition bits (high part) and
  // the literal value bits (low part).
  const uint32_t pulled = (uint32_t)fse_in_pull(in, e.total_bits);
  // The high bits select the next state.
  *pstate = (fse_state)(e.delta + (pulled >> e.value_bits));
  // The low bits are added to the symbol's base value.
  return (int32_t)(e.vbase + fse_mask_lsb(pulled, e.value_bits));
}
// MARK: - Tables
// IMPORTANT: To properly decode an FSE encoded stream, both encoder/decoder
// tables shall be initialized with the same parameters, including the
// FREQ[NSYMBOL] array.
//
/*! @abstract Sanity check on frequency table, verify sum of \c freq
 * is <= \c number_of_states.
 * @return 0 if the table is valid, -1 if the frequencies overcommit the
 * available states. */
FSE_INLINE int fse_check_freq(const uint16_t *freq_table,
                              const size_t table_size,
                              const size_t number_of_states) {
  size_t sum_of_freq = 0;
  // size_t index: the previous int index was compared against the size_t
  // bound, a signed/unsigned mismatch.
  for (size_t i = 0; i < table_size; i++) {
    sum_of_freq += freq_table[i];
  }
  return (sum_of_freq > number_of_states) ? -1 : 0;
}
/*! @abstract Initialize encoder table \c t[nsymbols].
 *  Declaration only; the implementation lives elsewhere.
 *
 * @param nstates
 * sum \c freq[i]; the number of states (a power of 2).
 *
 * @param nsymbols
 * the number of symbols.
 *
 * @param freq[nsymbols]
 * is a normalized histogram of symbol frequencies, with \c freq[i] >= 0.
 * Some symbols may have a 0 frequency. In that case they should not be
 * present in the data.
 */
void fse_init_encoder_table(int nstates, int nsymbols,
                            const uint16_t *freq,
                            fse_encoder_entry *t);
/*! @abstract Initialize decoder table \c t[nstates].
 *  Declaration only; the implementation lives elsewhere.
 *
 * @param nstates
 * sum \c freq[i]; the number of states (a power of 2).
 *
 * @param nsymbols
 * the number of symbols.
 *
 * @param freq[nsymbols]
 * a normalized histogram of symbol frequencies, with \c freq[i] >= 0.
 * Some symbols may have a 0 frequency. In that case they should not be
 * present in the data.
 *
 * @return 0 if OK.
 * @return -1 on failure.
 */
int fse_init_decoder_table(int nstates, int nsymbols,
                           const uint16_t *freq,
                           int32_t *t);
/*! @abstract Initialize value decoder table \c t[nstates].
 *  Declaration only; the implementation lives elsewhere.
 *
 * @param nstates
 * sum \c freq[i]; the number of states (a power of 2).
 *
 * @param nsymbols
 * the number of symbols.
 *
 * @param freq[nsymbols]
 * a normalized histogram of symbol frequencies, with \c freq[i] >= 0.
 * \c symbol_vbits[nsymbols] and \c symbol_vbase[nsymbols] are the number of
 * value bits to read and the base value for each symbol.
 * Some symbols may have a 0 frequency. In that case they should not be
 * present in the data.
 */
void fse_init_value_decoder_table(int nstates, int nsymbols,
                                  const uint16_t *freq,
                                  const uint8_t *symbol_vbits,
                                  const int32_t *symbol_vbase,
                                  fse_value_decoder_entry *t);
/*! @abstract Normalize a table \c t[nsymbols] of occurrences to
 * \c freq[nsymbols]. Declaration only; the implementation lives elsewhere. */
void fse_normalize_freq(int nstates, int nsymbols, const uint32_t *t,
                        uint16_t *freq);
|
package com.breakersoft.plow.crond;
import java.util.List;
import java.util.UUID;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import com.breakersoft.plow.ExitStatus;
import com.breakersoft.plow.Signal;
import com.breakersoft.plow.Task;
import com.breakersoft.plow.dispatcher.DispatchService;
import com.breakersoft.plow.dispatcher.domain.DispatchProc;
import com.breakersoft.plow.service.JobService;
import com.breakersoft.plow.thrift.TaskState;
/**
*
* Cleans up orphaned procs.
*
* @author chambers
*
*/
/**
 * Crond task that finds procs no longer attached to a live owner and releases
 * them. For each orphan: if the proc still references a task, that task is
 * stopped back to WAITING so it can be re-dispatched; the proc itself is then
 * deallocated.
 */
public class OrphanProcChecker extends AbstractCrondTask {

    private static final Logger logger = LoggerFactory.getLogger(OrphanProcChecker.class);

    @Autowired
    DispatchService dispatchService;

    @Autowired
    JobService jobService;

    public OrphanProcChecker() {
        super(CrondTask.ORPHAN_PROC_CHECK);
    }

    protected void run() {
        final List<DispatchProc> procs = dispatchService.getOrphanProcs();
        logger.info("Orphan proc checker found {} orphan procs.", procs.size());
        for (DispatchProc proc: procs) {
            try {
                final UUID taskId = proc.getTaskId();
                // Use the captured id rather than calling the getter twice.
                if (taskId != null) {
                    final Task task = jobService.getTask(taskId);
                    logger.warn("Found orphaned {}", task);
                    dispatchService.stopTask(task, TaskState.WAITING,
                            ExitStatus.FAIL, Signal.ORPANED_TASK);
                }
                logger.warn("Deallocating orphan {}", proc);
                dispatchService.deallocateProc(proc, "orphaned");
            } catch (Exception e) {
                // Keep processing the remaining orphans even if one fails.
                logger.warn("Failed to handle orphaned proc: {}", e.getMessage(), e);
            }
        }
    }
}
|
# Pretty `docker ps -a`: header, running containers (striped rows), exited
# containers (dimmed), then `docker images` continuing the same striping.
docker_psa() {
  clear
  ps_result=$(docker ps --all --format "table {{.Image}}\t{{.Names}}\t{{.Ports}}\t{{.Status}}")
  ps_table_header=$(echo "${ps_result}" | head --lines=1)
  # Running instances, listed first.
  ps_table_rows_up=$(
    echo "${ps_result}" |
      tail --lines=+2 |
      \grep "Up" |
      sort --ignore-leading-blanks --version-sort --key=4 --key=2 --key=1
  )
  # Exited instances, listed after the running ones.
  ps_table_rows_exited=$(
    echo "${ps_result}" |
      tail --lines=+2 |
      \grep "Exited" |
      sort --ignore-leading-blanks --version-sort --key=4 --key=2 --key=1
  )
  i=0
  echo "${ps_table_header}"
  # Feed the loops with here-strings instead of `echo | while`: a piped
  # `while` runs in a subshell, so increments to `i` were lost and the row
  # striping restarted in every section.
  while read -r row; do
    if [ $(( i % 2 )) -eq 0 ]; then
      echo -e "\e[48;5;235m${row}\e[0m"
    else
      echo -e "\e[48;5;232m${row}\e[0m"
    fi
    i=$(( i + 1 ))
  done <<< "${ps_table_rows_up}"
  while read -r row; do
    echo -e "\e[2;40;97m${row}\e[0m"
    i=$(( i + 1 ))
  done <<< "${ps_table_rows_exited}"
  echo
  images_result="$(docker images)"
  images_table_header=$(echo "${images_result}" | head --lines=1)
  images_table_rows=$(
    echo "${images_result}" |
      tail --lines=+2
  )
  echo "${images_table_header}"
  while read -r row; do
    if [ $(( i % 2 )) -eq 0 ]; then
      echo -e "\e[48;5;235m${row}\e[0m"
    else
      echo -e "\e[48;5;232m${row}\e[0m"
    fi
    i=$(( i + 1 ))
  done <<< "${images_table_rows}"
}
docker_image_prune() {
  # Use `docker image prune --all' for removing dangling and unused images
  # (images not referenced by any container). Prompts for confirmation.
  docker image prune --all
}
# Docker shortcuts.
# NOTE(review): short names like `stop` and `rmi` shadow any same-named
# commands in PATH for interactive shells — confirm that is intended.
alias attach="docker attach"
alias dc="docker"
alias doc="docker"
alias img="clear; docker images; echo; docker ps -a"
alias pause="docker pause"
alias prune="docker_image_prune"
alias psa="docker_psa"
alias rmi="clear; docker rmi"
alias stop="docker stop"
|
package org.multibit.hd.core.files;
/** Placeholder test class for SecureFiles secure-deletion behavior. */
public class SecureFilesTest {
  // TODO (GR) Verify operation
  // NOTE(review): no @Test annotation and an empty body — this method will
  // not run under JUnit 4; confirm whether it should be implemented or removed.
  public void testSecureDelete() {
  }
}
|
def searchElement(arr, element):
    """Return the index of the first occurrence of ``element`` in ``arr``.

    Args:
        arr: Sequence to search (handles the empty sequence).
        element: Value to look for, compared with ``==``.

    Returns:
        Zero-based index of the first match, or ``-1`` if not found
        (``str.find``-style sentinel).
    """
    # enumerate() avoids manual index arithmetic over range(len(arr)).
    for index, value in enumerate(arr):
        if value == element:
            return index
    return -1
<filename>cyder/cydns/search_utils.py
from django.db.models import Q
from django.core.exceptions import ObjectDoesNotExist
def smart_fqdn_exists(fqdn, *args, **kwargs):
    """
    Searching for a fqdn by actually looking at a fqdn is very inefficient.
    Instead we should:
        1) Look for a domain with the name of fqdn.
        2) Look for a label = fqdn.split('.')[0]
            and domain = fqdn.split('.')[1:]

    Returns the first non-empty queryset found, or None implicitly when
    nothing matches. Keyword flags are forwarded to
    _build_label_domain_queries to restrict the record types searched.
    """
    from cyder.cydns.domain.models import Domain
    # Try approach 1
    try:
        search_domain = Domain.objects.get(name=fqdn)
        # A domain-level match means the record has an empty label.
        label = ''
    except ObjectDoesNotExist:
        # `label` is left unassigned here, but the guard below skips the
        # loop entirely in that case, so it is never read.
        search_domain = None
    if search_domain:
        for type_, qset in _build_label_domain_queries(
                label, search_domain, **kwargs):
            if qset.exists():
                return qset

    # Try approach 2
    search_domain = None
    # A single-label name cannot be split into label + parent domain.
    if len(fqdn.split('.')) == 1:
        return None
    try:
        label = fqdn.split('.')[0]
        domain_name = '.'.join(fqdn.split('.')[1:])
        search_domain = Domain.objects.get(
            name=domain_name)
    except ObjectDoesNotExist:
        search_domain = None
    if search_domain:
        for type_, qset in _build_label_domain_queries(
                label, search_domain, **kwargs):
            if qset.exists():
                return qset
def _build_label_domain_queries(label, domain, mx=True, sr=True, tx=True,
                                cn=True, ar=True, intr=True, ns=True, ss=True):
    """Build ``(type_name, queryset)`` pairs for records matching
    ``label`` + ``domain``.

    Each keyword flag toggles one record type. Nameserver records carry no
    label, so they are only queried when ``label`` is the empty string.
    The returned querysets are lazy (not yet evaluated).
    """
    # We import this way to make it easier to import this file without
    # getting cyclic imports.
    qsets = []
    if mx:
        from cyder.cydns.mx.models import MX
        qsets.append(('MX', MX.objects.
                      filter(label=label, domain=domain)))
    if ns:
        if label == '':
            from cyder.cydns.nameserver.models import Nameserver
            qsets.append(('NS', Nameserver.objects.
                          filter(domain=domain)))
    if sr:
        from cyder.cydns.srv.models import SRV
        qsets.append(('SRV', SRV.objects.
                      filter(label=label, domain=domain)))
    if tx:
        from cyder.cydns.txt.models import TXT
        qsets.append(('TXT', TXT.objects.
                      filter(label=label, domain=domain)))
    if ss:
        from cyder.cydns.sshfp.models import SSHFP
        qsets.append(('SSHFP', SSHFP.objects.
                      filter(label=label, domain=domain)))
    if cn:
        from cyder.cydns.cname.models import CNAME
        qsets.append(('CNAME', CNAME.objects.
                      filter(label=label, domain=domain)))
    if ar:
        from cyder.cydns.address_record.models import AddressRecord
        ars = AddressRecord.objects.filter(label=label, domain=domain)
        qsets.append(('AddressRecord', ars))
    if intr:
        from cyder.cydhcp.interface.static_intr.models import StaticInterface
        intrs = StaticInterface.objects.filter(label=label, domain=domain)
        # Was mislabeled 'AddressRecord' (copy-paste); these are interfaces.
        qsets.append(('StaticInterface', intrs))
    return qsets
|
#!/bin/bash -e
# Image name and version consumed by the shared build script sourced below.
IMAGE="xeon-centos76-analytics-ffmpeg"
VERSION="20.1"
# Absolute directory of this script, with symlinks resolved.
DIR=$(dirname $(readlink -f "$0"))
. "${DIR}/../../../../script/build.sh"
|
#!/bin/bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#project directory
BASE_DIR=$(cd "$(dirname "$0")"/../;pwd)
AS_USER=$(whoami)
export LOG_DIR="$BASE_DIR/logs"
# Quote paths/users so directories containing spaces do not word-split.
mkdir -p "$LOG_DIR"
chown -R "$AS_USER" "$LOG_DIR"

# find java home
if [ -z "$JAVA_HOME" ]; then
    export JAVA=$(which java)
    export JPS=$(which jps)
else
    export JAVA="$JAVA_HOME/bin/java"
    export JPS="$JAVA_HOME/bin/jps"
fi

# Heap defaults, overridable via AGENT_JVM_HEAP_OPTS.
if [ -z "$AGENT_JVM_HEAP_OPTS" ]; then
    HEAP_OPTS="-Xms512m -Xmx6656m"
else
    HEAP_OPTS="$AGENT_JVM_HEAP_OPTS"
fi

# CMS GC tuning plus GC logging into $BASE_DIR/logs (timestamped file).
GC_OPTS="-XX:SurvivorRatio=6 -XX:+UseMembar -XX:+UseConcMarkSweepGC -XX:+CMSParallelRemarkEnabled -XX:+CMSScavengeBeforeRemark -XX:ParallelCMSThreads=3 -XX:+TieredCompilation -XX:+UseCMSCompactAtFullCollection -verbose:gc -Xloggc:$BASE_DIR/logs/gc.log.$(date +%Y-%m-%d-%H-%M-%S) -XX:+PrintGCDetails -XX:+PrintGCDateStamps -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=$BASE_DIR/logs/ -XX:+CMSClassUnloadingEnabled -XX:CMSInitiatingOccupancyFraction=60 -XX:CMSFullGCsBeforeCompaction=1 -Dsun.net.inetaddr.ttl=3 -Dsun.net.inetaddr.negative.ttl=1 -Djava.net.preferIPv4Stack=true"

AGENT_JVM_ARGS="$HEAP_OPTS $GC_OPTS"

# Add Agent Rmi Args when necessary
AGENT_RMI_ARGS="-Dcom.sun.management.jmxremote \
 -Dcom.sun.management.jmxremote.port=18080 -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false"

CONFIG_DIR=${BASE_DIR}"/conf/"
JAR_LIBS=${BASE_DIR}"/lib/*"
CLASSPATH=${CONFIG_DIR}:${JAR_LIBS}

# JMX (RMI) args are only attached when Prometheus export is disabled.
JMX_ENABLED=$(grep -c "agent.prometheus.enable=false" "$BASE_DIR/conf/agent.properties")
if [[ "$JMX_ENABLED" == 1 ]]; then
    export AGENT_ARGS="$AGENT_JVM_ARGS $AGENT_RMI_ARGS -cp $CLASSPATH -Dagent.home=$BASE_DIR"
else
    export AGENT_ARGS="$AGENT_JVM_ARGS -cp $CLASSPATH -Dagent.home=$BASE_DIR"
fi
#define HAS_VTK 1
#include "LaShellShellIntersection.h"
#include <numeric>
/*
* Author:
* Dr. <NAME>
* Department of Biomedical Engineering, King's College London
* Email: rashed 'dot' <EMAIL>
* Copyright (c) 2017
*
*
* Intersection of vertex normals of source mesh with a target mesh. After intersection, copies the scalar values of the target to the source
 * The output is a mesh with same topology as the source but containing values from the intersection
*
*/
// Entry point: parse -i1/-i2/-o (mandatory) and --reverse (optional), then
// copy target scalars onto the source mesh via vertex-normal intersection.
int main(int argc, char * argv[])
{
    char* input_f1, *input_f2, *output_f;
    int direction = 1;
    bool foundArgs1 = false, foundArgs2 = false, foundArgs3 = false;

    // Parse command line. (The previous `if (argc >= 1)` wrapper was always
    // true — argc is at least 1 — so it was removed.)
    for (int i = 1; i < argc; i++) {
        if (i + 1 != argc) {
            if (string(argv[i]) == "-i1") {
                input_f1 = argv[i + 1];
                foundArgs1 = true;
            }
            else if (string(argv[i]) == "-i2") {
                input_f2 = argv[i + 1];
                foundArgs2 = true;
            }
            else if (string(argv[i]) == "-o") {
                output_f = argv[i + 1];
                foundArgs3 = true;
            }
        }
        else if (string(argv[i]) == "--reverse") {
            direction = -1;
        }
    }

    if (!(foundArgs1 && foundArgs2 && foundArgs3))
    {
        // "Cheeck" typo fixed in the user-facing usage message.
        cerr << "Check your parameters\n\nUsage:"
            "\nCopies the target scalars to source\nbased on source vertex normal intersection with target\n\n"
            "\n(Mandatory)\n\t-i1 <source_mesh_vtk> \n\t-i2 <target_mesh_vtk> \n\t-o <output_vtk>\n====Optional======\n\n\t--reverse <reverse the direction of intersection search>" << endl;
        exit(1);
    }
    else
    {
        // NOTE(review): these heap allocations are never freed; acceptable
        // for a run-once tool since the process exits right after, but
        // smart pointers would make ownership explicit.
        LaShell* source = new LaShell(input_f1);
        LaShell* target = new LaShell(input_f2);
        LaShell* la_out = new LaShell(input_f2);

        LaShellShellIntersection* wt = new LaShellShellIntersection();
        wt->SetInputData(source);
        wt->SetInputData2(target);
        wt->SetMapIntersectionToCopyScalar();

        if (direction < 0) {
            cout << "\n\nImportant: Computing intersection in the reverse direction to surface normals pointing outwards" << endl;
            wt->SetDirectionToOppositeNormal();
        }
        else {
            cout << "\n\nComputing intersection in surface normal direction (pointing outwards)" << endl;
        }

        wt->Update();
        la_out = wt->GetOutput();
        la_out->ExportVTK(output_f);
    }
}
/**
 * Build an add/search/list widget inside the container matched by `selector`.
 *
 * @param {string} selector - jQuery selector for the host container.
 * @param {boolean} isCaseSensitive - Whether searching compares exact case.
 */
function domSearch(selector, isCaseSensitive) {
    let mainContainer = $(selector);

    let addControlsDiv = $("<div>")
        .addClass("add-controls")
        .appendTo(mainContainer);
    let addLabel = $("<label>")
        .text("Enter text: ")
        .appendTo(addControlsDiv);
    let addInput = $("<input>")
        .appendTo(addLabel);
    let addButton = $("<a>")
        .text("Add")
        .addClass("button")
        .css("display", "inline-block")
        .on("click", addItem)
        .appendTo(addControlsDiv);

    let searchControlsDiv = $("<div>")
        .addClass("search-controls")
        .appendTo(mainContainer);
    let searchLabel = $("<label>")
        .text("Search: ")
        .appendTo(searchControlsDiv);
    let searchInput = $("<input>")
        .appendTo(searchLabel)
        .on("input", searchItem);

    let resultControlsDiv = $("<div>")
        .addClass("result-controls")
        .appendTo(mainContainer);
    let itemList = $("<ul>")
        .addClass("items-list")
        .appendTo(resultControlsDiv);

    // Append the current input value as a new list item with a remove button.
    function addItem() {
        let li = $("<li>")
            .addClass("list-item")
            .appendTo(itemList);
        $("<a>")
            .addClass("button")
            .text("X")
            .on("click", function () {
                $(this).parent().remove();
            })
            .appendTo(li);
        $("<strong>")
            .text(addInput.val())
            .appendTo(li);
        addInput.val("");
    }

    // Show only the items whose text contains the query. The two original
    // case-sensitivity branches duplicated the whole loop and differed only
    // in lower-casing, so they are collapsed into one pass + a normalizer.
    function searchItem() {
        // NOTE(review): selects every `ul li` on the page, not just this
        // widget's list — preserved as-is; scope to itemList if unintended.
        let allElements = $("ul li");
        let query = searchInput.val();
        // Empty query: everything is visible.
        if (query.length === 0) {
            allElements.css("display", "block");
            return;
        }
        let normalize = isCaseSensitive
            ? (text) => text
            : (text) => text.toLowerCase();
        let needle = normalize(query);
        allElements.toArray().forEach((li) => {
            let visible = normalize($(li).text()).includes(needle);
            $(li).css("display", visible ? "block" : "none");
        });
    }
}
|
<gh_stars>0
from setuptools import setup, find_packages
# Packaging metadata for the `clean_folder` homework package.
setup(
    name="clean_folder",
    version="0.0.1",
    author="VVP",
    author_email="<EMAIL>",
    description="goit hw 8",
    # Pinned runtime dependencies.
    install_requires=[
        'six==1.15.0',
        'transliterate == 1.10.2'
    ],
    # Installs a `clean-folder` console command that calls
    # clean_folder.clean:main.
    entry_points={
        'console_scripts': [
            'clean-folder=clean_folder.clean:main'
        ]
    },
    # url="https://github.com/vvpwork/goit-python/tree/main/lesson_7",
    # NOTE(review): 'clean_folder.clean' looks like a module, not a
    # subpackage — confirm find_packages() actually matches it.
    packages=find_packages(include=['clean_folder', 'clean_folder.clean']),
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
    python_requires='>=3.6',
)
|
package net.archwill.play.redis
import javax.inject.Provider
import com.typesafe.config.Config
import play.api.inject.{Binding, Module}
import play.api.{Configuration, Environment}
import redis.clients.jedis.{JedisPool, JedisPoolConfig}
/** Wires the Redis components for Play DI: a [[RedisConfig]] read from the
 *  `redis` section of the application configuration, a [[JedisPool]]
 *  provider, and the [[RedisLocalCache]] bound to itself. */
private[redis] abstract class BaseRedisModule extends Module {

  override def bindings(environment: Environment, configuration: Configuration): Seq[Binding[_]] = Seq(
    bind[RedisConfig].to(new RedisConfigProvider(configuration.underlying.getConfig("redis"))),
    bind[JedisPool].toProvider[JedisPoolProvider],
    bind[RedisLocalCache].toSelf
  )

}
/** Builds a [[RedisConfig]] from a Typesafe `Config` node (the `redis`
 *  subtree). The value is computed lazily, once, on first access. */
private[redis] class RedisConfigProvider(config: Config) extends Provider[RedisConfig] {

  override lazy val get: RedisConfig = RedisConfig(
    host = config.getString("host"),
    port = config.getInt("port"),
    timeout = config.getDuration("timeout"),
    // A literal `null` password in the configuration means "no AUTH".
    password = if (!config.getIsNull("password")) Some(config.getString("password")) else None,
    database = config.getInt("database"),
    poolConfig = {
      val c = new JedisPoolConfig
      c.setMinIdle(config.getInt("pool.min-idle"))
      c.setMaxIdle(config.getInt("pool.max-idle"))
      c.setMaxTotal(config.getInt("pool.max-total"))
      c
    },
    // Byte-size threshold (presumably the size above which values get
    // compressed — confirm against RedisLocalCache usage).
    compressThreshold = config.getBytes("compress-threshold").toInt,
    localCache = RedisLocalCacheConfig(
      maxSize = config.getInt("local-cache.max-size"),
      expiration = {
        if (!config.getIsNull("local-cache.expiration"))
          Some(config.getDuration("local-cache.expiration"))
        else
          None
      }
    )
  )

}
|
<gh_stars>0
#ifndef THREADS_H
#define THREADS_H
#include "q.h"
#include "TCB.h"
// Global queue of TCBs
// NOTE(review): these globals — and the non-static function definitions
// below — live in a header; including this file from more than one
// translation unit will cause multiple-definition link errors. Consider
// `static`, `extern` + a .c file, or C17 does not help here — confirm the
// project only includes it once.
q_element *ReadyQ;
// Global pointer to thread under execution
q_element *Curr_Thread;
// Global thread id count
int id_count = 0;
// Return the integer thread id stored in a queue element's TCB payload.
int get_id (q_element *thread)
{
  // The payload is a void* that always points at a TCB_t for thread queues.
  TCB_t *tcb = (TCB_t *)(thread->payload);
  return tcb->thread_id;
};
// Return the address of the ucontext_t embedded in a queue element's TCB.
ucontext_t *get_context_ptr(q_element *item)
{
  // The payload is a void* that always points at a TCB_t for thread queues.
  TCB_t *tcb = (TCB_t *)(item->payload);
  return &(tcb->context);
};
// Create a thread that will run `function(args)` and append it to the ready
// queue. Does not start execution; the scheduler (run/yield) does that.
void start_thread(void *function, void *args)
{
  // allocate a stack (via malloc) of a certain size (choose 8192)
  int stack_size = 8192;
  void *stack = malloc(stack_size);
  // allocate a TCB (via malloc)
  // NOTE(review): neither malloc result nor NewItem() is checked for NULL;
  // a failed allocation would crash inside init_TCB.
  TCB_t *tcb = (TCB_t*) malloc(sizeof(TCB_t));
  q_element *thread = NewItem();
  thread->payload = (void*) tcb;
  // call init_TCB with appropriate arguments
  init_TCB(tcb, function, args, stack, stack_size);
  // Add a thread_id (use a counter)
  tcb->thread_id = id_count;
  id_count++;
  // call addQ to add this TCB into the "ReadyQ" which is a global head pointer
  AddQueue(&ReadyQ, thread);
};
// Start multithreading: pop the first thread off the ready queue and switch
// to it. The caller's context is saved in a throwaway local so the first
// swapcontext has somewhere to store it.
void run()
{
  Curr_Thread = DelQueue(&ReadyQ);
  ucontext_t parent; // get a place to store the main context, for faking
  getcontext(&parent); // magic sauce
  swapcontext(&parent, get_context_ptr(Curr_Thread)); // start the first round
};
// Cooperatively give up the CPU: requeue the current thread at the tail of
// the ready queue, dequeue the next thread, and context-switch to it.
void yield() // similar to run
{
  q_element *Prev_Thread;
  AddQueue(&ReadyQ, Curr_Thread);
  Prev_Thread = Curr_Thread;
  Curr_Thread = DelQueue(&ReadyQ);
  // swap the context, from Prev_Thread to the thread pointed to Curr_Thread
  swapcontext(get_context_ptr(Prev_Thread), get_context_ptr(Curr_Thread));
};
#endif
|
import java.util.Random;
import java.util.UUID;
/**
 * Builds and prints the initial chess position as an 8x8 grid of
 * single-letter piece codes (R, N, B, Q, K, P) with " " for empty squares.
 * Rows 0/1 hold the black pieces, rows 6/7 the white pieces.
 *
 * Note: the unused private helper word() (a UUID-based random letter) was
 * dead code and has been removed.
 */
public class chess {

    /**
     * Generate the standard chess starting position.
     *
     * @return an 8x8 board; row index 0 is black's back rank.
     */
    public static String[][] generateChessboard() {
        String[][] chessboard = new String[8][8];

        // Black back rank and pawns.
        chessboard[0] = new String[]{"R", "N", "B", "Q", "K", "B", "N", "R"};
        for (int i = 0; i < 8; i++) {
            chessboard[1][i] = "P";
        }

        // Empty middle of the board.
        for (int i = 2; i < 6; i++) {
            for (int j = 0; j < 8; j++) {
                chessboard[i][j] = " ";
            }
        }

        // White back rank and pawns.
        chessboard[7] = new String[]{"R", "N", "B", "Q", "K", "B", "N", "R"};
        for (int i = 0; i < 8; i++) {
            chessboard[6][i] = "P";
        }
        return chessboard;
    }

    /**
     * Print the board to stdout, one rank per line, squares space-separated.
     *
     * @param chessboard an 8x8 board as produced by generateChessboard()
     */
    public static void printChessboard(String[][] chessboard) {
        for (int i = 0; i < 8; i++) {
            for (int j = 0; j < 8; j++) {
                System.out.print(chessboard[i][j] + " ");
            }
            System.out.println();
        }
    }

    public static void main(String[] args) {
        printChessboard(generateChessboard());
    }
}
// Copyright (c) 2008-2018, Hazelcast, Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License")
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package predicates
import (
. "github.com/hazelcast/hazelcast-go-client/serialization"
)
// PredicateFactory creates empty predicate instances from their
// serialization class ids (see Create below). It is stateless.
type PredicateFactory struct {
}

// NewPredicateFactory returns a new PredicateFactory.
func NewPredicateFactory() *PredicateFactory {
	return &PredicateFactory{}
}
// Create returns a fresh, zero-valued predicate for the given serialization
// class id, or nil when the id is not a known predicate type. Used as an
// IdentifiedDataSerializable factory during deserialization.
func (pf *PredicateFactory) Create(id int32) IdentifiedDataSerializable {
	switch id {
	case SQL_PREDICATE:
		return &SqlPredicate{}
	case AND_PREDICATE:
		return &AndPredicate{}
	case BETWEEN_PREDICATE:
		return &BetweenPredicate{}
	case EQUAL_PREDICATE:
		return &EqualPredicate{}
	case GREATERLESS_PREDICATE:
		return &GreaterLessPredicate{}
	case LIKE_PREDICATE:
		return &LikePredicate{}
	case ILIKE_PREDICATE:
		return &ILikePredicate{}
	case IN_PREDICATE:
		return &InPredicate{}
	case INSTANCEOF_PREDICATE:
		return &InstanceOfPredicate{}
	case NOTEQUAL_PREDICATE:
		return &NotEqualPredicate{}
	case NOT_PREDICATE:
		return &NotPredicate{}
	case OR_PREDICATE:
		return &OrPredicate{}
	case REGEX_PREDICATE:
		return &RegexPredicate{}
	case FALSE_PREDICATE:
		return &FalsePredicate{}
	case TRUE_PREDICATE:
		return &TruePredicate{}
	default:
		return nil
	}
}
|
<reponame>tildeio/ember-template-string-interpolation
module.exports = function(env) {
let { builders: b, parse } = env.syntax;
return {
name: 'StringInterpolationPlugin',
visitor: {
MustacheStatement(node) {
if (isInterpolatedString(node.path)) {
let value = node.path.value;
let result = parse(`<p foo="${value}"/>`);
let element = result.body[0]; // <p>
let foo = element.attributes[0]; // <p foo=?>
let { parts } = foo.value;
let params = parts.map(node => {
switch (node.type) {
case 'MustacheStatement':
return node.path;
case 'TextNode':
return b.string(node.chars);
default:
throw new Error(`Don't know how to handle ${node.type} yet`);
}
});
return b.mustache(
/* path */ b.path('concat', node.loc),
/* params */ params,
/* hash */ undefined,
/* raw */ node.raw,
/* loc */ node.loc,
/* strip */ node.strip,
);
}
},
StringLiteral(node) {
if (isInterpolatedString(node)) {
let { value } = node;
let result = parse(`<p foo="${value}"/>`);
let element = result.body[0]; // <p>
let foo = element.attributes[0]; // <p foo=?>
let { parts } = foo.value;
let params = parts.map(node => {
switch (node.type) {
case 'MustacheStatement':
return node.path;
case 'TextNode':
return b.string(node.chars);
default:
throw new Error(`Don't know how to handle ${node.type} yet`);
}
});
return b.sexpr(
/* path */ b.path('concat', node.loc),
/* params */ params,
/* hash */ undefined,
/* loc */ node.loc
);
}
}
},
};
};
/**
 * Determines whether an AST node is a string literal containing at least one
 * `{{...}}` interpolation segment.
 *
 * @param {Object} node Any AST node.
 * @returns {Boolean} True only for a `StringLiteral` embedding a mustache.
 */
function isInterpolatedString(node) {
  // `.test` yields a real boolean; the original `.match` returned an
  // array/null that callers only ever used for truthiness.
  return node.type === 'StringLiteral' && /{{.+}}/.test(node.value);
}
|
<reponame>easypizi/heroku-node-telegram-bot<filename>bot.js
const token = process.env.TOKEN;
const SteamAPI = require("steamapi");
const Bot = require("node-telegram-bot-api");
let bot;
let allWeaponsID = {
1: "Desert Eagle",
2: "Dual Berettas",
3: "Five-SeveN",
4: "Glock-18",
7: "AK-47",
8: "AUG",
9: "AWP",
10: "FAMAS",
11: "G3SG1",
13: "Galil AR",
14: "M249",
16: "M4A4",
17: "MAC-10",
19: "P90",
23: "MP5-SD",
24: "UMP-45",
25: "XM1014",
26: "PP-Bizon",
27: "MAG-7",
28: "Negev",
29: "Sawed-Off",
30: "Tec-9",
31: "Zeus x27",
32: "P2000",
33: "MP7",
34: "MP9",
35: "Nova",
36: "P250",
38: "SCAR-20",
39: "SG 553",
40: "SSG 08",
41: "Knife",
42: "Knife",
43: "Flashbang",
44: "High Explosive Grenade",
45: "Smoke Grenade",
46: "Molotov",
47: "Decoy Grenade",
48: "Incendiary Grenade",
49: "C4 Explosive",
59: "Knife",
60: "M4A1-S",
61: "USP-S",
63: "CZ75-Auto",
64: "R8 Revolver",
74: "Knife",
500: "Bayonet",
503: "Classic Knife",
505: "Flip Knife",
506: "Gut Knife",
507: "Karambit",
508: "M9 Bayonet",
509: "Huntsman Knife",
512: "Falchion Knife",
514: "Bowie Knife",
515: "Butterfly Knife",
516: "Shadow Daggers",
517: "Paracord Knife",
518: "Survival Knife",
519: "Ursus Knife",
520: "Navaja Knife",
521: "Nomad Knife",
522: "Stiletto Knife",
523: "Talon Knife",
525: "Skeleton Knife",
};
if (process.env.NODE_ENV === "production") {
bot = new Bot(token);
bot.setWebHook(process.env.HEROKU_URL + bot.token);
} else {
bot = new Bot(token, { polling: true });
}
const steam = new SteamAPI(process.env.STEAM);
const allGuns = {
pistols: ["glock", "fiveseven", "p250", "tec9", "deagle", "hkp2000"],
rifles: ["famas", "galilar", "ak47", "aug", "sg556", "m4a1"],
sniper: ["awp", "g3sg1", "scar20", "ssg08"],
shotgun: ["xm1014", "nova", "sawedoff", "mag7"],
smallgun: ["mac10", "p90", "ump45", "mp7", "mp9", "bizon"],
heavy: ["m249", "negev"],
};
console.log("Bot server started in the " + process.env.NODE_ENV + " mode");
let StateMachine = {
WELCOME: "welcome",
REQUEST: "request",
RESPONSE: "response",
EXIT: "exit",
};
this.steamID = "";
this.accuracy = "";
this.summary = {};
this.adr = 0;
this.waitingForID = false;
this.nickname = "";
this.stattrak = [];
bot.on("polling_error", (error) => {
console.log(error); // => 'EFATAL'
});
// Main text handler: routes slash commands and drives the one-time SteamID
// authorization flow (prompt for an id, resolve it, then serve stats).
bot.on("text", (msg) => {
  const name = msg.from.first_name;
  // HELP command
  if (msg.text.includes("/help")) {
    bot.sendMessage(
      msg.chat.id,
      `Hello ${name}! This is a list of all possible commands:
      /getkdr - Get your kill damage ratio and total average accuracy.
      /getbest - Get best weapon in each category, depends on kill.
      /last - get stats of your last match result.
      /stattrak - get all kill stats.
      /reset - Drop off all search data.
      `
    );
    return;
  }
  // Authorization
  if (this.steamID === "") {
    if (this.waitingForID) {
      // The message after the prompt is treated as a Steam vanity-URL name
      // and resolved to a 64-bit SteamID.
      steam
        .resolve(`https://steamcommunity.com/id/${msg.text}`)
        .then((id) => {
          console.log("ID:", id);
          if (typeof id === "string" && id !== undefined && id !== null) {
            this.waitingForID = false;
            this.steamID = id;
            this.nickname = msg.text;
          } else {
            return;
          }
          // Warm the cached profile/stats; both are fire-and-forget.
          getUserData(this.steamID);
          getUserStats(this.steamID);
          bot.sendMessage(
            msg.chat.id,
            `Hello ${this.nickname}! This is your steamID: ${this.steamID}
           Wanna some stats? Use /getkdr or /getbest to get your info!`
          );
        })
        .catch((error) => {
          // Resolution failed (bad id or private profile): inform and reset.
          bot.sendMessage(
            msg.chat.id,
            `${name}! This steamID looks weird or you CSGO Data is closed by privacy. Check is your ID is correct and also check privacy settings in your steam.`
          );
          resetBot(msg);
          console.log("This is error in SteamID detection: ", error);
        });
    } else {
      // First contact: ask for the SteamID and arm the flag above.
      bot.sendMessage(
        msg.chat.id,
        `Hello ${name}! Please send me your steam ID. I need it to identify you.`
      );
      this.waitingForID = true;
    }
  } else {
    // RESET command
    if (msg.text.includes("/reset")) {
      resetBot(msg);
      return;
    }
    if (msg.text.includes("/getkdr")) {
      getKdr(msg);
      return;
    }
    if (msg.text.includes("/getbest")) {
      getMostEffectiveGun(msg);
      return;
    }
    if (msg.text.includes("/last")) {
      getLastMatchData(msg);
      return;
    }
    if (msg.text.includes("/stattrak")) {
      // stattrak is filled as a side effect of /getbest, hence the guard.
      if (this.stattrak.length > 0) {
        getStatTrak(msg);
      } else {
        bot.sendMessage(
          msg.chat.id,
          `Hello ${name}! Not enough data for get stattrak - call /getbest firstly and then repeat to stattrak.`
        );
      }
      return;
    }
    // Unknown text: point the user at /help.
    bot.sendMessage(
      msg.chat.id,
      `Hello ${name}! How can I help you? Choose any command use /help to show all commands list`
    );
  }
});
// Lifetime kill/death ratio from the Steam stats payload.
// NOTE(review): despite the name this computes K/D, not average damage per
// round — confirm intent with the author.
const countAdr = (summary) => {
  const { total_kills: kills, total_deaths: deaths } = summary.stats;
  return kills / deaths;
};
// Lifetime shot accuracy as a percentage (hits / shots fired * 100).
const countAccuracy = (summary) => {
  const { total_shots_hit: hits, total_shots_fired: fired } = summary.stats;
  return (hits / fired) * 100;
};
// /getkdr handler: recompute the cached ratio/accuracy from the stored
// summary, message them to the chat and return the ratio.
const getKdr = (msg) => {
  this.adr = countAdr(this.summary);
  this.accuracy = countAccuracy(this.summary);
  const reply =
    `Well, ${this.nickname}... ` +
    `Your KDR: ${this.adr.toFixed(3)}. Your average accuracy: ${this.accuracy.toFixed(3)}`;
  bot.sendMessage(msg.chat.id, reply);
  return this.adr;
};
// Get user CSGO Data
const getUserData = (steamId) => {
if (typeof steamId !== "string") {
return;
}
try {
steam.getUserSummary(steamId).then((data) => {
// console.log(data)
this.nickname = data.nickname;
});
} catch (error) {
console.log("Error in userData");
}
};
// Get user CSGO statistics
const getUserStats = (steamId) => {
if (typeof steamId !== "string") {
return;
}
try {
steam.getUserStats(steamId, "730").then((summary) => {
this.summary = summary;
});
} catch (error) {
console.log("Error in userStats");
}
};
// Get last match Data
const getLastMatchData = (msg) => {
let lastMatchData = {};
Object.entries(this.summary.stats).forEach((item) => {
if (item[0].includes("last")) {
lastMatchData[item[0]] = item[1];
}
});
let allrounds =
lastMatchData.last_match_t_wins + lastMatchData.last_match_ct_wins;
let lastMatch = {
mode: lastMatchData["last_match_rounds"] >= 30 ? "competetive" : "casual",
kills: lastMatchData["last_match_kills"],
deaths: lastMatchData["last_match_deaths"],
kdr: lastMatchData["last_match_kills"] / lastMatchData["last_match_deaths"],
mvp: lastMatchData["last_match_mvps"],
fav_weapon:
allWeaponsID[Number(lastMatchData.last_match_favweapon_id)] !== undefined
? allWeaponsID[Number(lastMatchData.last_match_favweapon_id)]
: "none",
fav_weapon_kills: lastMatchData.last_match_favweapon_kills,
average_damage: lastMatchData.last_match_damage / allrounds,
};
bot.sendMessage(
msg.chat.id,
`WP ${this.nickname}!
Last Match data:
----------------------------------
You have played in ${lastMatch.mode} mode;
You've made ${lastMatch.kills} frags, and died ${lastMatch.deaths} times.
KDR: ${lastMatch.kdr.toFixed(2)};
${lastMatch.kdr >= 1 ? "Good Job!" : "You can do better maaaan!"}
----------------------------------
ADR: ${lastMatch.average_damage};
${
lastMatch.average_damage >= 100
? "Smoookin shoting, maaaan!"
: "Try harder"
}
----------------------------------
MVP: ${lastMatch.mvp};
${
lastMatch.mvp > 0
? "Who is good boy here?!?"
: "Next time, man... Next time..."
}
----------------------------------
FAV.WEAPON: ${lastMatch.fav_weapon};
FAV.WEAPON KILLS: ${lastMatch.fav_weapon_kills};
${
lastMatch.fav_weapon_kills / lastMatchData["last_match_kills"] >= 0.5
? "You know how to shoot with this baby, try something else"
: "Still not impressive..."
}
----------------------------------
`
);
};
// Reset all bot data
const resetBot = (msg) => {
this.steamID = "";
this.nickname = "";
this.summary = {};
this.adr = 0;
this.accuracy = "";
this.waitingForID = false;
const name = msg.from.first_name;
bot.sendMessage(msg.chat.id, `Ok, ${name}! All data clear!`);
this.stattrak = [];
};
// Return most effecxtive gun in each category
const getMostEffectiveGun = (msg) => {
let totalKills = {
pistols: {
name: "",
kills: 0,
},
rifles: {
name: "",
kills: 0,
},
sniper: {
name: "",
kills: 0,
},
shotgun: {
name: "",
kills: 0,
},
smallgun: {
name: "",
kills: 0,
},
heavy: {
name: "",
kills: 0,
},
};
this.stattrak = [];
Object.entries(this.summary.stats).forEach((item) => {
if (item[0].includes("total_kills_")) {
let weaponName = item[0].replace("total_kills_", "");
this.stattrak.push(item);
Object.entries(allGuns).forEach((type) => {
type[1].forEach((gun) => {
if (gun === weaponName) {
let value = totalKills[type[0]].kills;
// console.log("Category: ", type[0]);
// console.log("Name: ", weaponName);
// console.log("Current value: ", item[1]);
// console.log("Previous value: ", value);
// console.log("///////////");
if (item[1] >= value) {
const patch = "usp-s";
totalKills[type[0]].name =
weaponName === "hkp2000" ? patch : weaponName;
totalKills[type[0]].kills = item[1];
}
}
});
});
}
});
bot.sendMessage(
msg.chat.id,
`Ok, ${this.nickname}!
This is your's most performatic guns:
----------------------
| Pistols | ${totalKills.pistols.name.toUpperCase()} :: ${
totalKills.pistols.kills
} kills.
----------------------
| Riffles | ${totalKills.rifles.name.toUpperCase()} :: ${
totalKills.rifles.kills
} kills.
----------------------
| Sniper Riffles| ${totalKills.sniper.name.toUpperCase()} :: ${
totalKills.sniper.kills
} kills.
----------------------
| Shot Gun| ${totalKills.shotgun.name.toUpperCase()} :: ${
totalKills.shotgun.kills
} kills.
----------------------
| Farm Gun | ${totalKills.smallgun.name.toUpperCase()} :: ${
totalKills.smallgun.kills
} kills.
----------------------
| Heavy weapon | ${totalKills.heavy.name.toUpperCase()} :: ${
totalKills.heavy.kills
} kills.
----------------------
Wow. Awesome!
Also NOW u can run command /stattrak to grab and show all weapon data.
`
);
};
// Get all data for the kills.
const getStatTrak = (msg) => {
let message = "";
this.stattrak.forEach((item) => {
let name = item[0].replace("total_kills_", "");
message =
message +
`\n ${name.toUpperCase()} :: ${
item[1]
} kills\n -------------------------`;
});
bot.sendMessage(
msg.chat.id,
`Ok, ${this.nickname}!
${message}`
);
};
module.exports = bot;
|
'''
Main script with visualization
'''
import pygame
import sys
from os import path, pardir
import time
from maze import *
from pygame_helpers import *
class Game:
    """
    A general visualization class for pygame.

    Drives a single maze-solving run: builds the tile sprites from a Maze
    object, animates the pathfinding, then exits.
    """
    # UI palette constants (RGB).
    DARKPINK = (219, 0, 189)
    DARKBLUE = (95, 0, 219)
    LIGHTBLUE = (138, 138, 219)
    LIGHTPINK = (192, 138, 219)
    BLACK = (0, 0, 0)
    WHITE = (255, 255, 255)
    def __init__(self):
        # Pre-init the mixer with a small buffer to reduce audio latency.
        pygame.mixer.pre_init(44100, -16, 2, 2048)
        pygame.mixer.init()
        pygame.init()
        # Make screen
        self.screen = pygame.display.set_mode((800, 800))
        #self.screen = pygame.display.set_mode((0, 0), pygame.FULLSCREEN)
        self.scr_width, self.scr_height = pygame.display.get_surface().get_size()
        pygame.display.set_caption("Maze Solver")
        # Load data and start te clock
        self.clock = pygame.time.Clock()
        self.load_data()
    def load_data(self):
        """
        Load all the external data.

        NOTE(review): both paths are computed but never stored or used —
        this method is currently dead code; confirm whether asset loading
        was meant to happen here.
        """
        game_folder = path.dirname(path.join(__file__, pardir))
        data_folder = path.join(game_folder, 'data')
    def new(self):
        """
        New game: build the maze model and its tile sprites.
        """
        # initialize all variables and do all the setup for a new game
        self.all_sprites = pygame.sprite.LayeredUpdates()
        self.maze_obj = Maze('data/maze.txt', visualization=self)
        # Animation delay in ms between visualization steps (arrow keys adjust it).
        self.TIMESTEP = 50
        self.create_maze()
    def run(self):
        """
        Run the graphics loop until self.playing goes False (or quit()).
        """
        # game loop - set self.playing = False to end the game
        self.playing = True
        while self.playing:
            # Cap at 60 FPS; dt is the frame time in seconds.
            self.dt = self.clock.tick(60) / 1000.0
            self.events()
            self.draw()
            self.update()
    def create_maze(self):
        """
        Just draw the maze begginning and then just update the tiles
        """
        # One Tile sprite per maze cell (column j, row i).
        for i in range(0, len(self.maze_obj.maze)):
            for j in range(0, len(self.maze_obj.maze[i])):
                Tile(self, self.maze_obj, j, i)
    def quit(self):
        # Shut down pygame and terminate the process.
        pygame.quit()
        sys.exit()
    def update(self):
        """
        The whole visualization: pause, run the solver once, pause, exit.

        NOTE(review): this quits the process after a single pathfinding run,
        so the outer `while True` restart loop never loops — confirm intent.
        """
        pygame.time.wait(2000)
        self.maze_obj.find_path()
        pygame.time.wait(5000)
        self.quit()
    def draw(self):
        """
        Blit everything to the screen each frame
        """
        self.screen.fill((125, 100, 158))
        self.draw_text(f"Delay: {self.TIMESTEP} ms", 40, (51, 16, 97), self.scr_width//2, 50)
        # self.all_sprites.draw(self.screen)
        for sprite in self.all_sprites:
            self.screen.blit(sprite.image, (sprite.rect.x, sprite.rect.y))
        # Present the completed frame.
        pygame.display.flip()
    def events(self):
        # catch all events here
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                self.quit()
            if event.type == pygame.KEYDOWN:
                if event.key == pygame.K_F4:
                    self.quit()
                # Up/down arrows speed up / slow down the animation delay.
                if event.key == pygame.K_UP:
                    self.TIMESTEP -= 10
                if event.key == pygame.K_DOWN:
                    self.TIMESTEP += 10
    def draw_text(self, text, size, color, x, y, align='center', fontname="Consolas"):
        """
        Helper for drawing text on the screen
        """
        # NOTE(review): path.join("Consolas", fontname) produces e.g.
        # "Consolas/Consolas" as the font name — SysFont falls back to the
        # default font when unmatched; confirm whether plain `fontname` was meant.
        font = pygame.font.SysFont(path.join("Consolas", fontname), size)
        text_surface = font.render(text, True, color)
        text_rect = text_surface.get_rect()
        # Position the rendered text according to the requested anchor.
        if align == "nw":
            text_rect.topleft = (x, y)
        if align == "ne":
            text_rect.topright = (x, y)
        if align == "sw":
            text_rect.bottomleft = (x, y)
        if align == "se":
            text_rect.bottomright = (x, y)
        if align == "n":
            text_rect.midtop = (x, y)
        if align == "s":
            text_rect.midbottom = (x, y)
        if align == "e":
            text_rect.midright = (x, y)
        if align == "w":
            text_rect.midleft = (x, y)
        if align == "center":
            text_rect.center = (x, y)
        self.screen.blit(text_surface, text_rect)
if __name__ == "__main__":
    # create the game object
    g = Game()
    # NOTE(review): Game.update() calls quit() after one solver run, so this
    # restart loop effectively executes a single iteration.
    while True:
        g.new()
        g.run()
import { Routes } from '@angular/router';
/**
 * Routes rendered inside the authentication layout.
 * Each feature module is lazy-loaded on first navigation.
 */
export const AuthLayoutRoutes: Routes = [
    {
        // Sign-in page.
        path: 'login',
        loadChildren: () => import('./login/login.module').then(m => m.LoginModule)
    },
    {
        // Account-creation page.
        path: 'register',
        loadChildren: () => import('./register/register.module').then(m => m.RegisterModule)
    }
];
#!/bin/bash
#######################################################################
#
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the Apache License.
#
#######################################################################
#######################################################################
#
# perf_mariadb.sh
# Description:
# Install mariadb and sysbench.
# Run performance test of MariaDB using sysbench.
# This script needs to be run on client VM.
#
# Supported Distros:
# Ubuntu/Centos/RedHat/Debian
# TODO: We will support SUSE and other distros in the future.
#######################################################################
# --- Configuration and preconditions -------------------------------------
CONSTANTS_FILE="./constants.sh"
UTIL_FILE="./utils.sh"
HOMEDIR=$(pwd)
LOG_FOLDER="${HOMEDIR}/mariadb_log"
MARIADB_RESULT="${LOG_FOLDER}/report.csv"
MARIADB_LOG_NAME="mariadb.bench.log"
COMMON_LOG_FILE="${LOG_FOLDER}/common.log"
SYSBENCH_VERSION=1.0.20
db_path="/maria/db"
db_parent_path="/maria"
# Slash-escaped form of db_path for use inside sed expressions below.
escaped_path=$(echo "${db_path}" | sed 's/\//\\\//g')
user="lisa"
# Source the test constants (server/client IPs etc.); abort the test if missing.
. ${CONSTANTS_FILE} || {
	LogErr "Error: missing ${CONSTANTS_FILE} file"
	SetTestStateAborted
	exit 0
}
# Source the shared helper functions (LogMsg, Run_SSHCommand, ...).
. ${UTIL_FILE} || {
	LogErr "Missing ${UTIL_FILE} file"
	SetTestStateAborted
	exit 0
}
if [ ! "${server}" ]; then
	LogErr "Please add/provide value for server in constants.sh. server=<server ip>"
	SetTestStateAborted
	exit 0
fi
if [ ! "${client}" ]; then
	LogErr "Please add/provide value for client in constants.sh. client=<client ip>"
	SetTestStateAborted
	exit 0
fi
# Default benchmark matrix: thread counts and per-run duration (seconds).
if [ ! "${THREADS}" ]; then
	THREADS=(1 2 4 8 16 32 64 128 256)
fi
if [ ! "${MAX_TIME}" ]; then
	MAX_TIME=300
fi
# Download, build and install sysbench from source on the client machine.
# Aborts the test on any download/extract/build failure.
function install_sysbench () {
	LogMsg "Getting sysbench"
	wget https://github.com/akopytov/sysbench/archive/${SYSBENCH_VERSION}.tar.gz
	if [ $? -gt 0 ]; then
		LogErr "Failed to download sysbench"
		SetTestStateAborted
		exit 0
	fi
	tar -xzvf ${SYSBENCH_VERSION}.tar.gz
	if [ $? -gt 0 ]; then
		LogErr "Failed to unzip sysbench"
		SetTestStateAborted
		exit 0
	fi
	LogMsg "Building sysbench"
	pushd "${HOMEDIR}/sysbench-${SYSBENCH_VERSION}"
	./autogen.sh && ./configure && make && make install
	if [ $? -gt 0 ]; then
		LogErr "Failed to build sysbench"
		SetTestStateAborted
		exit 0
	fi
	popd
	# make install drops the binary into /usr/local/bin.
	export PATH="/usr/local/bin:${PATH}"
	LogMsg "Sysbench installed successfully"
}
# Prepare the MariaDB data directory on the server: format the first
# available data disk, mount it at ${db_path} and move the mysql files there.
function create_data_dir () {
	disks=$(ssh "${server}" ". $UTIL_FILE && get_AvailableDisks")
	# We just use only one disk
	for disk in ${disks}
	do
		Run_SSHCommand "${server}" "mkdir -p ${db_path}"
		Run_SSHCommand "${server}" "yes | mkfs.ext4 /dev/${disk}"
		Run_SSHCommand "${server}" "mount /dev/${disk} ${db_path}"
		Run_SSHCommand "${server}" "cp -rf /var/lib/mysql/* ${db_path}"
		Run_SSHCommand "${server}" "chown -R mysql:mysql ${db_path}"
		Run_SSHCommand "${server}" "chmod 0755 -R ${db_path}"
		# We also need to ensure that all the parent directories of the datadir upwards
		# have "x" (execute) permissions for all (user, group, and other)
		# Refer to https://mariadb.com/kb/en/what-to-do-if-mariadb-doesnt-start/#cant-create-test-file
		Run_SSHCommand "${server}" "chmod 0755 -R ${db_parent_path}"
		break
	done
}
# Point MariaDB at the new datadir, open it to remote clients and raise the
# connection limit, using the distro-appropriate config file; then restart.
# Returns non-zero on unsupported distro or failed service start.
function config_mariadb () {
	case "$DISTRO_NAME" in
		oracle|rhel|centos|almalinux|rockylinux)
			# Mariadb is not enabled by default
			Run_SSHCommand "${server}" "service mariadb stop"
			Run_SSHCommand "${server}" "echo datadir = ${db_path} | sudo tee --append /etc/my.cnf.d/mariadb-server.cnf"
			Run_SSHCommand "${server}" "echo bind-address = 0.0.0.0 | sudo tee --append /etc/my.cnf.d/mariadb-server.cnf"
			Run_SSHCommand "${server}" "echo max_connections = 1024 | sudo tee --append /etc/my.cnf.d/mariadb-server.cnf"
			# Config the systemd service to set the open files limit as infinity
			# Refer to https://mariadb.com/kb/en/systemd/
			Run_SSHCommand "${server}" "echo LimitNOFILE=infinity | sudo tee --append /lib/systemd/system/mariadb.service"
			Run_SSHCommand "${server}" "systemctl daemon-reload"
			# Config selinux to set enforcing to permissive
			# Refer to https://blogs.oracle.com/jsmyth/selinux-and-mysql
			Run_SSHCommand "${server}" "setenforce 0"
			;;
		ubuntu|debian)
			Run_SSHCommand "${server}" "service mysql stop"
			Run_SSHCommand "${server}" "sed -i '/datadir/c\datadir = ${escaped_path}' /etc/mysql/mariadb.conf.d/50-server.cnf"
			Run_SSHCommand "${server}" "sed -i '/bind-address/c\bind-address = 0\.0\.0\.0' /etc/mysql/mariadb.conf.d/50-server.cnf"
			Run_SSHCommand "${server}" "sed -i '/max_connections/c\max_connections = 1024' /etc/mysql/mariadb.conf.d/50-server.cnf"
			;;
		*)
			LogErr "Unsupported distribution"
			return 1
	esac
	Run_SSHCommand "${server}" "service mariadb start"
	if [ $? -ne 0 ]; then
		return 1
	fi
}
# Grant the benchmark user remote access from the client and recreate a
# clean "sbtest" database for sysbench.
function config_mariadb_for_remote_access () {
	# We can refer to https://webdock.io/en/docs/how-guides/how-enable-remote-access-your-mariadbmysql-database
	# to get the meaning of these sql commands.
	Run_SSHCommand "${server}" "mysql -e \"GRANT ALL PRIVILEGES ON *.* TO '${user}'@'${client}' IDENTIFIED BY 'lisapassword' WITH GRANT OPTION;\""
	Run_SSHCommand "${server}" "mysql -e \"DROP DATABASE sbtest;\""
	Run_SSHCommand "${server}" "mysql -e \"CREATE DATABASE sbtest;\""
	Run_SSHCommand "${server}" "mysql -e \"FLUSH PRIVILEGES;\""
}
# Launch background resource monitors (iostat/sar/mpstat/vmstat) on both the
# server (via ssh) and the local client for the duration of one benchmark
# iteration.  $1 is the iteration label used in the log file names.
# Fix: renamed the misspelled "lteration" variable and made it local so it
# does not leak into the global shell namespace.
function start_monitor()
{
	local iteration=${1}
	mpstat_cmd="mpstat"
	iostat_cmd="iostat"
	sar_cmd="sar"
	vmstat_cmd="vmstat"
	# Server-side monitors, captured locally through ssh output redirection.
	Run_SSHCommand "${server}" "${iostat_cmd} -x -d 1 ${MAX_TIME}" > "${LOG_FOLDER}/${iteration}.iostat.server.log" &
	Run_SSHCommand "${server}" "${sar_cmd} -n DEV 1 ${MAX_TIME}" > "${LOG_FOLDER}/${iteration}.sar.server.log" &
	Run_SSHCommand "${server}" "${mpstat_cmd} -P ALL 1 ${MAX_TIME}" > "${LOG_FOLDER}/${iteration}.mpstat.server.log" &
	Run_SSHCommand "${server}" "${vmstat_cmd} 1 ${MAX_TIME}" > "${LOG_FOLDER}/${iteration}.vmstat.server.log" &
	# Client-side monitors.
	${iostat_cmd} -x -d 1 ${MAX_TIME} > ${LOG_FOLDER}/${iteration}.iostat.client.log &
	${sar_cmd} -n DEV 1 ${MAX_TIME} > ${LOG_FOLDER}/${iteration}.sar.client.log &
	${mpstat_cmd} -P ALL 1 ${MAX_TIME} > ${LOG_FOLDER}/${iteration}.mpstat.client.log &
	${vmstat_cmd} 1 ${MAX_TIME} > ${LOG_FOLDER}/${iteration}.vmstat.client.log &
}
# Extract the headline metrics from one sysbench log ($1 = thread count) and
# append a CSV row to ${MARIADB_RESULT}.
function parse_log () {
	threads=${1}
	mariadb_log_file="${LOG_FOLDER}/${threads}.${MARIADB_LOG_NAME}"
	Threads=$(grep "Number of threads:" $mariadb_log_file | awk '{print $NF}')
	TotalQueries=$(grep "total:" $mariadb_log_file | awk '{print $NF}')
	# The per-second rate is the parenthesised value on the transactions line.
	TransactionsPerSec=$(grep "transactions:" $mariadb_log_file | awk -F '(' '{print $2}' | awk '{print $1}')
	Latency95Percentile_ms=$(grep "95th percentile:" $mariadb_log_file | awk '{print $NF}')
	LogMsg "Test Results: "
	LogMsg "---------------"
	LogMsg "Threads: $Threads"
	LogMsg "Total Queries: $TotalQueries"
	LogMsg "Transactions Per Sec: $TransactionsPerSec"
	LogMsg "95 Percentile latency(ms): $Latency95Percentile_ms"
	echo "$Threads,$TotalQueries,$TransactionsPerSec,$Latency95Percentile_ms" >> "${MARIADB_RESULT}"
}
# Run one sysbench OLTP iteration against the remote server with $1 threads,
# with resource monitors running, then fold the results into the CSV report.
function run_mariadb () {
	threads=$1
	LogMsg "======================================"
	LogMsg "Running mariadb test with current threads: ${threads}"
	LogMsg "======================================"
	start_monitor ${threads}
	sysbench ${oltp_path} --mysql-host=${server} --mysql-user=${user} --mysql-password=lisapassword \
	--mysql-db=sbtest --time=${MAX_TIME} --oltp-test-mode=complex --mysql-table-engine=innodb --oltp-read-only=off \
	--max-requests=1000000 --num-threads=${threads} run > "${LOG_FOLDER}/${threads}.${MARIADB_LOG_NAME}"
	parse_log ${threads}
}
# --- Main flow -----------------------------------------------------------
# Start from clean log folders on both machines.
Run_SSHCommand "${server}" "rm -rf ${LOG_FOLDER}"
Run_SSHCommand "${server}" "mkdir -p ${LOG_FOLDER}"
rm -rf ${LOG_FOLDER}
mkdir -p ${LOG_FOLDER}
# Install mariadb in client and server machine
# (install_mariadb is provided by the sourced ${UTIL_FILE}.)
LogMsg "Configuring client ${client}..."
install_mariadb
if [ $? -ne 0 ]; then
	LogErr "Mariadb installation failed in ${client}.."
	SetTestStateAborted
	exit 0
fi
LogMsg "Configuring server ${server}..."
Run_SSHCommand "${server}" ". $UTIL_FILE && install_mariadb"
if [ $? -ne 0 ]; then
	LogErr "Mariadb installation failed in ${server}..."
	SetTestStateAborted
	exit 0
fi
# Install sysbench in client machine
LogMsg "Installing sysbench in client ${client}..."
install_sysbench
# Create data directory of mysql on server machine
LogMsg "Creating data directory of mysql in server ${server}..."
create_data_dir
if [ $? -ne 0 ]; then
	LogErr "Creating data directory of mysql failed in server ${server}.."
	SetTestStateAborted
	exit 0
fi
# Config MariaDB
# Fix: corrected "Configing"/"MarinaDB" typos in the log messages below.
LogMsg "Configuring MariaDB in server ${server}..."
config_mariadb
if [ $? -ne 0 ]; then
	LogErr "Configuring MariaDB failed in server ${server}.."
	SetTestStateAborted
	exit 0
fi
# Config MariaDB for remote access
LogMsg "Configuring MariaDB for remote access in server ${server}..."
config_mariadb_for_remote_access
if [ $? -ne 0 ]; then
	LogErr "Configuring MariaDB for remote access failed in server ${server}.."
	SetTestStateAborted
	exit 0
fi
# Prepare for mariadb test: seed the sbtest table with 1M rows.
LogMsg "Prepare for MariaDB test in client ${client}..."
oltp_path="${HOMEDIR}/sysbench-${SYSBENCH_VERSION}/tests/include/oltp_legacy/oltp.lua"
sysbench ${oltp_path} --mysql-host=${server} --mysql-user=${user} --mysql-password=lisapassword \
--mysql-db=sbtest --oltp-table-size=1000000 prepare >> ${COMMON_LOG_FILE}
echo "Threads,TotalQueries,TransactionsPerSec,Latency95Percentile_ms" > "${MARIADB_RESULT}"
# Run mariadb test
LogMsg "Running MariaDB performance test using sysbench in client ${client}..."
for threads in "${THREADS[@]}"
do
	run_mariadb ${threads}
done
# Drop the benchmark tables and publish the results.
sysbench ${oltp_path} --mysql-host=${server} --mysql-user=${user} --mysql-password=lisapassword --mysql-db=sbtest cleanup >> ${COMMON_LOG_FILE}
LogMsg "Kernel Version : $(uname -r)"
LogMsg "Guest OS : ${distro}"
column -s, -t "${MARIADB_RESULT}" > "${LOG_FOLDER}"/report.log
cp "${LOG_FOLDER}"/* .
cat report.log
SetTestStateCompleted
|
import json

# Parse the embedded JSON document and read a field from it.
# Fix: the original wrapped a multi-line literal in single quotes, which is
# a SyntaxError in Python — a triple-quoted string is required.
data = json.loads('''{
    "name": "John Smith",
    "age": 35
}''')
customer_name = data["name"]
print(customer_name)
#!/usr/bin/env bash
# Base16 Material Lighter - Gnome Terminal color scheme install script
# Nate Peterson
[[ -z "$PROFILE_NAME" ]] && PROFILE_NAME="Base 16 Material Lighter 256"
[[ -z "$PROFILE_SLUG" ]] && PROFILE_SLUG="base-16-material-lighter-256"
[[ -z "$DCONF" ]] && DCONF=dconf
[[ -z "$UUIDGEN" ]] && UUIDGEN=uuidgen
# Write a key/value pair under the current dconf profile key.
# NOTE(review): "$type" is never assigned on the dconf code path (only gset
# declares a *local* type), so the quoting branch below fires only if a
# "type" variable leaks in from the environment; callers already pass
# pre-quoted values — confirm before relying on this branch.
dset() {
	local key="$1"; shift
	local val="$1"; shift
	if [[ "$type" == "string" ]]; then
		val="'$val'"
	fi
	"$DCONF" write "$PROFILE_KEY/$key" "$val"
}
# Because dconf still doesn't have "append"
dlist_append() {
local key="$1"; shift
local val="$1"; shift
local entries="$(
{
"$DCONF" read "$key" | tr -d '[]' | tr , "\n" | fgrep -v "$val"
echo "'$val'"
} | head -c-1 | tr "\n" ,
)"
"$DCONF" write "$key" "[$entries]"
}
# Newest versions of gnome-terminal use dconf
# Modern path: clone the default profile under a fresh UUID, register it in
# the profile list and overwrite its colors with the theme palette.
if which "$DCONF" > /dev/null 2>&1; then
	# Check that uuidgen is available
	type $UUIDGEN >/dev/null 2>&1 || { echo >&2 "Requires uuidgen but it's not installed.  Aborting!"; exit 1; }
	[[ -z "$BASE_KEY_NEW" ]] && BASE_KEY_NEW=/org/gnome/terminal/legacy/profiles:
	if [[ -n "`$DCONF list $BASE_KEY_NEW/`" ]]; then
		if which "$UUIDGEN" > /dev/null 2>&1; then
			PROFILE_SLUG=`uuidgen`
		fi
		# Find the slug of the default profile (explicit setting, else the
		# first listed profile).
		if [[ -n "`$DCONF read $BASE_KEY_NEW/default`" ]]; then
			DEFAULT_SLUG=`$DCONF read $BASE_KEY_NEW/default | tr -d \'`
		else
			DEFAULT_SLUG=`$DCONF list $BASE_KEY_NEW/ | grep '^:' | head -n1 | tr -d :/`
		fi
		DEFAULT_KEY="$BASE_KEY_NEW/:$DEFAULT_SLUG"
		PROFILE_KEY="$BASE_KEY_NEW/:$PROFILE_SLUG"
		# Copy existing settings from default profile
		$DCONF dump "$DEFAULT_KEY/" | $DCONF load "$PROFILE_KEY/"
		# Add new copy to list of profiles
		dlist_append $BASE_KEY_NEW/list "$PROFILE_SLUG"
		# Update profile values with theme options
		dset visible-name "'$PROFILE_NAME'"
		dset palette "['#fafafa', '#ff5370', '#91b859', '#ffb62c', '#6182b8', '#7c4dff', '#39adb5', '#80cbc4', '#ccd7da', '#ff5370', '#91b859', '#ffb62c', '#6182b8', '#7c4dff', '#39adb5', '#ffffff']"
		dset background-color "'#fafafa'"
		dset foreground-color "'#80cbc4'"
		dset bold-color "'#80cbc4'"
		dset bold-color-same-as-fg "true"
		dset cursor-colors-set "true"
		dset cursor-background-color "'#80cbc4'"
		dset cursor-foreground-color "'#fafafa'"
		dset use-theme-colors "false"
		dset use-theme-background "false"
		# Done — clean up the helper variables and skip the gconf fallback.
		unset PROFILE_NAME
		unset PROFILE_SLUG
		unset DCONF
		unset UUIDGEN
		exit 0
	fi
fi
# Fallback for Gnome 2 and early Gnome 3
# Legacy path: write the profile through gconftool under a named slug.
[[ -z "$GCONFTOOL" ]] && GCONFTOOL=gconftool
[[ -z "$BASE_KEY" ]] && BASE_KEY=/apps/gnome-terminal/profiles
PROFILE_KEY="$BASE_KEY/$PROFILE_SLUG"
# Write a typed key/value pair under the gconf profile key.
gset() {
	local type="$1"; shift
	local key="$1"; shift
	local val="$1"; shift
	"$GCONFTOOL" --set --type "$type" "$PROFILE_KEY/$key" -- "$val"
}
# Because gconftool doesn't have "append"
glist_append() {
local type="$1"; shift
local key="$1"; shift
local val="$1"; shift
local entries="$(
{
"$GCONFTOOL" --get "$key" | tr -d '[]' | tr , "\n" | fgrep -v "$val"
echo "$val"
} | head -c-1 | tr "\n" ,
)"
"$GCONFTOOL" --set --type list --list-type $type "$key" "[$entries]"
}
# Append the Base16 profile to the profile list
glist_append string /apps/gnome-terminal/global/profile_list "$PROFILE_SLUG"
# Update profile values with theme options.
# Fix: the cursor keys were copy-pasted from the dconf section — gconf keys
# use underscores and gset applies its own quoting, so the hyphenated names
# and pre-quoted "'#...'" values could never match the other gset calls.
gset string visible_name "$PROFILE_NAME"
gset string palette "#fafafa:#ff5370:#91b859:#ffb62c:#6182b8:#7c4dff:#39adb5:#80cbc4:#ccd7da:#ff5370:#91b859:#ffb62c:#6182b8:#7c4dff:#39adb5:#ffffff"
gset string background_color "#fafafa"
gset string foreground_color "#80cbc4"
gset string bold_color "#80cbc4"
gset bool bold_color_same_as_fg "true"
gset bool cursor_colors_set "true"
gset string cursor_background_color "#80cbc4"
gset string cursor_foreground_color "#fafafa"
gset bool use_theme_colors "false"
gset bool use_theme_background "false"
unset PROFILE_NAME
unset PROFILE_SLUG
unset DCONF
unset UUIDGEN
|
<filename>src/js/api/attachment.js
// Environment guard: load the base module via require() only outside a plain
// browser context (Node, webpack bundles, or React Native); in the browser
// `base`/`ripe` are expected to already be in scope.
if (
    typeof require !== "undefined" &&
    (typeof window === "undefined" ||
        // eslint-disable-next-line camelcase
        typeof __webpack_require__ !== "undefined" ||
        (typeof navigator !== "undefined" && navigator.product === "ReactNative"))
) {
    // eslint-disable-next-line no-redeclare,no-var
    var base = require("../base");
    // eslint-disable-next-line no-redeclare,no-var
    var ripe = base.ripe;
}
/**
 * Returns the URL that redirects to where an attachment is hosted,
 * using the provided secret key as reference.
 *
 * @param {String} key The secret key to be used in attachment retrieval.
 * @param {Object} options A map with options for the URL building.
 * @returns {String} The URL that can be used to view an attachment.
 */
ripe.Ripe.prototype.getAttachmentOrderUrl = function(key, options) {
    options = options === undefined ? {} : options;
    const url = `${this.webUrl}attachments/${key}/data`;
    // Copy into a fresh object instead of mutating the caller-provided
    // options (the original Object.assign(options, ...) leaked the "url"
    // key back to the caller).
    options = Object.assign({}, options, { url: url });
    return options.url + "?" + this._buildQuery(options.params);
};
|
#!/bin/sh
# Simple linked-container watchdog: poll the web container once a second and
# notify the mailer container (then exit) as soon as it stops answering 200.
while true; do
    # watch web docker
    # NOTE(review): `if $(...)` relies on the substitution expanding to an
    # empty command whose status is grep's exit code — fragile if the probe
    # ever produces output; `if printf ... | grep -q ...` is the usual form.
    if $(printf "GET / HTTP/1.0\n\n" | nc -w 2 $INSIDEWEB_PORT_80_TCP_ADDR $INSIDEWEB_PORT_80_TCP_PORT | grep -q '200 OK'); then
        echo "System up."
    else
        # send to mailer docker
        printf "To: admin@work Message: The service is down!" | nc $INSIDEMAILER_PORT_33333_TCP_ADDR $INSIDEMAILER_PORT_33333_TCP_PORT
        break
    fi
    sleep 1
done
<gh_stars>0
package ru.job4j.accidents.mem;
import ru.job4j.accidents.model.Accident;
import java.util.Map;
/**
* @author Sir-Hedgehog (mailto:<EMAIL>)
* @version 6.0
* @since 23.06.2020
*/
public class AccidentMem {

    /** Backing map, keyed by accident id; supplied by (and shared with) the caller. */
    private final Map<Integer, Accident> storage;

    public AccidentMem(Map<Integer, Accident> accidents) {
        this.storage = accidents;
    }

    /**
     * Returns the full set of stored accidents.
     *
     * @return map of accidents keyed by id
     */
    public Map<Integer, Accident> getAccidents() {
        return storage;
    }

    /**
     * Stores a new accident record under its own id.
     */
    public void createAccident(Accident accident) {
        storage.put(accident.getId(), accident);
    }

    /**
     * Replaces the stored accident record that shares the given accident's id.
     */
    public void updateAccident(Accident accident) {
        storage.put(accident.getId(), accident);
    }
}
|
#!/bin/bash -e
# Builds and tests SOCI backend Oracle at travis-ci.org
#
# Copyright (c) 2013 Mateusz Loskot <mateusz@loskot.net>
#
# Pull in the shared CI helpers and the Oracle environment setup.
source ${TRAVIS_BUILD_DIR}/scripts/travis/common.sh
source ${TRAVIS_BUILD_DIR}/scripts/travis/oracle.sh

# Configure an Oracle-only build (no Boost) against the local XE instance.
cmake ${SOCI_DEFAULT_CMAKE_OPTIONS} \
    -DWITH_BOOST=OFF \
    -DSOCI_ORACLE=ON \
    -DSOCI_ORACLE_TEST_CONNSTR:STRING="service=XE user=travis password=travis" \
    ..

run_make
run_test
|
package nl.pvanassen.steam.store.buy;
/**
* Result of a purchase attempt
*
* @author <NAME>
*/
public class BuyResult {

    /** Whether the purchase went through. */
    private final boolean success;
    /** Wallet balance reported after the attempt. */
    private final int wallet;
    /** Error (or status) message reported for the attempt. */
    private final String message;

    BuyResult(boolean success, int wallet, String message) {
        super();
        this.success = success;
        this.wallet = wallet;
        this.message = message;
    }

    /**
     * @return The error message
     */
    public String getMessage() {
        return message;
    }

    /**
     * @return the wallet
     */
    public int getWallet() {
        return wallet;
    }

    /**
     * @return the success
     */
    public boolean isSuccess() {
        return success;
    }

    /**
     * {@inheritDoc}
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Include the message so failed purchases are diagnosable from logs
        // (the original omitted the field).
        return "BuyResult [success=" + success + ", wallet=" + wallet + ", message=" + message + "]";
    }
}
|
def days_to_months_years(days):
    """Convert a day count into whole years, months and leftover days.

    Uses 365-day years and 30-day months; calendar drift is ignored.

    Args:
        days: Non-negative number of days.

    Returns:
        Tuple ``(years, months, days)``.
    """
    # divmod expresses the two-step remainder chain directly.  When
    # days < 365 the year component is simply 0, so the original if/else
    # duplication of the month computation was unnecessary.
    years, days = divmod(days, 365)
    months, days = divmod(days, 30)
    return years, months, days
# Demo: break 2268 days into years/months/days and print the result.
days = 2268
years, months, days = days_to_months_years(days)
print('Years: {}, Months: {}, Days: {}'.format(years, months, days))
<reponame>kakarotto7/weather-radar<gh_stars>0
import React from 'react';
import dayjs from 'dayjs';
import PropTypes from 'prop-types';
import * as weatherIcons from '../icons';
// Renders the multi-day forecast list: one row per entry with the weekday,
// a day/night condition icon and the min/max temperatures.
const Forecast = (props) => {
  const { forecast } = props;
  const iconPrefix = 'wi wi-';
  return (
    <div className="mt-4 border-t border-green-300">
      {forecast.map((item, index) => {
        // Day icons between 08:00 and 18:59, night icons otherwise.
        // NOTE(review): the hour is read from `item.date` while the weekday
        // below uses `item.dt_txt` — confirm both carry the same timestamp.
        const currentHour = dayjs(item.date).format('H');
        const isDay = currentHour > 7 && currentHour < 19 ? true : false;
        const icon =
          iconPrefix +
          weatherIcons.default[isDay ? 'day' : 'night'][item.icon_id].icon;
        return (
          <ul className="mt-4" key={index}>
            <li className="flex flex-row text-gray-500 p-1">
              <span className="flex-1 text-left">
                {dayjs(item.dt_txt).format('dddd')}
              </span>
              <span className="text-indigo-700 text-2xl">
                <span className={icon}></span>
              </span>
              <span className="flex-1 text-right">
                {item.min}° / {item.max}°
              </span>
            </li>
          </ul>
        );
      })}
    </div>
  );
};
// Runtime prop validation: `forecast` is the list of forecast entries.
Forecast.propTypes = {
  forecast: PropTypes.array,
};

export default Forecast;
|
package com.irmansyah.kamusku.ui.translete;
import android.arch.lifecycle.ViewModelProvider;
import android.support.v7.widget.LinearLayoutManager;
import com.irmansyah.kamusku.ViewModelProviderFactory;
import com.irmansyah.kamusku.data.DataManager;
import com.irmansyah.kamusku.data.model.db.EnglishIndonesia;
import com.irmansyah.kamusku.data.model.db.IndonesiaEnglish;
import com.irmansyah.kamusku.utils.rx.SchedulerProvider;
import java.util.ArrayList;
import dagger.Module;
import dagger.Provides;
/**
 * Created by irmansyah on 08/03/18.
 *
 * Dagger module supplying the translate screen's view model, list adapters
 * and layout manager.
 */
@Module
public class TransleteActivityModule {

    /** View model backed by the shared data manager and scheduler provider. */
    @Provides
    TransleteViewModel provideMainViewModel(DataManager dataManager,
                                            SchedulerProvider schedulerProvider) {
        return new TransleteViewModel(dataManager, schedulerProvider);
    }

    /** Adapter for English-to-Indonesian entries; starts with an empty list. */
    @Provides
    EngIndAdapter provideEngIndAdapter() {
        // Diamond operator: element type inferred from the constructor parameter.
        return new EngIndAdapter(new ArrayList<>());
    }

    /** Adapter for Indonesian-to-English entries; starts with an empty list. */
    @Provides
    IndEngAdapter provideIndEngAdapter() {
        return new IndEngAdapter(new ArrayList<>());
    }

    /** Vertical list layout manager bound to the activity context. */
    @Provides
    LinearLayoutManager provideSearchLinearLayoutManager(TransleteActivity activity) {
        return new LinearLayoutManager(activity);
    }

    /** Factory so ViewModelProviders can hand out the provided view model. */
    @Provides
    ViewModelProvider.Factory provideSearchViewModelProviderFactory(TransleteViewModel viewModel) {
        return new ViewModelProviderFactory<>(viewModel);
    }
}
|
const httpStatus = require('http-status');
const pick = require('../utils/pick');
const ApiError = require('../utils/ApiError');
const catchAsync = require('../utils/catchAsync');
const {handbookServer } =require('../services');
const createHandbook = catchAsync(async (req,res) => {
const createHB = await handbookServer.createHandbook(req.body,req.user.id);
res.status(httpStatus.CREATED).send(createHB);
});
const getHandbooks = catchAsync(async (req, res) =>{
const filter = pick(req.query, ['nameHandbook','severity','icon'])
const options = pick(req.query,['sortBy','limit','page']);
const result = await handbookServer.queryHandbook(filter, options);
res.send(result);
});
const getHandbook = catchAsync(async (req, res) => {
const getHandbook = await handbookServer.getHandbookById(req.params.handbookId);
if(!getHandbook){
throw new ApiError(httpStatus.NOT_FOUND,'Handbook not found');
}
res.send(getHandbook);
});
const updateHandbook = catchAsync(async (req, res) => {
const updateHB = await handbookServer.updateHandbookById(req.params.handbookId, req.body, req.user.id);
res.send(updateHB);
});
const deleteHandbook = catchAsync(async (req, res) => {
await handbookServer.deleteHandbookById(req.params.handbookId);
res.status(httpStatus.NO_CONTENT).send();
});
module.exports = {
createHandbook,
getHandbooks,
getHandbook,
updateHandbook,
deleteHandbook
}
|
<reponame>moritzheiber/hello-world-app
#!/usr/bin/env ruby
# Minimal static file server: serves the current directory on port 8000
# with WEBrick.
require 'socket'
require 'webrick'
include WEBrick
s = HTTPServer.new(
  Port: 8000,
  DocumentRoot: "."
)
# Shut down gracefully when the process is asked to terminate (e.g. by a
# container runtime sending SIGTERM).
trap('TERM'){ s.shutdown }
s.start
|
import re
def extract_copyright_info(file_path):
    """Pull the year, holder, license name and license version out of an
    Apache-style copyright header in the file at ``file_path``.

    :return: dict with keys ``year``, ``holder``, ``license`` and
        ``version``; every value is an empty string when no header matches.
    """
    fields = ("year", "holder", "license", "version")
    info = dict.fromkeys(fields, "")

    with open(file_path, 'r') as source:
        match = re.search(r'# Copyright (\d{4}) (.+?)\n#\n# Licensed under the (.+?), Version (\d+\.\d+) \(the "License"\);', source.read(), re.DOTALL)

    if match:
        info.update(zip(fields, match.groups()))
    return info
# Submit one OAR job per subject id passed on the command line.
# Quoting "$@" keeps arguments containing spaces intact (the original
# unquoted $@ was subject to word splitting and globbing).
sub=("$@")
# Resolve the directory containing this script so the worker script is found
# regardless of the caller's working directory; ${BASH_SOURCE[0]} is quoted
# to survive paths with spaces.
DIR=$( dirname "${BASH_SOURCE[0]}")
for ((i=0; i<${#sub[@]};i++))
do
  # 1 node / 4 cores / 1h walltime, restricted to nodes with >10 GiB of RAM.
  oarsub -l /nodes=1/core=4,walltime=1:0:0 -p 'mem_node > 10*1024' "$DIR/fla_5_6_1_list_hand_fsl.sh ${sub[i]}"
done
|
package com.mottledog.dao;
import org.springframework.stereotype.Repository;
import org.springframework.transaction.annotation.Transactional;
import com.mottledog.bo.User;
/**
 * @ClassName: UserDAO
 * @Description: Data-access object for {@link User} entities keyed by
 *               Integer ids. All CRUD behaviour is inherited from
 *               {@link BaseDao}; transactions default to read-only at the
 *               class level.
 * @author tianli
 * @date 2015-1-21 14:37:23
 */
@Repository
@Transactional(readOnly = true)
public class UserDao extends BaseDao<User, Integer>{
}
|
#!/bin/sh
# CocoaPods-style "Embed Frameworks" build phase: copies built frameworks into
# the app bundle, strips invalid architectures and re-signs as required.
set -e
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
# Location of the Swift runtime dylibs matching the current platform.
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Copies a single framework ($1: path or name under BUILT_PRODUCTS_DIR) into
# the app's Frameworks folder, resolving symlinks, stripping invalid
# architectures, re-signing if enabled and (pre-Xcode 7) embedding any Swift
# runtime dylibs the binary links against.
install_framework()
{
  # Locate the framework: full path under BUILT_PRODUCTS_DIR, its basename
  # under BUILT_PRODUCTS_DIR, or a literal readable path.
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi
  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
  # Follow symlinks so we copy the real framework, not the link.
  if [ -L "${source}" ]; then
      echo "Symlinked..."
      source="$(readlink "${source}")"
  fi
  # use filter instead of exclude so missing patterns don't throw errors
  echo "rsync -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"
  # Fall back to a bare binary (non-.framework bundle layout).
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  fi
  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi
  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"
  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Signs a framework with the provided identity, but only when an identity is
# set and code signing has not been explicitly disabled by the build settings.
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    echo "/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements \"$1\""
    /usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements "$1"
  fi
}
# Strip invalid architectures: removes, in place, every architecture slice in
# $1 that is not listed in VALID_ARCHS (e.g. simulator slices on device builds).
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current file (last ':'-separated field of lipo -info)
  archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
  stripped=""
  for arch in $archs; do
    if ! [[ "${VALID_ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary" || exit 1
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
}
# Both configurations embed exactly the same framework set, so the two
# previously-duplicated branches are merged into one.
if [[ "$CONFIGURATION" == "Debug" || "$CONFIGURATION" == "Release" ]]; then
  install_framework "$BUILT_PRODUCTS_DIR/Alpha/Alpha.framework"
  install_framework "$BUILT_PRODUCTS_DIR/Bravo/Bravo.framework"
  install_framework "$BUILT_PRODUCTS_DIR/Delta/Delta.framework"
  install_framework "$BUILT_PRODUCTS_DIR/Charlie/Charlie.framework"
fi
|
# Restore the WideWorldImporters sample database inside the running
# 'sql2017cu10' container (detached), relocating each logical file into
# /var/opt/mssql/data. NOTE(review): the SA password is embedded in plain
# text here — consider sourcing it from the environment.
sudo docker exec -d sql2017cu10\
 /opt/mssql-tools/bin/sqlcmd -S localhost -U SA -P 'Sql2017isfast' -Q 'RESTORE DATABASE WideWorldImporters FROM DISK = "/var/opt/mssql/WideWorldImporters-Full.bak" WITH MOVE "WWI_Primary" TO "/var/opt/mssql/data/WideWorldImporters.mdf", MOVE "WWI_UserData" TO "/var/opt/mssql/data/WideWorldImporters_userdata.ndf", MOVE "WWI_Log" TO "/var/opt/mssql/data/WideWorldImporters.ldf", MOVE "WWI_InMemory_Data_1" TO "/var/opt/mssql/data/WideWorldImporters_InMemory_Data_1"'
|
#!/bin/bash
# Runs ghapi2db for every configured project (or only $ONLY when set),
# mapping special project names to their database names.
if [ -z "${PG_PASS}" ]
then
  echo "You need to set PG_PASS environment variable to run this script"
  exit 1
fi
if [ -z "$ONLY" ]
then
  # $(...) instead of legacy backticks; quote expansions to avoid word
  # splitting if the hostname or file contents ever contain spaces.
  host=$(hostname)
  if [ "$host" = "cncftest.io" ]
  then
    all=$(cat ./devel/all_test_projects.txt)
  else
    all=$(cat ./devel/all_prod_projects.txt)
  fi
else
  all=$ONLY
fi
for proj in $all
do
  db=$proj
  # Special-case database names for the kubernetes and all-projects runs.
  if [ "$proj" = "kubernetes" ]
  then
    db="gha"
  elif [ "$proj" = "all" ]
  then
    db="allprj"
  fi
  echo "Project: $proj, DB: $db"
  GHA2DB_GHAPISKIP=1 PG_DB=$db ./ghapi2db || exit 1
done
echo 'OK'
|
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*
* @format
*/
import {
DownloadOutlined,
LoadingOutlined,
PlusOutlined,
} from '@ant-design/icons';
import {Alert, Button} from 'antd';
import {BundledPluginDetails, DownloadablePluginDetails} from 'flipper-common';
import React, {useMemo} from 'react';
import {useCallback} from 'react';
import {useDispatch, useSelector} from 'react-redux';
import {PluginDefinition} from '../plugin';
import {startPluginDownload} from '../reducers/pluginDownloads';
import {loadPlugin, switchPlugin} from '../reducers/pluginManager';
import {
getActiveClient,
getPluginDownloadStatusMap,
} from '../selectors/connections';
import {Layout} from '../ui';
import {ActivePluginListItem} from '../utils/pluginUtils';
/**
 * Renders the appropriate action for a plugin list entry based on its
 * status: an enable button for disabled plugins, an install button for
 * uninstalled ones, an unavailability notice (primary style only), or
 * nothing for every other status.
 */
export function PluginActions({
  activePlugin,
  type,
}: {
  activePlugin: ActivePluginListItem;
  type: 'link' | 'primary';
}) {
  switch (activePlugin.status) {
    case 'disabled': {
      return <EnableButton plugin={activePlugin.definition} type={type} />;
    }
    case 'uninstalled': {
      return <InstallButton plugin={activePlugin.details} type={type} />;
    }
    case 'unavailable': {
      // Only surface the reason in the prominent (primary) placement.
      return type === 'primary' ? (
        <UnavailabilityAlert reason={activePlugin.reason} />
      ) : null;
    }
    default:
      return null;
  }
}
/**
 * Button that enables a currently-disabled plugin for the active client app
 * (dispatches switchPlugin, which toggles the plugin's enabled state).
 */
function EnableButton({
  plugin,
  type,
}: {
  plugin: PluginDefinition;
  type: 'link' | 'primary';
}) {
  const dispatch = useDispatch();
  const client = useSelector(getActiveClient);
  const enableOrDisablePlugin = useCallback(() => {
    dispatch(switchPlugin({plugin, selectedApp: client?.query?.app}));
  }, [dispatch, plugin, client]);
  // Strict equality (===) for consistency with InstallButton below; the
  // original used loose == here.
  return (
    <Button
      type={type}
      icon={<PlusOutlined />}
      onClick={enableOrDisablePlugin}
      style={{flexGrow: type === 'primary' ? 1 : 0}}>
      Enable Plugin
    </Button>
  );
}
/**
 * Centered warning banner explaining why a plugin cannot currently be used.
 */
function UnavailabilityAlert({reason}: {reason: string}) {
  return (
    <Layout.Container center>
      <Alert message={reason} type="warning" />
    </Layout.Container>
  );
}
/**
 * Button that installs a plugin: bundled plugins are loaded and enabled
 * directly; downloadable plugins kick off a download. While a download for
 * this plugin is in flight, the button is disabled and shows a spinner.
 */
function InstallButton({
  // NOTE(review): this default is unreachable — the prop type below declares
  // `type` as required, so callers always supply it.
  plugin,
  type = 'primary',
}: {
  plugin: DownloadablePluginDetails | BundledPluginDetails;
  type: 'link' | 'primary';
}) {
  const dispatch = useDispatch();
  const installPlugin = useCallback(() => {
    if (plugin.isBundled) {
      dispatch(loadPlugin({plugin, enable: true, notifyIfFailed: true}));
    } else {
      dispatch(startPluginDownload({plugin, startedByUser: true}));
    }
  }, [plugin, dispatch]);
  const downloads = useSelector(getPluginDownloadStatusMap);
  // Truthy while a download for this plugin id is in progress.
  const downloadStatus = useMemo(
    () => downloads.get(plugin.id),
    [downloads, plugin],
  );
  return (
    <Button
      type={type}
      disabled={!!downloadStatus}
      icon={
        downloadStatus ? (
          <LoadingOutlined size={16} />
        ) : (
          <DownloadOutlined size={16} />
        )
      }
      onClick={installPlugin}
      style={{
        flexGrow: type === 'primary' ? 1 : 0,
      }}>
      Install Plugin
    </Button>
  );
}
|
<gh_stars>10-100
package rule
import (
"sort"
"strings"
"github.com/akutz/sortfold"
"github.com/chrisruffalo/gudgeon/config"
"github.com/chrisruffalo/gudgeon/util"
)
// memoryStore keeps every rule in RAM, bucketed by the owning list's
// canonical name.
type memoryStore struct {
	baseStore
	// rules maps a list's canonical name to its rules; slices are sorted
	// case-insensitively by Finalize so foundInList can binary-search them.
	rules map[string][]string
}
// Init allocates the rule map and pre-creates an (empty) slice per list,
// pre-sized from the list file's line count when config is available so the
// subsequent Load calls avoid repeated slice growth. sessionRoot is unused
// here — presumably required by a shared store interface (verify).
func (store *memoryStore) Init(sessionRoot string, config *config.GudgeonConfig, lists []*config.GudgeonList) {
	store.rules = make(map[string][]string)
	for _, list := range lists {
		if _, found := store.rules[list.CanonicalName()]; !found {
			startingArrayLength := uint(0)
			if config != nil {
				startingArrayLength, _ = util.LineCount(config.PathToList(list))
			}
			store.rules[list.CanonicalName()] = make([]string, 0, startingArrayLength)
		}
	}
}
// Clear replaces the named list's rule slice with a fresh empty one
// (pre-sized like Init does) and detaches the list from the base store.
func (store *memoryStore) Clear(config *config.GudgeonConfig, list *config.GudgeonList) {
	startingArrayLength := uint(0)
	if config != nil {
		startingArrayLength, _ = util.LineCount(config.PathToList(list))
	}
	store.rules[list.CanonicalName()] = make([]string, 0, startingArrayLength)
	store.removeList(list)
}
// Load appends one rule to the named list's slice (lower-cased so matching
// can be case-insensitive) and registers the list with the base store.
func (store *memoryStore) Load(list *config.GudgeonList, rule string) {
	store.rules[list.CanonicalName()] = append(store.rules[list.CanonicalName()], strings.ToLower(rule))
	store.addList(list)
}
// Finalize sorts each list's rules case-insensitively, which foundInList's
// binary search depends on. Must be called after loading and before
// FindMatch. sessionRoot is unused by the in-memory store.
func (store *memoryStore) Finalize(sessionRoot string, lists []*config.GudgeonList) {
	for _, list := range lists {
		// case insensitive string/rule sort
		sort.Slice(store.rules[list.CanonicalName()], func(i int, j int) bool {
			return sortfold.CompareFold(store.rules[list.CanonicalName()][i], store.rules[list.CanonicalName()][j]) < 0
		})
	}
}
// foundInList binary-searches rules (which must already be sorted
// case-insensitively, see Finalize) for a case-insensitive exact match of
// domain. Returns whether a match exists and, if so, the stored rule text.
func (store *memoryStore) foundInList(rules []string, domain string) (bool, string) {
	// Locate the first rule that compares >= domain, ignoring case.
	pos := sort.Search(len(rules), func(i int) bool {
		return sortfold.CompareFold(rules[i], domain) >= 0
	})
	if pos >= len(rules) {
		return false, ""
	}
	if !strings.EqualFold(rules[pos], domain) {
		return false, ""
	}
	return true, rules[pos]
}
// FindMatch checks the domain (and related domains produced by
// util.DomainList — presumably its parent domains; verify) against the
// given lists. Allow lists are evaluated first and a hit there wins,
// short-circuiting block-list evaluation. Returns the match kind, the list
// that matched and the literal rule text, or (MatchNone, nil, "").
func (store *memoryStore) FindMatch(lists []*config.GudgeonList, domain string) (Match, *config.GudgeonList, string) {
	domains := util.DomainList(domain)
	match, list, rule := store.matchForEachOfTypeIn(config.ALLOW, lists, func(listType config.ListType, list *config.GudgeonList) (Match, *config.GudgeonList, string) {
		rules, found := store.rules[list.CanonicalName()]
		if !found {
			return MatchNone, nil, ""
		}
		for _, d := range domains {
			if found, ruleString := store.foundInList(rules, d); found {
				return MatchAllow, list, ruleString
			}
		}
		return MatchNone, nil, ""
	})
	// An allow-list hit takes precedence over any block rule.
	if MatchNone != match {
		return match, list, rule
	}
	// No allow hit: evaluate block lists the same way.
	match, list, rule = store.matchForEachOfTypeIn(config.BLOCK, lists, func(listType config.ListType, list *config.GudgeonList) (Match, *config.GudgeonList, string) {
		rules, found := store.rules[list.CanonicalName()]
		if !found {
			return MatchNone, nil, ""
		}
		for _, d := range domains {
			if found, ruleString := store.foundInList(rules, d); found {
				return MatchBlock, list, ruleString
			}
		}
		return MatchNone, nil, ""
	})
	return match, list, rule
}
// Close drops the rule map so its memory can be reclaimed; the store can be
// re-initialized with Init afterwards.
func (store *memoryStore) Close() {
	// remove reference to rules
	store.rules = make(map[string][]string)
}
|
// clang-format off
//
// Copyright (c) 2016 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// clang-format on
#import "FUIArray.h"
NS_ASSUME_NONNULL_BEGIN
@class FUIIndexArray;
/**
 * A protocol to allow instances of FUIIndexArray to raise events through a
 * delegate. Raises all Firebase events except @c FIRDataEventTypeValue.
 */
@protocol FUIIndexArrayDelegate <NSObject>
@optional
/**
 * Delegate method called when the database reference at an index has
 * finished loading its contents.
 * @param array The array containing the reference.
 * @param ref The reference that was loaded.
 * @param object The database reference's contents.
 * @param index The index of the reference that was loaded.
 */
- (void)array:(FUIIndexArray *)array
    reference:(FIRDatabaseReference *)ref
didLoadObject:(FIRDataSnapshot *)object
      atIndex:(NSUInteger)index;
/**
 * Delegate method called when the database reference at an index has
 * failed to load contents.
 * @param array The array containing the reference.
 * @param ref The reference that failed to load.
 * @param index The index in the array of the reference that failed to load.
 * @param error The error that occurred.
 */
- (void)array:(FUIIndexArray *)array
    reference:(FIRDatabaseReference *)ref
      atIndex:(NSUInteger)index
didFailLoadWithError:(NSError *)error;
/**
 * Delegate method which is called whenever an object is added to a
 * FirebaseArray. On a FirebaseArray synchronized to a Firebase reference,
 * this corresponds to a @c FIRDataEventTypeChildAdded event being raised.
 * @param array The array the reference was added to.
 * @param ref The database reference added to the array
 * @param index The index the reference was added at
 */
- (void)array:(FUIIndexArray *)array didAddReference:(FIRDatabaseReference *)ref atIndex:(NSUInteger)index;
/**
 * Delegate method which is called whenever an object is changed in a
 * FirebaseArray. On a FirebaseArray synchronized to a Firebase reference,
 * this corresponds to a @c FIRDataEventTypeChildChanged event being raised.
 * @param array The array containing the changed reference.
 * @param ref The database reference that changed in the array
 * @param index The index the reference was changed at
 */
- (void)array:(FUIIndexArray *)array didChangeReference:(FIRDatabaseReference *)ref atIndex:(NSUInteger)index;
/**
 * Delegate method which is called whenever an object is removed from a
 * FirebaseArray. On a FirebaseArray synchronized to a Firebase reference,
 * this corresponds to a @c FIRDataEventTypeChildRemoved event being raised.
 * @param array The array the reference was removed from.
 * @param ref The database reference removed from the array
 * @param index The index the reference was removed at
 */
- (void)array:(FUIIndexArray *)array didRemoveReference:(FIRDatabaseReference *)ref atIndex:(NSUInteger)index;
/**
 * Delegate method which is called whenever an object is moved within a
 * FirebaseArray. On a FirebaseArray synchronized to a Firebase reference,
 * this corresponds to a @c FIRDataEventTypeChildMoved event being raised.
 * @param array The array containing the moved reference.
 * @param ref The database reference that has moved locations
 * @param fromIndex The index the reference is being moved from
 * @param toIndex The index the reference is being moved to
 */
- (void)array:(FUIIndexArray *)array didMoveReference:(FIRDatabaseReference *)ref fromIndex:(NSUInteger)fromIndex toIndex:(NSUInteger)toIndex;
/**
 * Delegate method which is called whenever the backing query is canceled. This error is fatal
 * and the index array will become unusable afterward, so please handle it appropriately
 * (i.e. by displaying a modal error explaining why there's no content).
 * @param array The array whose backing query was cancelled.
 * @param error the error that was raised
 */
- (void)array:(FUIIndexArray *)array queryCancelledWithError:(NSError *)error;
@end
/**
 * A FUIIndexArray instance uses a query's contents to query children of
 * a separate database reference, which is useful for displaying an indexed list
 * of data as described in https://firebase.google.com/docs/database/ios/structure-data
 */
@interface FUIIndexArray : NSObject
/**
 * An immutable copy of the loaded contents in the array. Returns an
 * empty array if no contents have loaded yet.
 */
@property(nonatomic, copy, readonly) NSArray<FIRDataSnapshot *> *items;
/**
 * An immutable copy of the loaded indexes in the array. Returns an empty
 * array if no indexes have loaded.
 */
@property(nonatomic, copy, readonly) NSArray<FIRDataSnapshot *> *indexes;
/**
 * The delegate that this array should forward events to.
 */
@property(nonatomic, weak) id<FUIIndexArrayDelegate> delegate;
/**
 * Returns the number of items in the array.
 */
@property(nonatomic, readonly) NSUInteger count;
/** Use the designated initializer instead; a bare init has no queries to observe. */
- (instancetype)init NS_UNAVAILABLE;
/**
 * Initializes a FUIIndexArray with an index query and a data query.
 * The array expects the keys of the children of the index query to match exactly children
 * of the data query.
 * @param index A Firebase database query whose childrens' keys are all children
 * of the data query.
 * @param data A Firebase database reference whose children will be fetched and used
 * to populate the array's contents according to the index query.
 * @param delegate The delegate that events should be forwarded to.
 */
- (instancetype)initWithIndex:(id<FUIDataObservable>)index
                         data:(id<FUIDataObservable>)data
                     delegate:(nullable id<FUIIndexArrayDelegate>)delegate NS_DESIGNATED_INITIALIZER;
/**
 * Initializes a FUIIndexArray with an index query and a data query.
 * The array expects the keys of the children of the index query to be children
 * of the data query.
 * @param index A Firebase database query whose childrens' keys are all children
 * of the data query.
 * @param data A Firebase database reference whose children will be fetched and used
 * to populate the array's contents according to the index query.
 */
- (instancetype)initWithIndex:(id<FUIDataObservable>)index
                         data:(id<FUIDataObservable>)data;
/**
 * Returns the snapshot at the given index, if it has loaded.
 * Raises a fatal error if the index is out of bounds.
 * @param index The index of the requested snapshot.
 * @return A snapshot, or nil if one has not yet been loaded.
 */
- (nullable FIRDataSnapshot *)objectAtIndex:(NSUInteger)index;
/**
 * Starts observing the index array's listeners. The indexed array will pass updates to its delegate
 * until the `invalidate` method is called.
 */
- (void)observeQuery;
/**
 * Removes all observers from all queries managed by this array and renders this array
 * unusable. Initialize a new array instead of reusing this array.
 */
- (void)invalidate;
@end
NS_ASSUME_NONNULL_END
|
<filename>acmicpc/10163/10163.py<gh_stars>1-10
# BOJ 10163: N colored papers are placed, in order, on a 101x101 grid;
# later papers cover earlier ones. Print the visible area of each paper.
N = int(input())
paper = [[0]*101 for _ in range(101)]
count = [0]*(N+1)
for n in range(1, N+1):
    # data = [x, y, width, height] of the n-th paper
    data = list(map(int, input().split()))
    for i in range(data[0], data[0]+data[2]):
        for j in range(data[1], data[1]+data[3]):
            paper[i][j] = n  # the most recently placed paper owns this cell
# Tally how many cells each paper still owns (0 marks uncovered cells).
for e in paper:
    for a in e:
        if a:
            count[a] += 1
for e in count[1:]:
    print(e)
|
<gh_stars>1-10
import {
ADD_FEATURE,
EDIT_FEATURE,
DELETE_FEATURE,
SET_FEATURES,
UPDATE_FEATURES_AFTER_IDEA_DELETE
} from './types';
// Base URL of the API server and the features resource route.
const endpoint = process.env.REACT_APP_ENDPOINT;
const featuresAPIRoute = '/api/features';
// Pure action creator: adds `feature` to the idea identified by `id`.
export const addFeature = (id, feature) => ({
  type: ADD_FEATURE,
  id,
  feature
});
/**
 * Thunk: POSTs a new feature to the API, then dispatches addFeature.
 * Bug fix: the original passed `dispatch(addFeature(...))` directly to
 * `.then()`, which dispatched immediately (before the request settled) and
 * registered a non-function as the fulfillment handler. Wrapping it in a
 * callback defers the dispatch until the request resolves.
 */
export const startAddFeature = (ideaId, featureData) => {
  return dispatch => {
    const { id, text } = featureData;
    const configuration = {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        authorization: localStorage.getItem('token')
      },
      body: JSON.stringify({
        id,
        text,
        idea_id: ideaId
      })
    };
    return fetch(`${endpoint + featuresAPIRoute}/create`, configuration)
      .then(() => dispatch(addFeature(ideaId, featureData)))
      .catch(err => console.log(err));
  };
};
// Pure action creator: applies `updates` to the feature identified by `id`.
export const editFeature = (id, updates) => ({
  type: EDIT_FEATURE,
  id,
  updates
});
/**
 * Thunk: PUTs feature updates to the API, then dispatches editFeature.
 * Bug fix: dispatch is wrapped in a callback so it runs after the request
 * resolves (the original invoked it immediately inside `.then(...)`).
 */
export const startEditFeature = (ideaId, featureUpdates) => {
  return dispatch => {
    const { id, text } = featureUpdates;
    const configuration = {
      method: 'PUT',
      headers: {
        'Content-Type': 'application/json',
        authorization: localStorage.getItem('token')
      },
      body: JSON.stringify({
        id,
        text
      })
    };
    return fetch(`${endpoint + featuresAPIRoute}/update`, configuration)
      .then(() => dispatch(editFeature(ideaId, featureUpdates)))
      .catch(err => console.log(err));
  };
};
// Pure action creator: removes the feature identified by `id`.
export const deleteFeature = ({ id }) => ({
  type: DELETE_FEATURE,
  id
});
/**
 * Thunk: DELETEs a feature via the API, then dispatches deleteFeature.
 * Bug fix: dispatch is wrapped in a callback so it runs after the request
 * resolves (the original invoked it immediately inside `.then(...)`).
 */
export const startDeleteFeature = ({ id }) => {
  return dispatch => {
    const configuration = {
      method: 'DELETE',
      headers: {
        'Content-Type': 'application/json',
        authorization: localStorage.getItem('token')
      },
      body: JSON.stringify({
        id
      })
    };
    return fetch(`${endpoint + featuresAPIRoute}/delete`, configuration)
      .then(() => dispatch(deleteFeature({ id })))
      .catch(err => console.log(err));
  };
};
// Pure action creator: replaces the store's feature list wholesale.
export const setFeatures = features => ({
  type: SET_FEATURES,
  features
});
/**
 * Thunk: fetches the authenticated user's features and loads them into the
 * store via setFeatures.
 */
export const startSetFeatures = () => {
  return dispatch => {
    const configuration = {
      method: 'GET',
      headers: {
        'Content-Type': 'application/json',
        authorization: localStorage.getItem('token')
      }
    };
    return fetch(`${endpoint + featuresAPIRoute}/user`, configuration)
      .then(res => res.json())
      .then(json => dispatch(setFeatures(json)))
      .catch(err => console.log(err));
  };
};
// Pure action creator: prunes features belonging to a deleted idea `id`.
export const updateFeaturesAfterIdeaDelete = id => ({
  type: UPDATE_FEATURES_AFTER_IDEA_DELETE,
  id
});
/**
 * Thunk: clears an idea's features server-side, then prunes them locally.
 * Bug fix: dispatch is wrapped in a callback so it runs after the request
 * resolves (the original invoked it immediately inside `.then(...)`).
 */
export const startUpdateFeaturesAfterIdeaDelete = id => {
  return dispatch => {
    const configuration = {
      method: 'DELETE',
      headers: {
        'Content-Type': 'application/json',
        authorization: localStorage.getItem('token')
      },
      body: JSON.stringify({
        id
      })
    };
    return fetch(`${endpoint + featuresAPIRoute}/clear`, configuration)
      .then(() => dispatch(updateFeaturesAfterIdeaDelete(id)))
      .catch(err => console.log(err));
  };
};
|
from bs4 import BeautifulSoup
from urllib.parse import urlparse
from urllib.request import Request, urlopen
import json
# Parse an exported browser bookmarks file and group bookmark URLs by
# hostname, writing the result to bookmarks.json.
with open("bookmarks.html", encoding="utf8") as fp:
    soup = BeautifulSoup(fp, 'html.parser')

# hostname -> list of bookmark URLs found under that host
bmap = dict()
# header = {
#     'User-Agent':
#     'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:23.0) Gecko/20100101 Firefox/23.0'
# }
output_file_path = "Output.txt"
with open(output_file_path, "w+", encoding="utf8") as text_file:
    for tag in soup.find_all('a'):
        url = tag['href']
        # check if connection available
        # req = Request(url, headers=header)
        # try:
        #     response = urlopen(req)
        #     if response.getcode() != 200:
        #         print(url + ' '+ str(response.getcode()))
        # except Exception as e:
        #     print('Could not reach: ' + url + '\n')
        o = urlparse(url)
        hostname = o.hostname
        # setdefault replaces the original membership-test / get / append /
        # update dance (and the unidiomatic `in bmap.keys()` check).
        bmap.setdefault(hostname, []).append(url)
        # text_file.write("%s\n%s\n%s\n\n" % (tag.contents[0], url, o.hostname))
print(len(bmap))
with open('bookmarks.json', 'w+', encoding="utf8") as fp:
    json.dump(bmap, fp, sort_keys=True)
|
import time
import argparse
from numpy import std, mean
from hks_pylib.files.generator import BMPImageGenerator, BytesGenerator
from _simulator.base.match import SecurePatternMatching
class Match:
    """CLI sub-command that benchmarks secure pattern matching (SPM)
    between two input files, reporting the match result and timing stats."""

    def __init__(self, parser: argparse.ArgumentParser):
        # Register this sub-command's arguments on the supplied parser.
        self.parser = parser
        self.parser.add_argument("input1", help="The first file path")
        self.parser.add_argument("input2", help="The second file path")
        self.parser.add_argument(
            "--bmp",
            help="Treat the files as the BMP image files.",
            action="store_true"
        )
        self.parser.add_argument(
            "--round",
            "-r",
            help="The number of rounds which you want to perform "
            "the evaluation. By default, round is 10.",
            type=int,
            default=10
        )

    def run(self, args):
        # Run SPM `args.round` times, verifying the result is deterministic
        # across rounds and printing mean/stddev of the elapsed times.
        if not args.bmp:
            generator_cls = BytesGenerator
        else:
            generator_cls = BMPImageGenerator
        generator1 = generator_cls(args.input1)
        generator2 = generator_cls(args.input2)
        spm = SecurePatternMatching(generator1, generator2)
        elapsed_time = []
        prev_result = None
        for _ in range(args.round):
            start = time.time()
            result = spm.match()
            end = time.time()
            # Any round disagreeing with the previous one means SPM is broken.
            if prev_result is not None and result != prev_result:
                raise Exception("Secure pattern matching is incorrect")
            prev_result = result
            elapsed_time.append(end - start)
        print("The result of SPM is {:.2f}%".format(result * 100))
        print("The average cost is {:.3f}s.".format(mean(elapsed_time)))
        print("The standard deviation is {:.5f}s.".format(std(elapsed_time)))
|
// Shared RNG. The original created a new Random inside RandomNumber on every
// call; with time-based seeding (older runtimes) rapid successive calls can
// produce identical sequences, making mutation far less random.
private static readonly Random _random = new Random();

/// <summary>
/// Mutates a random, non-empty subset of this chromosome's genes in place.
/// Note: Next's upper bound is exclusive, so at most genes.Length - 1 genes
/// are mutated per call, and the same index may be selected more than once.
/// </summary>
public override void Mutate()
{
    // Obtain the genes from the chromosome
    Gene[] genes = GetGenes();
    // Randomly select the number of genes to mutate
    int genesToMutate = RandomNumber(1, genes.Length);
    for (int i = 0; i < genesToMutate; i++)
    {
        int geneIndex = RandomNumber(0, genes.Length); // Randomly select the index of the gene to mutate
        genes[geneIndex].Mutate(); // Perform the mutation on the selected gene
    }
    // Update the chromosome with the mutated genes
    SetGenes(genes);
}

/// <summary>Returns a random integer in [min, max) using the shared RNG.</summary>
private int RandomNumber(int min, int max)
{
    return _random.Next(min, max);
}
import re
from flask import Flask, request
from sklearn.feature_extraction.text import CountVectorizer
app = Flask(__name__)

@app.route('/', methods=['POST'])
def classify():
    # Classify a POSTed form field `message` as 'spam' or 'ham'.
    message = request.form['message']
    # Your code here
    # NOTE(review): this is template/skeleton code — `prediction` is never
    # assigned, so the handler raises NameError until the vectorization and
    # model-inference step is implemented above. CountVectorizer is imported
    # but unused for the same reason.
    if prediction == 1:
        result = 'spam'
    else:
        result = 'ham'
    return result

if __name__ == '__main__':
    app.run()
<filename>model/redirectRequest.ts
/**
* Gr4vy API
* Welcome to the Gr4vy API reference documentation. Our API is still very much a work in product and subject to change.
*
* The version of the OpenAPI document: 1.1.0-beta
* Contact: <EMAIL>
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
import { RequestFile } from './models';
/**
* Request to use a redirect payment method in a transaction.
*/
/**
 * Request to use a redirect payment method in a transaction.
 *
 * NOTE: this file is generated by OpenAPI Generator (see the file header);
 * regenerate rather than hand-editing where possible.
 */
export class RedirectRequest {
    // Payment method identifier. NOTE(review): accepted values are not
    // visible here — consult the Gr4vy API specification.
    'method': string;
    /**
    * The redirect URL to redirect a buyer to after they have authorized their transaction.
    */
    'redirectUrl': string;
    /**
    * The ISO-4217 currency code to use this payment method for. This is used to select the payment service to use.
    */
    'currency': string;
    /**
    * The 2-letter ISO code of the country to use this payment method for. This is used to select the payment service to use.
    */
    'country': string;
    /**
    * An external identifier that can be used to match the account against your own records.
    */
    'externalIdentifier'?: string | null;
    /**
    * The ID of the buyer to associate this payment method to. If this field is provided then the `buyer_external_identifier` field needs to be unset.
    */
    'buyerId'?: string;
    /**
    * The `external_identifier` of the buyer to associate this payment method to. If this field is provided then the `buyer_id` field needs to be unset.
    */
    'buyerExternalIdentifier'?: string;

    // No polymorphic subtype discrimination for this model.
    static discriminator: string | undefined = undefined;

    // Maps TypeScript property names to their snake_case wire names and
    // types, used by the generated (de)serializer.
    static attributeTypeMap: Array<{name: string, baseName: string, type: string}> = [
        {
            "name": "method",
            "baseName": "method",
            "type": "string"
        },
        {
            "name": "redirectUrl",
            "baseName": "redirect_url",
            "type": "string"
        },
        {
            "name": "currency",
            "baseName": "currency",
            "type": "string"
        },
        {
            "name": "country",
            "baseName": "country",
            "type": "string"
        },
        {
            "name": "externalIdentifier",
            "baseName": "external_identifier",
            "type": "string"
        },
        {
            "name": "buyerId",
            "baseName": "buyer_id",
            "type": "string"
        },
        {
            "name": "buyerExternalIdentifier",
            "baseName": "buyer_external_identifier",
            "type": "string"
        } ];

    // Accessor used by the serialization helpers.
    static getAttributeTypeMap() {
        return RedirectRequest.attributeTypeMap;
    }
}
|
# _*_ coding: utf-8 _*_
"""
Created by Allen7D on 2018/5/31.
"""
from functools import wraps
from flasgger import swag_from
__author__ = 'Allen7D'
class RedPrint:
    """Blueprint-like route collector ("red print"): records view functions
    via `route()` and attaches them all to a real Flask blueprint later with
    `register()`. Optionally wires flasgger API docs via `doc()`."""

    def __init__(self, name, description, api_doc=None, alias=''):
        # name: URL prefix / endpoint namespace for this group of routes.
        self.name = name
        self.alias = alias  # alias for this endpoint group (used as the Swagger tag name)
        self.description = description
        # Deferred registrations: list of (handler, rule, options) tuples.
        self.mound = []
        # Object/module holding flasgger spec dicts, looked up by view name.
        self.api_doc = api_doc

    def route(self, rule, **options):
        # Flask-style decorator: records the view for later registration.
        def decorator(f):
            self.mound.append((f, rule, options))
            return f
        return decorator

    def register(self, bp, url_prefix=None):
        # Attach all recorded routes to blueprint `bp` under `url_prefix`
        # (defaults to '/<name>'); endpoints are namespaced '<name>+<func>'.
        if url_prefix is None:
            url_prefix = '/' + self.name
        for f, rule, options in self.mound:
            endpoint = self.name + '+' + options.pop("endpoint", f.__name__)
            bp.add_url_rule(url_prefix + rule, endpoint, f, **options)

    '''
    装饰器执行的顺序(初始化会被执行至 f层;如同洋葱层)
    装饰器到底要的是什么?无非是对函数进行包裹 & 获取函数信息
    对装饰器完善; doc改为swag_from就更好理解
    对于第三方的装饰器,如何「扩张其功能」
    ==> 能不能写一个修饰「装饰器」的函数
    '''
    def doc(self, *_args, **_kwargs):
        # Decorator that attaches a flasgger spec (looked up on `api_doc` by
        # the view function's name) to the wrapped view, when one exists.
        def decorator(f):
            specs = getattr(self.api_doc, f.__name__, None)
            if specs:
                specs['tags'] = [self.tag['name']]
                # Trim f.__doc__ to its first segment (before the first
                # '\n\t') so only the summary appears in generated docs.
                if f.__doc__ and '\n\t' in f.__doc__:
                    f.__doc__ = f.__doc__.split('\n\t')[0]
                @swag_from(specs=specs)
                @wraps(f)
                def wrapper(*args, **kwargs):
                    return f(*args, **kwargs)
                return wrapper
            else:
                @wraps(f)
                def wrapper(*args, **kwargs):
                    return f(*args, **kwargs)
                return wrapper
        return decorator

    @property
    def tag(self):
        # Swagger tag descriptor: the alias (when set) wins over the name.
        return {
            'name': self.alias if self.alias else self.name,
            'description': self.description
        }
|
#!/bin/bash
#SBATCH -J Act_maxsig_1
#SBATCH --mail-user=eger@ukp.informatik.tu-darmstadt.de
#SBATCH --mail-type=FAIL
#SBATCH -e /work/scratch/se55gyhe/log/output.err.%j
#SBATCH -o /work/scratch/se55gyhe/log/output.out.%j
#SBATCH -n 1     # Number of cores
#SBATCH --mem-per-cpu=6000
#SBATCH -t 23:59:00             # Hours, minutes and seconds, or '#SBATCH -t 10' -only mins
#module load intel python/3.5
# Run the PE sequence tagger with the 'maxsig' activation and the tuned
# hyperparameters (seed, optimizer, layers, dropout, learning rate, etc.).
python3 /home/se55gyhe/Act_func/sequence_tagging/arg_min/PE-my.py maxsig 401 Adagrad 3 0.7407753542185745 0.014049308979953747 varscaling 0.05
|
#!/bin/bash
# Container entrypoint: activate the virtualenv, initialise and seed the
# database, then serve the Flask app with gunicorn on port 5000.
source venv/bin/activate
flask init_db
flask populate_db_test
# exec replaces the shell so gunicorn runs as PID 1 and receives signals.
exec gunicorn -b :5000 --access-logfile - --error-logfile - app_source:app
|
<reponame>tlylt/markbind<filename>packages/vue-components/src/utils/pubsub.js
// Minimal synchronous pub/sub bus: event name -> list of handler functions.
const subscribers = {};

/**
 * Register `handler` to be invoked whenever `event` is published.
 * @param {string} event - event name
 * @param {Function} handler - callback invoked on publish
 */
export function subscribe(event, handler) {
  if (!subscribers[event]) {
    subscribers[event] = [];
  }
  subscribers[event].push(handler);
}

/**
 * Invoke every handler registered for `event`, in subscription order.
 * Generalized to forward any extra arguments to the handlers; existing
 * zero-argument callers are unaffected.
 * @param {string} event - event name
 * @param {...*} args - optional payload forwarded to each handler
 */
export function publish(event, ...args) {
  if (!subscribers[event]) {
    return;
  }
  subscribers[event].forEach(handler => handler(...args));
}
|
import requests
def get_html(url, timeout=10):
    """Fetch ``url`` and return the response body, or None on a non-200 reply.

    timeout (seconds) guards against hanging forever on an unresponsive host;
    the default keeps every existing call site working unchanged.
    Network-level failures still raise requests.RequestException, matching
    the original behavior.
    """
    response = requests.get(url, timeout=timeout)
    if response.status_code == 200:
        return response.text
    return None
# Bug fix: the scheme is required -- requests.get("www.example.com") raises
# requests.exceptions.MissingSchema instead of fetching anything.
html = get_html("https://www.example.com")
if html:
    print(html)
import pandas as pd
# Bug fix: LinearRegression was used below without ever being imported,
# which raises NameError at runtime.
from sklearn.linear_model import LinearRegression

# Load the dataset (expects City, Population and Crime_Rate columns).
data = pd.read_csv('data.csv')
# Split data into input and output variables.
X = data[['City', 'Population']]
y = data['Crime_Rate']
# One-hot encode the categorical column(s); numeric columns such as
# Population pass through get_dummies unchanged.
X = pd.get_dummies(X)
# Standardize features to zero mean / unit variance.
X = (X - X.mean()) / X.std()
# Fit an ordinary least-squares model.
model = LinearRegression()
model.fit(X, y)
import fetch from '@/fetch'
/**
 * Fetch the demo table list from the mock backend.
 * @param {Object} params - query-string parameters forwarded with the request
 * @returns the promise produced by the project's fetch wrapper
 */
export function getList(params) {
  return fetch({
    url: '/vue-admin-template/table/list',
    method: 'get',
    params
  })
}
|
#!/bin/bash
# Prepare the Jellyfin directory layout: media/config directories plus
# symlinks mapping /jellyfin/<name> onto persistent /share/jellyfin/<name>.
# The original repeated the same if/mkdir/chown stanza eleven times; the two
# helpers below perform exactly the same actions in the same order.

# Create directory $1 (owned by abc:abc) when it does not exist yet.
ensure_dir() {
    if [ ! -d "$1" ]; then
        echo "Creating $1"
        mkdir -p "$1"
        chown -R abc:abc "$1"
    fi
}

# Create /share/jellyfin/$1 and expose it as /jellyfin/$1 via a symlink.
ensure_link() {
    if [ ! -d "/jellyfin/$1" ]; then
        echo "Creating link for /jellyfin/$1"
        mkdir -p "/share/jellyfin/$1"
        chown -R abc:abc "/share/jellyfin/$1"
        ln -s "/share/jellyfin/$1" "/jellyfin/$1"
    fi
}

ensure_dir /jellyfin
ensure_dir /share/storage/tv
ensure_dir /share/storage/movies
ensure_dir /share/jellyfin

# links
ensure_link cache

# One-time migration from the legacy /config/jellyfin location.
if [ -d /config/jellyfin ]; then
    echo "Moving to new location /config/addons_config/jellyfin"
    mkdir -p /config/addons_config/jellyfin
    chown -R abc:abc /config/addons_config/jellyfin
    mv /config/jellyfin/* /config/addons_config/jellyfin/
    # Bug fix: plain `rm` cannot remove a directory and always failed here;
    # after the move the old directory only holds leftovers, remove it fully.
    rm -rf /config/jellyfin
fi

ensure_dir /config/addons_config/jellyfin

ensure_link data
ensure_link logs
ensure_link metadata
ensure_link plugins
ensure_link root
|
# Get current work dir (quoted throughout so paths containing spaces work).
WORK_DIR=$(pwd)
# Import global variables
source "$WORK_DIR/scripts/config/env.sh"
# Collect figure-6 pipeswitch data with the repository root on PYTHONPATH.
PYTHONPATH="$PYTHONPATH:$WORK_DIR" python scripts/figures/figure6/pipeswitch_1s/host_run_data.py "$WORK_DIR/scripts/config/servers.txt"
/* ct-kip.h include file for the PKCS #11 Mechanisms for the
* Cryptographic Token Key Initialization Protocol OTPS document.
*/
/* $Revision: 1.3 $ */
/* License to copy and use this software is granted provided that it is
* identified as "RSA Security Inc. Cryptographic Token Key Initialization
* Protocol (CT-KIP)" in all material mentioning or referencing this software.
* RSA Security Inc. makes no representations concerning either the
* merchantability of this software or the suitability of this software for
* any particular purpose. It is provided "as is" without express or implied
* warranty of any kind.
*/
/* This file is preferably included after inclusion of pkcs11.h */
#ifndef _CT_KIP_H_
#define _CT_KIP_H_ 1
/* Are the definitions of this file already included in pkcs11t.h? */
#ifndef CKM_KIP_DERIVE
#ifdef __cplusplus
extern "C" {
#endif
/* Mechanism Identifiers */
#define CKM_KIP_DERIVE	 0x00000510
#define CKM_KIP_WRAP	 0x00000511
#define CKM_KIP_MAC	 0x00000512
/* Structures */
/* Parameters for the CT-KIP key derivation/wrap/MAC mechanisms above.
 * NOTE(review): field semantics follow the OTPS CT-KIP document; the CK_*
 * types are expected to come from a previously included pkcs11.h. */
typedef struct CK_KIP_PARAMS {
  CK_MECHANISM_PTR pMechanism;   /* underlying mechanism to use */
  CK_OBJECT_HANDLE hKey;         /* handle of the key to operate with */
  CK_BYTE_PTR pSeed;             /* seed/nonce input */
  CK_ULONG ulSeedLen;            /* length of pSeed in bytes */
} CK_KIP_PARAMS;
typedef CK_KIP_PARAMS CK_PTR CK_KIP_PARAMS_PTR;
#ifdef __cplusplus
}
#endif
#endif
#endif
|
<reponame>frc1418/2014
from pyfrc import wpilib
from pyfrc.physics import drivetrains
import math
class PhysicsEngine(object):
    '''
        Useful simulation pieces for testing our robot code
    '''
    def __init__(self, physics_controller):
        # pyfrc hook object used to drive the simulated robot
        self.physics_controller = physics_controller
        self.winch_value = 0
        self.winch_min = 2.5 # top
        self.winch_max = 4.1 # bottom
        self.n = 0
        # simulated potentiometer reading; starts at the top of travel
        self.winch_position = self.winch_min
        self.winch_range = self.winch_max - self.winch_min
        # accumulated run time of the winch motor; None while it is stopped
        self.motor_tm = None
    def update_sim(self, now, tm_diff):
        # Advance the simulated winch and drivetrain by tm_diff seconds.
        # NOTE(review): CAN device 5 is assumed to be the winch motor --
        # confirm against the robot code.
        motor = wpilib.CAN._devices[5]
        motor.forward_ok = True
        # when the dog is let out, then position will never go down
        dog_out = (wpilib.Solenoid._channels[1].value == True)
        # let the winch out!
        if dog_out:
            if self.winch_position > self.winch_min:
                self.winch_position += self.winch_range * tm_diff * -3
        else:
            # calculate winch based on motor value
            if self.winch_position <= self.winch_max:
                self.winch_position += motor.value * self.winch_range * tm_diff * .7
            else:
                motor.forward_ok = False
        # potentiometer value is position
        wpilib.AnalogModule._channels[3].voltage = self.winch_position
        # calculate the voltage/current
        if motor.value == 0 or motor.forward_ok == False:
            self.motor_tm = None
            motor.voltage = 0
            motor.current = 0
            self.n += 1
        else:
            # if motor is running, voltage is constant (probably not realistic)
            motor.voltage = motor.value * 12.5
            if self.motor_tm is None:
                self.motor_tm = 0
            else:
                self.motor_tm += tm_diff
            # some equation that makes a pretty graph
            motor.current = motor.value * math.sin(self.n + 8*self.motor_tm) + 3*self.motor_tm
        # Simulate the drivetrain
        lf_motor = wpilib.DigitalModule._pwm[0].Get() * -1
        lr_motor = wpilib.DigitalModule._pwm[1].Get() * -1
        rr_motor = wpilib.DigitalModule._pwm[2].Get()
        rf_motor = wpilib.DigitalModule._pwm[3].Get()
        # Our robot's wheels are wrong, so switch y/x, and invert everything
        vy, vx, vw = drivetrains.mecanum_drivetrain(lr_motor, rr_motor, lf_motor, rf_motor)
        self.physics_controller.vector_drive(-vx, vy, -vw, tm_diff)
|
<reponame>lufrai/alo
import { StoreInterface } from "../store/types";
import { Action } from "../action/types";
/** Options accepted by an action normalizer. */
export type NormalizeOptions = {
  /** The action being normalized. */
  action: Action;
  /** Invoked with the action; may return a replacement action or undefined. */
  callBack: (action: Action) => Action | undefined;
  /** Store the action is dispatched against. */
  store: StoreInterface;
};
/**
 * Strategy interface: turns a dispatched action into the action actually
 * applied, or undefined to drop it.
 */
export interface ActionNormalizerInterface {
  normalize(options: NormalizeOptions): Action | undefined;
}
|
from abc import ABC, abstractmethod
from typing import Dict, List, Any
class Template(ABC):
    # Abstract interface for a template-engine binding: concrete subclasses
    # expose the engine environment plus the search paths and the
    # function/filter/test registries used during rendering.
    # Underlying engine environment object (engine-specific).
    @property
    @abstractmethod
    def env(self) -> Any: pass
    # Template search paths.
    @property
    @abstractmethod
    def paths(self) -> List[str]: pass
    # Callables exposed to template contexts.
    @property
    @abstractmethod
    def context_functions(self) -> Dict: pass
    # Filters exposed to template contexts.
    @property
    @abstractmethod
    def context_filters(self) -> Dict: pass
    # Engine-level filters.
    @property
    @abstractmethod
    def filters(self) -> Dict: pass
    # Engine-level tests.
    @property
    @abstractmethod
    def tests(self) -> Dict: pass
|
# Protect against non-zsh execution of Oh My Zsh (use POSIX syntax here)
[ -n "$ZSH_VERSION" ] || {
  # ANSI formatting function (\033[<code>m)
  # 0: reset, 1: bold, 4: underline, 22: no bold, 24: no underline, 31: red, 33: yellow
  omz_f() {
    [ $# -gt 0 ] || return
    IFS=";" printf "\033[%sm" $*
  }
  # If stdout is not a terminal ignore all formatting
  [ -t 1 ] || omz_f() { :; }
  omz_ptree() {
    # Get process tree of the current process
    pid=$$; pids="$pid"
    while [ ${pid-0} -ne 1 ] && ppid=$(ps -e -o pid,ppid | awk "\$1 == $pid { print \$2 }"); do
      pids="$pids $pid"; pid=$ppid
    done
    # Show process tree
    case "$(uname)" in
    Linux) ps -o ppid,pid,command -f -p $pids 2>/dev/null ;;
    Darwin|*) ps -o ppid,pid,command -p $pids 2>/dev/null ;;
    esac
    # If ps command failed, try Busybox ps
    [ $? -eq 0 ] || ps -o ppid,pid,comm | awk "NR == 1 || index(\"$pids\", \$2) != 0"
  }
  {
    shell=$(ps -o pid,comm | awk "\$1 == $$ { print \$2 }")
    printf "$(omz_f 1 31)Error:$(omz_f 22) Oh My Zsh can't be loaded from: $(omz_f 1)${shell}$(omz_f 22). "
    printf "You need to run $(omz_f 1)zsh$(omz_f 22) instead.$(omz_f 0)\n"
    printf "$(omz_f 33)Here's the process tree:$(omz_f 22)\n\n"
    omz_ptree
    printf "$(omz_f 0)\n"
  } >&2
  # Abort sourcing entirely: the rest of this file requires zsh.
  return 1
}
# If ZSH is not defined, use the current script's directory.
[[ -z "$ZSH" ]] && export ZSH="${${(%):-%x}:a:h}"
# Set ZSH_CACHE_DIR to the path where cache files should be created
# or else we will use the default cache/
if [[ -z "$ZSH_CACHE_DIR" ]]; then
  ZSH_CACHE_DIR="$ZSH/cache"
fi
# Make sure $ZSH_CACHE_DIR is writable, otherwise use a directory in $HOME
if [[ ! -w "$ZSH_CACHE_DIR" ]]; then
  ZSH_CACHE_DIR="${XDG_CACHE_HOME:-$HOME/.cache}/oh-my-zsh"
fi
# Create cache and completions dir and add to $fpath
mkdir -p "$ZSH_CACHE_DIR/completions"
(( ${fpath[(Ie)"$ZSH_CACHE_DIR/completions"]} )) || fpath=("$ZSH_CACHE_DIR/completions" $fpath)
# Check for updates on initial load...
if [ "$DISABLE_AUTO_UPDATE" != "true" ]; then
  source $ZSH/tools/check_for_upgrade.sh
fi
# Initializes Oh My Zsh
# add a function path
fpath=($ZSH/functions $ZSH/completions $fpath)
# Load all stock functions (from $fpath files) called below.
autoload -U compaudit compinit
# Set ZSH_CUSTOM to the path where your custom config files
# and plugins exists, or else we will use the default custom/
if [[ -z "$ZSH_CUSTOM" ]]; then
    ZSH_CUSTOM="$ZSH/custom"
fi
# True when $2 is a plugin shipped under $1 (has a .plugin.zsh or a
# completion file).
is_plugin() {
  local base_dir=$1
  local name=$2
  builtin test -f $base_dir/plugins/$name/$name.plugin.zsh \
    || builtin test -f $base_dir/plugins/$name/_$name
}
# Add all defined plugins to fpath. This must be done
# before running compinit.
for plugin ($plugins); do
  if is_plugin $ZSH_CUSTOM $plugin; then
    fpath=($ZSH_CUSTOM/plugins/$plugin $fpath)
  elif is_plugin $ZSH $plugin; then
    fpath=($ZSH/plugins/$plugin $fpath)
  else
    echo "[oh-my-zsh] plugin '$plugin' not found"
  fi
done
# Figure out the SHORT hostname
if [[ "$OSTYPE" = darwin* ]]; then
  # macOS's $HOST changes with dhcp, etc. Use ComputerName if possible.
  SHORT_HOST=$(scutil --get ComputerName 2>/dev/null) || SHORT_HOST=${HOST/.*/}
else
  SHORT_HOST=${HOST/.*/}
fi
# Save the location of the current completion dump file.
if [ -z "$ZSH_COMPDUMP" ]; then
  ZSH_COMPDUMP="${ZDOTDIR:-${HOME}}/.zcompdump-${SHORT_HOST}-${ZSH_VERSION}"
fi
# Construct zcompdump OMZ metadata
zcompdump_revision="#omz revision: $(builtin cd -q "$ZSH"; git rev-parse HEAD 2>/dev/null)"
zcompdump_fpath="#omz fpath: $fpath"
# Delete the zcompdump file if OMZ zcompdump metadata changed
if ! command grep -q -Fx "$zcompdump_revision" "$ZSH_COMPDUMP" 2>/dev/null \
   || ! command grep -q -Fx "$zcompdump_fpath" "$ZSH_COMPDUMP" 2>/dev/null; then
  command rm -f "$ZSH_COMPDUMP"
  zcompdump_refresh=1
fi
if [[ $ZSH_DISABLE_COMPFIX != true ]]; then
  source $ZSH/lib/compfix.zsh
  # If completion insecurities exist, warn the user
  handle_completion_insecurities
  # Load only from secure directories
  compinit -i -C -d "${ZSH_COMPDUMP}"
else
  # If the user wants it, load from all found directories
  compinit -u -C -d "${ZSH_COMPDUMP}"
fi
# Append zcompdump metadata if missing
if (( $zcompdump_refresh )); then
  # Use `tee` in case the $ZSH_COMPDUMP filename is invalid, to silence the error
  # See https://github.com/ohmyzsh/ohmyzsh/commit/dd1a7269#commitcomment-39003489
  tee -a "$ZSH_COMPDUMP" &>/dev/null <<EOF
$zcompdump_revision
$zcompdump_fpath
EOF
fi
unset zcompdump_revision zcompdump_fpath zcompdump_refresh
# Load all of the config files in ~/oh-my-zsh that end in .zsh
# TIP: Add files you don't want in git to .gitignore
for config_file ($ZSH/lib/*.zsh); do
  custom_config_file="${ZSH_CUSTOM}/lib/${config_file:t}"
  [ -f "${custom_config_file}" ] && config_file=${custom_config_file}
  source $config_file
done
# Load all of the plugins that were defined in ~/.zshrc
for plugin ($plugins); do
  if [ -f $ZSH_CUSTOM/plugins/$plugin/$plugin.plugin.zsh ]; then
    source $ZSH_CUSTOM/plugins/$plugin/$plugin.plugin.zsh
  elif [ -f $ZSH/plugins/$plugin/$plugin.plugin.zsh ]; then
    source $ZSH/plugins/$plugin/$plugin.plugin.zsh
  fi
done
# Load all of your custom configurations from custom/
for config_file ($ZSH_CUSTOM/*.zsh(N)); do
  source $config_file
done
unset config_file
# Load the theme
if [ ! "$ZSH_THEME" = "" ]; then
  if [ -f "$ZSH_CUSTOM/$ZSH_THEME.zsh-theme" ]; then
    source "$ZSH_CUSTOM/$ZSH_THEME.zsh-theme"
  elif [ -f "$ZSH_CUSTOM/themes/$ZSH_THEME.zsh-theme" ]; then
    source "$ZSH_CUSTOM/themes/$ZSH_THEME.zsh-theme"
  else
    source "$ZSH/themes/$ZSH_THEME.zsh-theme"
  fi
fi
# Alias the `ls` command
# alias ls="ls -lhF"
'use strict';
const { Command, SimplicityEmbed } = require('@structures');
const DisableCommand = require('./disable');
const LanguageCommand = require('./language');
const PrefixCommand = require('./prefix');
const StarboardCommand = require('./starboard');
/**
 * Guild configuration command: shows the current prefix, language,
 * starboard channel and disabled channels, and hosts the sub-commands
 * that change each of those settings.
 */
class Config extends Command {
  constructor(client) {
    super(client, 'config', {
      aliases: ['configuration', 'serversettings', 's', 'serverconfig', 'serverconfiguration'],
      category: 'module',
      cooldown: 5000,
      requirements: {
        guildOnly: true,
        permissions: ['MANAGE_GUILD'],
        requireDatabase: true,
      },
    });
    this.registerSubCommand(PrefixCommand, { aliases: ['p', 'setp', 'setprefix'] });
    this.registerSubCommand(LanguageCommand, { aliases: ['l', 'lang', 'setlang', 'setlanguage', 'setl'] });
    this.registerSubCommand(StarboardCommand, { aliases: ['star', 's', 'setstarboard'] });
    this.registerSubCommand(DisableCommand, {
      aliases: ['disablecommands', 'disable-command', 'disablecmd', 'cmddisable'],
      name: 'disablecommand',
    });
  }
  /**
   * Build and send the settings embed for this guild.
   * NOTE(review): the '$$...' strings appear to be i18n keys resolved by
   * SimplicityEmbed via `t` -- confirm in the embed implementation.
   */
  run({ guild, guildData, author, language, prefix, send, t }) {
    const channel = guild.channels.cache.find((c) => c.id === guildData.starboard);
    const disableChannels = guildData.disableChannels
      .map((id) => guild.channels.cache.get(id))
      .filter((ch) => ch)
      .map((ch) => `${ch}`);
    const text = disableChannels.length ? disableChannels.join(', ') : '$$commands:config.noDisableChannel';
    const embed = new SimplicityEmbed({ author, t })
      .addField('» $$commands:config.prefix', prefix, true)
      .addField('» $$commands:config.language', language, true)
      .addField('» $$commands:config.starboard', channel ? `${channel}` : '$$commands:config.moduleOff', true)
      .addField('» $$commands:config.disableChannels', text);
    return send(embed);
  }
}
module.exports = Config;
|
<filename>test/definition.js<gh_stars>1-10
import describe from 'tape-bdd';
import Self from 'src/value-object';
// Behavioral tests for ValueObject.define: argument validation plus the
// shape of the constructor it returns.  Each case calls clearDatabase() so
// the shared definition registry stays clean between tests.
describe('ValueObject definition', (it) => {
  it('throws an exception if it has no name', (assert) => {
    assert.throws(() => Self.define());
    Self.clearDatabase();
  });
  it('throws an exception if it has no definition', (assert) => {
    assert.throws(() => Self.define('ValueObject'));
    Self.clearDatabase();
  });
  it('throws an exception if validate is missing', (assert) => {
    assert.throws(() => Self.define('ValueObject', {}));
    Self.clearDatabase();
  });
  it('throws an exception if validate is not a function', (assert) => {
    assert.throws(() => Self.define('ValueObject', { validate: true }));
    Self.clearDatabase();
  });
  it('returns a constructor', (assert) => {
    const ValueObject = Self.define('ValueObject', { validate: () => true });
    assert.ok(new ValueObject('test'));
    Self.clearDatabase();
  });
  it('constructor returns an object of type ValueObject', (assert) => {
    const ValueObject = Self.define('ValueObject', { validate: () => true });
    const object = new ValueObject('test');
    assert.ok(object instanceof Self);
    Self.clearDatabase();
  });
  it('constructor returns an object of the created type', (assert) => {
    const CustomValueObject = Self.define('ValueObject', { validate: () => true });
    const object = new CustomValueObject('test');
    assert.ok(object instanceof CustomValueObject);
    Self.clearDatabase();
  });
  it('constructor returns an object with the definition properties', (assert) => {
    const ValueObject = Self.define('ValueObject', { validate: () => true });
    const object = new ValueObject('test');
    assert.ok(object.validate());
    Self.clearDatabase();
  });
});
|
#!/bin/bash -x
#
# Generated - do not edit!
#
# NOTE(review): NetBeans-generated packaging script (bundles libplc.a into
# PLC.tar); manual edits will be lost when the IDE regenerates it.
# Macros
TOP=`pwd`
CND_PLATFORM=GNU-Linux
CND_CONF=Debug
CND_DISTDIR=dist
CND_BUILDDIR=build
CND_DLIB_EXT=so
NBTMPDIR=${CND_BUILDDIR}/${CND_CONF}/${CND_PLATFORM}/tmp-packaging
TMPDIRNAME=tmp-packaging
OUTPUT_PATH=${CND_DISTDIR}/${CND_CONF}/${CND_PLATFORM}/libplc.a
OUTPUT_BASENAME=libplc.a
PACKAGE_TOP_DIR=PLC/
# Functions
# Exit the script when the previous command failed.
function checkReturnCode
{
    rc=$?
    if [ $rc != 0 ]
    then
        exit $rc
    fi
}
function makeDirectory
# $1 directory path
# $2 permission (optional)
{
    mkdir -p "$1"
    checkReturnCode
    if [ "$2" != "" ]
    then
        chmod $2 "$1"
        checkReturnCode
    fi
}
function copyFileToTmpDir
# $1 from-file path
# $2 to-file path
# $3 permission
{
    cp "$1" "$2"
    checkReturnCode
    if [ "$3" != "" ]
    then
        chmod $3 "$2"
        checkReturnCode
    fi
}
# Setup
cd "${TOP}"
mkdir -p ${CND_DISTDIR}/${CND_CONF}/${CND_PLATFORM}/package
rm -rf ${NBTMPDIR}
mkdir -p ${NBTMPDIR}
# Copy files and create directories and links
cd "${TOP}"
makeDirectory "${NBTMPDIR}/PLC/lib"
copyFileToTmpDir "${OUTPUT_PATH}" "${NBTMPDIR}/${PACKAGE_TOP_DIR}lib/${OUTPUT_BASENAME}" 0644
# Generate tar file
cd "${TOP}"
rm -f ${CND_DISTDIR}/${CND_CONF}/${CND_PLATFORM}/package/PLC.tar
cd ${NBTMPDIR}
tar -vcf ../../../../${CND_DISTDIR}/${CND_CONF}/${CND_PLATFORM}/package/PLC.tar *
checkReturnCode
# Cleanup
cd "${TOP}"
rm -rf ${NBTMPDIR}
|
module PoolParty
  module Resources
    # Puppet-style managed-file resource.
    class File < Resource
      default_options({
        :ensure => "file",
        :mode => 644
        # :owner => "#{Base.user}"
      })

      # Options callers may not set directly on this resource.
      def disallowed_options
        [:name, :template, :cwd]
      end

      # Getter/setter for the file's source URL.
      # Bug fix: the reader previously ignored any value stored by an earlier
      # `source(arg)` call and always rebuilt the fileserver default; it now
      # prefers the stored value.
      def source(arg=nil)
        if arg
          options[:source] = arg
        else
          options[:source] || "#{Base.fileserver_base}/#{::File.basename(name)}"
        end
      end
    end
  end
end
from rofl.config import createConfig, createAgent, getEnvMaker
from rofl.algorithms.a2c import algConfig
from rofl.config.config import createNetwork, createPolicy
# Environment: continuous LunarLander built via the gym env maker.
envConfig = {
    'envMaker' : 'gymEnvMaker',
    'name': 'LunarLanderContinuous-v2',
    'atari': False,
    'max_length': 500,
    'warmup' : None,
}
# Agent settings (currently unused -- the 'agent' key is commented out below).
agentCnf = {
    'agentClass' : 'agentMaster',
}
# Policy-gradient policy with a small actor network.
policyCnf = {
    'policyClass' : 'pgPolicy',
    # NOTE(review): 'continuos' looks misspelled, but it may be the exact
    # config key rofl expects -- confirm before renaming.
    'continuos' : True,
    'network' : {
        'networkClass' : 'gymActor',
        'linear_1' : 56,
    }
}
thisCnf = {
    'env': envConfig,
    'policy' : policyCnf,
    #'agent' : agentCnf,
}
if __name__ == '__main__':
    # Assemble config -> env maker -> actor network -> policy -> agent,
    # then step until interrupted from the keyboard.
    config = createConfig(algConfig, thisCnf, expName='a2c')
    eMaker = getEnvMaker(config)
    actor = createNetwork(config)
    policy = createPolicy(config, actor)
    agent = createAgent(config, policy, eMaker)
    try:
        for _ in range(10 ** 6):
            agent.fullStep()
    except KeyboardInterrupt:
        print('Done by keyboard...')
    print('oks')
    agent.close()
|
import Foundation
/// Dispatches progress and error updates through the Dispatcher conformance
/// declared in the extension below.
/// Bug fix: the class previously declared itself as its own superclass
/// (`class ProgressDispatcher: ProgressDispatcher`), which cannot compile,
/// and the `override` methods referenced an undefined `dispatcher`; both
/// methods now call the type's own dispatch implementations.
class ProgressDispatcher {
    func updateProgress(_ progress: Progress) {
        let dispatchedProgress = dispatch(progress)
        // Perform any additional operations with dispatchedProgress
        _ = dispatchedProgress
    }
    func updateError(_ error: NSError) {
        let dispatchedProgress = dispatch(Progress(), error: error)
        // Perform any additional operations with dispatchedProgress
        _ = dispatchedProgress
    }
}
extension ProgressDispatcher: Dispatcher {
    func dispatch(_ value: Progress) -> Progress {
        return value
    }
    func dispatch(_ value: Progress, error: NSError) -> Progress {
        // Handle error and return the updated progress
        return value
    }
}
"""
Make plots of the Radial(n,m,r) function
"""
from poptics.zernike import radial
import matplotlib.pyplot as plt
from poptics.tio import getInt
import numpy as np
def main():
    """Plot the Zernike radial polynomial Radial(n, m, r) for every legal m."""
    # Get max n value from the user (default 4, minimum 0)  [typo fixed: "mex"]
    n = getInt("n value",4,0)
    rData = np.linspace(0.0,1.0,100) # rData spans the radial range 0 -> 1.0
    # Plot out for polynomial for the legal range on m (same parity as n, up to n)
    for m in range(n%2,n + 1,2):
        plt.plot(rData,radial(n,m,rData),label = "m : {0:d}".format(m))
    # Make a standard plot.
    plt.title("Plot of Radial({0:d},m)".format(n))
    plt.legend(loc = "upper left", fontsize = "small")
    plt.xlim([0,1])
    plt.ylim([-1,1.2])
    plt.xlabel("Radius")
    plt.ylabel("Value")
    plt.show()
if __name__ == "__main__":
    main()
|
const Song = require('../models/song');

// Controller actions for the Song resource (create/list/delete).
module.exports = {
  create,
  index,
  deleteSong
}
/**
 * POST /songs -- create a song owned by the authenticated user.
 * Responds 201 with the created document, or echoes the error as JSON.
 */
async function create(req, res){
    console.log('we are here', req.body)
    try {
        const song = await Song.create({title: req.body.songTitle, artist: req.body.songArtist, user: req.user});
        res.status(201).json({song: song})
    } catch(err){
        console.log(err)
        res.json({data: err})
    }
}
/**
 * GET /songs -- list all songs with their owning user populated.
 * Bug fix: errors were silently swallowed (empty catch), leaving the request
 * hanging with no response; the client now receives a 500.
 */
async function index(req, res){
    try {
        const songs = await Song.find({}).populate('user').exec()
        res.status(200).json({songs})
    } catch(err){
        console.log(err)
        res.status(500).json({data: err})
    }
}
/**
 * DELETE /songs/:id -- remove a song by id.
 * Fix: findByIdAndDelete resolves to null when nothing matches; respond 404
 * in that case instead of returning 201 with a null song.
 * NOTE(review): 200 would be a more appropriate success status than 201
 * (nothing is created), but 201 is kept for client compatibility.
 */
async function deleteSong(req, res){
    try {
        const song = await Song.findByIdAndDelete(req.params.id);
        if (!song) {
            return res.status(404).json({error: 'Song not found'});
        }
        res.status(201).json({song: song})
    } catch(err){
        console.log(err)
        res.json({data: err})
    }
}
<filename>sql/pretty9.sql<gh_stars>10-100
-- Select every column from t0 joined with t1 on all identically named
-- columns (NATURAL JOIN); shared join columns appear once in the output.
SELECT
    *
FROM
    t0
    NATURAL JOIN t1
|
<filename>javaSources/Miscellaneous/Stream.java
import java.util.ArrayList;
import java.util.Arrays;
/**
 * Demonstrates three ways of creating a java.util.stream.Stream.
 *
 * Fixes over the original, which did not compile: correct {@code Stream.of}
 * call (was lowercase {@code stream.of}), valid generic declarations (the
 * originals like {@code Stream String arrToStream} were not Java syntax),
 * the println moved inside {@code main}, and fully-qualified
 * {@code java.util.stream.Stream} throughout to avoid the clash with this
 * class's own name.
 */
public class Stream {
    public static void main(String[] args) {
        // 1. Create a stream using of().
        java.util.stream.Stream<String> letters = java.util.stream.Stream.of("a", "b", "c");
        // 2. Create a stream from an array.
        String[] stringArr = new String[] {"J", "A", "V", "A"};
        java.util.stream.Stream<String> arrToStream = Arrays.stream(stringArr);
        // 3. Create a stream from a collection list.
        ArrayList<String> list = new ArrayList<>();
        list.add("J");
        list.add("A");
        list.add("V");
        list.add("A");
        java.util.stream.Stream<String> stream1 = list.stream();
        // Matches the original intent: print the stream reference.
        System.out.println(stream1);
        // Keep the other demo streams referenced so the example stays complete.
        System.out.println(letters.count() + arrToStream.count());
    }
}
|
# Train and evaluate SegParser on CTB; $1 is the run id, all remaining
# arguments are forwarded to SegParser verbatim.
runid=$1
# Expose the fixed train/dev splits under the run-specific names SegParser expects.
ln -s ctb.seg.train ../data/ctb.train.$runid
ln -s ctb.seg.dev ../data/ctb.test.$runid
./SegParser train test train-file:../data/ctb.train.$runid model-name:../runs/ctb.model.$runid test-file:../data/ctb.test.$runid output-file:../runs/ctb.out.$runid seed:14 earlystop:40 evalpunc:false C:0.001 train-converge:300 test-converge:300 $@ | tee ../runs/ctb.log.$runid
# Clean up the temporary split symlinks.
rm ../data/ctb.train.$runid
rm ../data/ctb.test.$runid
|
<filename>utils/config_simple.py
import re
import common
from base.config import Config
class SimpleConfig(Config):
    """Config parser for simple ``key = value`` files (e.g. ifcfg, grub)."""

    def _parse_value(self, value: str):
        """Strip one layer of matching double or single quotes, if present."""
        if common.str_compare_head_tail(value, '"') or common.str_compare_head_tail(value, '\''):
            return value[1:-1]
        else:
            return value

    def _parse_object(self, line):
        """Parse one line; non-blank ``key = value`` lines get ``key``/``value``
        extras and an unquoted ``value`` attribute on the returned object."""
        obj = super()._parse_object(line)
        line_s = line.strip()
        if line_s != '':
            # Bug fix: raw string for the pattern -- '\s' inside a normal
            # string literal is an invalid escape sequence and raises a
            # SyntaxWarning (eventually an error) on modern Python.
            match = re.match(r'^(?P<key>[^#\s]*?)\s*=\s*(?P<value>.*?)$', line_s)
            if match is not None:
                obj.extra.update(match.groupdict())
                obj.value = self._parse_value(obj['value'])
        return obj

    def _map_object(self, obj):
        """Index the parsed object by its key for fast lookup."""
        if obj['key'] is not None:
            self.set_map(obj['key'], obj)
if __name__ == '__main__':
    # Smoke test: parse the sample ifcfg and grub fixtures and dump them.
    with open('../test/ifcfg') as io:
        content = io.read()
    c = SimpleConfig(content)
    print(c)
    with open('../test/grub') as io:
        content = io.read()
    c = SimpleConfig(content)
    print(c)
|
#!/bin/bash
# /**
# * Copyright by Ruman Gerst
# * Research Group Applied Systems Biology - Head: Prof. Dr. Marc Thilo Figge
# * https://www.leibniz-hki.de/en/applied-systems-biology.html
# * HKI-Center for Systems Biology of Infection
# * Leibniz Institute for Natural Product Research and Infection Biology - Hans Knöll Insitute (HKI)
# * Adolf-Reichwein-Straße 23, 07745 Jena, Germany
# *
# * This code is licensed under BSD 2-Clause
# * See the LICENSE file provided with this code for the full license.
# */
# Downloads
MISA_IMAGEJ_SOURCES="https://github.com/applied-systems-biology/misa-imagej/archive/master.zip"
MAVEN_DOWNLOAD="ftp://ftp.fu-berlin.de/unix/www/apache/maven/maven-3/3.6.0/binaries/apache-maven-3.6.0-bin.zip"
FIJI_DOWNLOAD="https://downloads.imagej.net/fiji/latest/fiji-win64.zip"
# Download $1 to $2.zip (when needed) and unzip it, unless $2 already exists.
# Fix: $1/$2 are now quoted so URLs or paths containing spaces or shell
# metacharacters cannot break word splitting.
function download_if_not_exist {
    if [ ! -e "$2" ]; then
        if [ ! -e "$2.zip" ]; then
            wget --no-check-certificate -O "$2.zip" "$1" || { echo "Download of $1 failed" ; exit 1; } # --no-check-certificate is needed because anti-viruses break the certificate chain
        fi
        unzip -o "$2.zip" || { echo "Extracting $2.zip failed" ; exit 1; }
    fi
}
# Download Maven
download_if_not_exist $MAVEN_DOWNLOAD "apache-maven"
mv apache-maven-* apache-maven
MAVEN_EXECUTABLE="$PWD/apache-maven/bin/mvn"
# Download and extract MISA++ for ImageJ
download_if_not_exist $MISA_IMAGEJ_SOURCES "misa-imagej-master"
pushd misa-imagej-master
$MAVEN_EXECUTABLE package || { echo 'Building MISA++ for ImageJ failed' ; exit 1; }
popd
# Create the target dir (wipe any previous packaging output first)
rm -r misa-imagej-package
mkdir misa-imagej-package
mkdir -p misa-imagej-package/plugins
mkdir -p misa-imagej-package/jars
# Create README (heredoc content is emitted verbatim; do not indent it)
cat >misa-imagej-package/README.txt << EOL
MISA++ for ImageJ
-----------------
Installation
=================
Copy all files in ./plugins/ and ./jars/ into their respective directories within the Fiji app folder
Usage
=================
Navigate to Plugins > MISA++ for ImageJ
Copyright
=================
Copyright by Ruman Gerst
Research Group Applied Systems Biology - Head: Prof. Dr. Marc Thilo Figge
https://www.leibniz-hki.de/en/applied-systems-biology.html
HKI-Center for Systems Biology of Infection
Leibniz Institute for Natural Product Research and Infection Biology - Hans Knöll Insitute (HKI)
Adolf-Reichwein-Straße 23, 07745 Jena, Germany
The project code is licensed under BSD 2-Clause.
See the LICENSE.txt file provided with the code for the full license.
EOL
# Create LICENSE.txt
cat >misa-imagej-package/LICENSE.txt << EOL
BSD 2-Clause License
Copyright (c) 2019, Ruman Gerst
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
  list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
  this list of conditions and the following disclaimer in the documentation
  and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
EOL
# Install the MISA++ for ImageJ plugin
cp ./misa-imagej-master/target/misa_imagej-*.jar ./misa-imagej-package/plugins/
# Download missing jfreesvg
pushd ./misa-imagej-master/target/dependencies/
wget http://maven.imagej.net/content/groups/public/org/jfree/jfreesvg/3.3/jfreesvg-3.3.jar
popd
# Copy necessary dependencies
# $1: jar-name prefix under target/dependencies/ to copy into the package.
function copy_dependency {
	cp -v ./misa-imagej-master/target/dependencies/$1*.jar ./misa-imagej-package/jars/
}
copy_dependency autolink
copy_dependency flexmark
copy_dependency graphics2d
copy_dependency jfreesvg
copy_dependency openhtmltopdf
copy_dependency pdfbox
copy_dependency poi
copy_dependency sqlite
copy_dependency bcprov-jdk15on
copy_dependency bcpkix-jdk15on
copy_dependency icepdf
copy_dependency commons-exec
copy_dependency xmlbeans
copy_dependency commons-collections4
|
import { Injectable } from '@angular/core';
import { BsModalService } from 'ngx-bootstrap/modal';
import {Observable} from 'rxjs/Observable';
import {AlertDialogComponent} from './alert-dialog/alert-dialog.component';
import {DomSanitizer} from "@angular/platform-browser";
import {TranslateService} from "@ngx-translate/core";
@Injectable()
export class AlertsService {
  constructor(private modalService: BsModalService, private sanitized: DomSanitizer, private translate: TranslateService) { }
  /** Yes/no confirmation dialog; emits 'yes', 'no' or null when dismissed. */
  confirm(params: { text: string, title?: string}): Observable<'yes'|'no'|null> {
    return this.openDialog(params.title || 'title', params.text, 'help', [
      {label: 'actions.yes', theme: 'success', icon: 'ok'},
      {label: 'actions.no', theme: 'default', icon: 'remove'}
    ]).map((btnNum: number) => {
      if (btnNum === 0) {
        return 'yes';
      } else if (btnNum === 1) {
        return 'no';
      } else {
        return null;
      }
    });
  }
  /** Informational dialog with a single OK button. */
  info(params: { text: string, title?: string}): void {
    this.openDialog(params.title || 'title', params.text, 'noicon', [
      {label: 'actions.ok', theme: 'default', icon: 'ok'},
    ], 'md')
  }
  /** Error dialog; accepts a message string or an error-like object. */
  error(params: { error: any, title?: string}): void {
    let text = '';
    let title = params.title || 'error.title';
    if (typeof params.error === 'string') {
      text = params.error;
    } else {
      if (params.error.exception) {
        text = params.error.exception + ': ';
      }
      if (params.error.message) {
        text += params.error.message;
      }
      if (params.error.error) {
        title = params.error.error;
      }
    }
    this.openDialog(title, text, 'alert-outline', [
      {label: 'actions.ok', theme: 'default', icon: 'ok'}
    ], 'md')
  }
  /**
   * Shared modal builder: translates and sanitizes the text (newlines become
   * <BR>), shows AlertDialogComponent and returns the button-index stream.
   */
  private openDialog(title: string, text: string, icon: string, buttons: {label: string, theme: string, icon?: string}[], size = 'sm' ): Observable<number> {
    const bsModalRef = this.modalService.show(AlertDialogComponent, {
      animated: false, keyboard: true, backdrop: true, ignoreBackdropClick: true, class: 'modal-' + size
    });
    const formComponent: AlertDialogComponent = (<AlertDialogComponent>bsModalRef.content);
    formComponent.title = title;
    formComponent.text = this.sanitized.bypassSecurityTrustHtml(this.translate.instant(text.split('\n').join('<BR>')));
    formComponent.icon = icon;
    formComponent.buttons = buttons;
    return formComponent.onSubmit.asObservable();
  }
}
|
#!/usr/bin/env bash

if [[ $DEBUG != "" ]]; then
  set -x
fi
set -o errexit
set -o pipefail

# Build node_modules/.bin/$1 via make exactly once, even when several make
# processes run this script concurrently.  A pidfile (.building_$1) acts as
# the lock: the process that wins the race runs make while everyone else
# waits for the lockfile to disappear.  This replaces two copy-pasted blocks
# that handled marked-man and marked separately (consistency/decomposition;
# the logic is otherwise unchanged).
build_once () {
  local tool=$1
  if ! [ -x node_modules/.bin/$tool ]; then
    local ps=0
    if [ -f .building_$tool ]; then
      local pid=$(cat .building_$tool)
      ps=$(ps -p $pid | grep $pid | wc -l) || true
    fi

    if [ -f .building_$tool ] && [ $ps != 0 ]; then
      # Another live process owns the lock: wait for it to finish.
      while [ -f .building_$tool ]; do
        sleep 1
      done
    else
      # a race to see which make process will be the one to install the tool
      echo $$ > .building_$tool
      sleep 1
      if [ $(cat .building_$tool) == $$ ]; then
        make node_modules/.bin/$tool
        rm .building_$tool
      else
        while [ -f .building_$tool ]; do
          sleep 1
        done
      fi
    fi
  fi
}

build_once marked-man
build_once marked
# Usage: $0 <src.md> <dest>; the variables below feed the token substitution.
src=$1
dest=$2
name=$(basename ${src%.*})
# Bug fix: the original format '%Y-%M-%d %H:%m:%S' swapped month (%m) with
# minute (%M), producing dates like "2014-37-02 13:06:41".
date=$(date -u +'%Y-%m-%d %H:%M:%S')
version=$(node cli.js -v)
mkdir -p $(dirname $dest)
# Substitute the @NAME@/@DATE@/@URL@/@VERSION@ tokens and rewrite man-style
# cross references (foo(1)/(3)/(5)/(7)) into relative HTML links.
# $1: destination-relative URL of the page being generated.
html_replace_tokens () {
  local url=$1
  sed "s|@NAME@|$name|g" \
  | sed "s|@DATE@|$date|g" \
  | sed "s|@URL@|$url|g" \
  | sed "s|@VERSION@|$version|g" \
  | perl -p -e 's/<h1([^>]*)>([^\(]*\([0-9]\)) -- (.*?)<\/h1>/<h1>\2<\/h1> <p>\3<\/p>/g' \
  | perl -p -e 's/npm-npm/npm/g' \
  | perl -p -e 's/([^"-])(npm-)?README(?!\.html)(\(1\))?/\1<a href="..\/..\/doc\/README.html">README<\/a>/g' \
  | perl -p -e 's/<title><a href="[^"]+README.html">README<\/a><\/title>/<title>README<\/title>/g' \
  | perl -p -e 's/([^"-])([^\(> ]+)(\(1\))/\1<a href="..\/cli\/\2.html">\2\3<\/a>/g' \
  | perl -p -e 's/([^"-])([^\(> ]+)(\(3\))/\1<a href="..\/api\/\2.html">\2\3<\/a>/g' \
  | perl -p -e 's/([^"-])([^\(> ]+)(\(5\))/\1<a href="..\/files\/\2.html">\2\3<\/a>/g' \
  | perl -p -e 's/([^"-])([^\(> ]+)(\(7\))/\1<a href="..\/misc\/\2.html">\2\3<\/a>/g' \
  | perl -p -e 's/\([1357]\)<\/a><\/h1>/<\/a><\/h1>/g' \
  | (if [ $(basename $(dirname $dest)) == "doc" ]; then
      perl -p -e 's/ href="\.\.\// href="/g'
    else
      cat
    fi)
}
# Substitute @VERSION@ and rewrite man cross references into the
# corresponding `npm help` / `npm apihelp` invocations for roff output.
man_replace_tokens () {
  sed "s|@VERSION@|$version|g" \
  | perl -p -e 's/(npm\\-)?([a-zA-Z\\\.\-]*)\(1\)/npm help \2/g' \
  | perl -p -e 's/(npm\\-)?([a-zA-Z\\\.\-]*)\(([57])\)/npm help \3 \2/g' \
  | perl -p -e 's/(npm\\-)?([a-zA-Z\\\.\-]*)\(3\)/npm apihelp \2/g' \
  | perl -p -e 's/npm\(1\)/npm help npm/g' \
  | perl -p -e 's/npm\(3\)/npm apihelp npm/g'
}
# Dispatch on the destination path: man pages get roff conversion, HTML
# partials get bare markdown conversion, full HTML pages get header/footer.
case $dest in
  *.[1357])
    ./node_modules/.bin/marked-man --roff $src \
    | man_replace_tokens > $dest
    exit $?
    ;;
  html/partial/*.html)
    url=${dest/html\/partial\//}
    cat $src | ./node_modules/.bin/marked | html_replace_tokens $url > $dest
    ;;
  html/*.html)
    url=${dest/html\//}
    (cat html/dochead.html && \
     cat $src && \
     cat html/docfoot.html)\
    | html_replace_tokens $url \
    > $dest
    exit $?
    ;;
  *)
    echo "Invalid destination type: $dest" >&2
    exit 1
    ;;
esac
|
// Load .env into process.env before any configuration below is read.
require('dotenv').config();
// Fastify HTTP server with per-request logging enabled.
const fastify = require('fastify')({
  logger: true,
})
const puppeteer = require('puppeteer');
/**
 * Coerce a configuration value (typically from process.env) to an integer.
 *
 * @param {string|number|null|undefined} input - Raw value; strings are
 *   parsed base-10, numbers pass through unchanged.
 * @param {number} def - Fallback used when the input is missing
 *   (undefined, null, or the empty string).
 * @returns {number} The parsed integer, the number itself, or `def`.
 * @throws {Error} If input is present but neither a string nor a number,
 *   or if a string does not parse as a base-10 integer.
 */
function getInt(input, def) {
  // FIX: the original used a truthiness check (`if (input)`), so a literal
  // 0 and "" silently fell back to the default; only treat null/undefined
  // and the empty string as "missing".
  if (input === undefined || input === null || input === "") return def;
  if (typeof input === "number") return input;
  if (typeof input === "string") {
    const parsed = parseInt(input, 10);
    // FIX: the original returned NaN for unparsable strings, which would
    // silently corrupt downstream options (port, width, delays).
    if (Number.isNaN(parsed)) {
      throw new Error(`Cannot parse integer from "${input}"`);
    }
    return parsed;
  }
  throw new Error("Unsupported type");
}
// Target page to screenshot; required -- the process refuses to start without it.
const websiteUrl = process.env.URL;
if (!websiteUrl) {
  console.error("URL env var required");
  process.exit(1);
}
// Safety margin (ms) subtracted when scheduling the next update, so the
// screenshot is ready strictly before the next periodic request arrives.
const updateTimeMargin = getInt(process.env.UPDATE_TIME_MARGIN, 10000);
// Extra wait (ms) after load to let late asynchronous content settle.
const postLoadDelay = getInt(process.env.POST_LOAD_DELAY, 2000);
// Mutable module state shared between the HTTP handler and the updater.
let screenshot = null; // latest PNG buffer; null until the first capture finishes
let updatePromise = null; // in-flight update, used to drop concurrent calls
let updateDuration = 0; // duration (ms) of the last completed update
let requestLastResponseTime = null; // timestamp of the previous /page.png request
let requestInterval = 0; // measured interval (ms) between consecutive requests
let updateTimer = 0; // pending setTimeout handle for the next scheduled update
let updateTimerDuration = 0; // delay the pending timer was scheduled with
// Browser launch options
const browserOptions = {
  args: [
    // Avoids /dev/shm exhaustion in container environments.
    "--disable-dev-shm-usage",
  ],
  // headless: false,
  userDataDir: "./data/",
}
// Viewport of the captured page; WIDTH/HEIGHT env vars, default 800x600.
const viewportOptions = {
  width: getInt(process.env.WIDTH, 800),
  height: getInt(process.env.HEIGHT, 600),
}
/**
 * Update the current in-memory screenshot. If an update is already running,
 * the call is ignored/dropped (there is no queueing).
 *
 * @param log - request-scoped logger (fastify's request.log interface).
 * @returns the promise of the in-flight (or newly started) update.
 */
async function updateScreenshot(log) {
  // Worker: launches a fresh browser, captures the page into the module
  // `screenshot` buffer, and records how long the round trip took so the
  // scheduler can plan the next run.
  async function update() {
    const startTime = Date.now();
    let browser;
    try {
      log.info("update open browser");
      browser = await puppeteer.launch(browserOptions);
      log.info("update open page");
      const page = await browser.newPage();
      await page.setViewport(viewportOptions);
      log.info("update goto");
      await page.goto(websiteUrl);
      log.info("update load");
      // Try to speed up any animations
      // NOTE(review): page._client is a private puppeteer API and can break
      // across versions -- confirm it still exists when upgrading.
      await page._client.send('Animation.setPlaybackRate', { playbackRate: 20 });
      // Wait until everything is loaded
      // NOTE(review): waitForNavigation AFTER goto has resolved may stall
      // until its timeout unless the page navigates again -- verify this is
      // intended (vs. passing waitUntil to goto directly).
      await page.waitForNavigation({
        waitUntil: 'networkidle0',
      });
      // Waiting for some time can help when additional content is still loading
      // in asynchronously. Without the website telling you, you can't really
      // know when it's "really" loaded, so this is a good enough approximation.
      await new Promise(resolve => setTimeout(resolve, postLoadDelay));
      log.info("update screenshot");
      screenshot = await page.screenshot();
    } catch(error) {
      // Failures are logged but not rethrown: the previous screenshot (if
      // any) keeps being served and a later scheduled update can retry.
      log.error("update error " + error);
    } finally {
      if (browser) {
        log.info("update close");
        await browser.close();
      }
      // Record the duration and clear the in-flight marker so the next
      // call can start a fresh update.
      updateDuration = Date.now() - startTime;
      log.info("update done " + updateDuration + "ms");
      updatePromise = null;
    }
  }
  // Drop the call if an update is already running; otherwise start one.
  if (!updatePromise) {
    updatePromise = update();
  }
  return updatePromise;
}
/**
 * Render a millisecond duration as whole seconds, e.g. 2300 -> "2s".
 *
 * @param {number} ms - duration in milliseconds.
 * @returns {string} the duration rounded to whole seconds, suffixed "s".
 */
function formatTime(ms) {
  const seconds = (ms / 1000).toFixed(0);
  return `${seconds}s`;
}
/**
 * Schedule a screenshot update to be done as soon as possible (or even sooner).
 *
 * The best case scenario is that an up-to-date in-memory screenshot will
 * already be available _before_ this function is called.
 *
 * The worst case scenario is that the in-memory screenshot will only be updated
 * some time _after_ this function is called.
 *
 * The function assumes that it will be called periodically with a constant time
 * period in-between the calls. This makes it possible to run the next update
 * so that it finishes _before_ the next time the function is called.
 *
 * This is achieved by measuring the time interval between the last call and
 * the current one and the time it takes to finish an update.
 *
 * @param log - request-scoped logger (fastify's request.log interface).
 */
function scheduleScreenshot(log) {
  let timeoutDuration = 0;
  // Only start adapting once a first screenshot exists; before that an
  // immediate update is wanted (timeoutDuration stays 0).
  if (screenshot) {
    const now = Date.now();
    if (requestLastResponseTime === null) {
      requestLastResponseTime = now;
    }
    // Measured gap between this request and the previous one.
    requestInterval = now - requestLastResponseTime;
    requestLastResponseTime = now;
    // Aim to finish the next update `updateTimeMargin` ms before the next
    // expected request: interval - update duration - margin, floored at 0.
    timeoutDuration = Math.max(0, requestInterval - updateDuration - updateTimeMargin);
  }
  // A request arrived while a previously scheduled update was still
  // pending: cancel the timer, warn, and run the update immediately.
  if (updateTimer) {
    clearTimeout(updateTimer);
    log.warn(`scheduler got request ${formatTime(updateTimerDuration - requestInterval - updateDuration)} ahead of time, try increasing time margin`);
    timeoutDuration = 0;
  }
  log.info(`scheduler interval ${formatTime(requestInterval)} - duration ${formatTime(updateDuration)} - margin ${formatTime(updateTimeMargin)} => running after ${formatTime(timeoutDuration)}`);
  // Remember the chosen delay so the next call can detect early requests.
  updateTimerDuration = timeoutDuration;
  updateTimer = setTimeout(() => {
    updateTimer = 0;
    updateScreenshot(log);
  }, timeoutDuration);
}
// Serve the most recent screenshot, then schedule the next update based on
// the observed request cadence (see scheduleScreenshot).
fastify.get('/page.png', async (request, reply) => {
  if (screenshot) {
    reply.type("image/png");
    reply.send(screenshot);
  } else {
    // Return "No Content" if there is no screenshot immediately available.
    // This will be the case until the very first update finishes.
    reply.code(204);
  }
  scheduleScreenshot(request.log);
})
/**
 * Bind the HTTP server on all interfaces using the PORT env var
 * (default 8000). Any startup failure is logged and terminates the process.
 */
async function listen() {
  try {
    const address = '0.0.0.0';
    await fastify.listen(getInt(process.env.PORT, 8000), address);
  } catch (err) {
    fastify.log.error(err);
    process.exit(1);
  }
}
/**
 * Application entry point: start listening for HTTP requests.
 * listen() handles its own fatal errors, but the original left the start()
 * promise floating; attach a catch so any unexpected rejection is logged
 * instead of becoming an unhandled promise rejection.
 */
async function start() {
  await listen();
}
start().catch((err) => {
  fastify.log.error(err);
  process.exit(1);
});
|
# Compile the FuzzFoo target into a static archive via the go-118-fuzz-build
# shim (adapts Go 1.18 native fuzz targets to libFuzzer).
go run main.go -func FuzzFoo -o fuzzer.a github.com/AdamKorcz/go-118-fuzz-build/fuzzers/vitess
# Link the archive against libFuzzer to produce the standalone fuzzer binary.
clang -o fuzzer fuzzer.a -fsanitize=fuzzer
|
#! /bin/bash
# Path to the lnav binary under test, supplied by the build environment.
lnav_test="${top_builddir}/src/lnav-test"
# Case 1: config check (-C) against a malformed JSON format file should
# report the unexpected key path and the JSON parse error.
run_test ${lnav_test} -C \
-I ${test_dir}/bad-config-json
# Strip the machine-specific directory prefix so the expected output below
# is stable. NOTE(review): `sed -i ""` is the BSD/macOS in-place form; GNU
# sed would treat "" as the script argument -- confirm the target platform.
sed -i "" -e "s|/.*/format|format|g" `test_err_filename`
check_error_output "invalid format not detected?" <<EOF
warning:format.json:line 5
warning: unexpected path --
warning: /invalid_key_log/value/test/identifiers
warning: accepted paths --
warning: kind string|integer|float|boolean|json|quoted -- The type of data in the field
warning: collate <function> -- The collating function to use for this column
warning: unit/ -- Unit definitions for this field
warning: identifier <bool> -- Indicates whether or not this field contains an identifier that should be highlighted
warning: foreign-key <bool> -- Indicates whether or not this field should be treated as a foreign key for row in another table
warning: hidden <bool> -- Indicates whether or not this field should be hidden
warning: action-list# <string> -- Actions to execute when this field is clicked on
warning: rewriter <command> -- A command that will rewrite this field when pretty-printing
warning: description <string> -- A description of the field
error:format.json:4:invalid json -- parse error: object key and value must be separated by a colon (':')
ar_log": { "abc" } }
(right here) ------^
EOF
# Case 2: broken regexes, invalid samples, and a bad init.sql should each
# be reported by the config check.
run_test ${lnav_test} -C \
-I ${test_dir}/bad-config
sed -i "" -e "s|/.*/init.sql|init.sql|g" `test_err_filename`
check_error_output "invalid format not detected?" <<EOF
error:bad_regex_log.regex[std]:missing )
error:bad_regex_log.regex[std]:^(?<timestamp>\d+: (?<body>.*)$
error:bad_regex_log.regex[std]: ^
error:bad_regex_log.level:missing )
error:bad_regex_log:invalid sample -- 1428634687123; foo
error:bad_regex_log:highlighters/foobar:missing )
error:bad_regex_log:highlighters/foobar:abc(
error:bad_regex_log:highlighters/foobar: ^
error:bad_sample_log:invalid sample -- 1428634687123; foo bar
error:bad_sample_log:partial sample matched -- 1428634687123; foo
error: against pattern -- ^(?<timestamp>\d+); (?<body>\w+)$
error:bad_sample_log:partial sample matched -- 1428634687123
error: against pattern -- ^(?<timestamp>\d+): (?<body>.*)$
error:no_sample_log:no sample logs provided, all formats must have samples
error:init.sql:2:near "TALE": syntax error
EOF
# Case 3: log levels in logfile_leveltest.0 should map to the expected
# log_level values when the virtual table is exported as CSV.
run_test ${lnav_test} -n \
-I ${test_dir} \
-c ";select * from leveltest_log" \
-c ':write-csv-to -' \
${test_dir}/logfile_leveltest.0
check_output "levels are not correct?" <<EOF
log_line,log_part,log_time,log_idle_msecs,log_level,log_mark,log_comment,log_tags,log_filters
0,<NULL>,2016-06-30 12:00:01.000,0,trace,0,<NULL>,<NULL>,[]
1,<NULL>,2016-06-30 12:00:02.000,1000,debug,0,<NULL>,<NULL>,[]
2,<NULL>,2016-06-30 12:00:03.000,1000,debug2,0,<NULL>,<NULL>,[]
3,<NULL>,2016-06-30 12:00:04.000,1000,debug3,0,<NULL>,<NULL>,[]
4,<NULL>,2016-06-30 12:00:05.000,1000,info,0,<NULL>,<NULL>,[]
5,<NULL>,2016-06-30 12:00:06.000,1000,warning,0,<NULL>,<NULL>,[]
6,<NULL>,2016-06-30 12:00:07.000,1000,fatal,0,<NULL>,<NULL>,[]
7,<NULL>,2016-06-30 12:00:08.000,1000,info,0,<NULL>,<NULL>,[]
EOF
|
<gh_stars>1-10
package intercept.logging;
/**
 * Minimal logging abstraction with a null-object implementation.
 *
 * Fix: interface members are implicitly {@code public} (fields also
 * {@code static final}); the original declared these modifiers redundantly
 * and inconsistently (present on {@code log}/{@code setVerbose}, absent on
 * {@code trace}). They are removed uniformly; the binary and source
 * interface is unchanged.
 */
public interface ApplicationLog {

    /** Null-object implementation that silently discards all calls. */
    ApplicationLog NullApplicationLog = new ApplicationLog() {
        @Override
        public void log(String message) {
        }

        @Override
        public void trace(String message) {
        }

        @Override
        public void setVerbose() {
        }
    };

    /** Write a normal log message. */
    void log(String message);

    /** Write a trace-level message. */
    void trace(String message);

    /** Switch this log into verbose mode. */
    void setVerbose();
}
|
<reponame>felhiirad/portfolio<gh_stars>0
import 'bootstrap/dist/css/bootstrap.min.css';
import Particles from 'react-particles-js';
import Contact from '../components/contact/Contact';
import Footer from '../components/footer/Footer';
import Header from '../components/header/Header';
import Navbar from '../components/navbar/Navbar';
import Services from '../components/services/Services';
import Experience from '../experience/Experience';
import About from './../components/about/About';
import Portfolio from './../components/portfolio/Portfolio';
import Testimonial from './../components/testimonial/Testimonial';
import './App.css';
function App() {
return (
<>
<Particles
className='particles-canvas'
params={{
particles: {
number: {
value: 60,
density: {
enable: true,
value_area: 900,
},
},
shape: {
type: 'triangle',
stroke: {
width: 15,
color: '#38A115',
},
},
},
}}
/>
<Navbar/>
<Header/>
<About/>
<Services/>
<Experience/>
<Portfolio/>
<Testimonial/>
<Contact/>
<Footer/>
</>
);
}
export default App;
|
<filename>Trabalho_1/src/aux.h
#pragma once
#include <termios.h>
#include <unistd.h>
/**
* Function to create the Block Check Character relative to the Address and Control fields
* @param a Address Character of the frame
* @param c Control Character of the frame
* @return Expected value for the Block Check Character
*/
unsigned char createBCC(unsigned char a, unsigned char c);
/**
* Function to create the Block Check Character relative to the Data Characters of the frame
* @param frame Frame position where the Data starts
* @param length Number of Data Characters to process
* @return Expected value for the Block Check Character
*/
unsigned char createBCC_2(unsigned char* frame, int length);
/**
* Function to apply byte stuffing to the Data Characters of a frame
* @param frame Address of the frame
* @param length Number of Data Characters to process
* @return Length of the new frame, post byte stuffing
*/
int byteStuffing(unsigned char* frame, int length);
/**
* Function to reverse the byte stuffing applied to the Data Characters of a frame
* @param frame Address of the frame
* @param length Number of Data Characters to process
* @return Length of the new frame, post byte destuffing
*/
int byteDestuffing(unsigned char* frame, int length);
/**
* Function to create a supervision frame for the serial port file transfer protocol
* @param frame Address where the frame will be stored
* @param controlField Control field of the supervision frame
* @param role Role for which to create the frame, marking the difference between the Transmitter and the Receiver
* @return 0 if successful; negative if an error occurs
*/
int createSupervisionFrame(unsigned char* frame, unsigned char controlField, int role);
/**
* Function to create an information frame for the serial port file transfer protocol
* @param frame Address where the frame will be stored
* @param controlField Control field of the supervision frame
* @param infoField Start address of the information to be inserted into the information frame
* @param infoFieldLength Number of data characters to be inserted into the information frame
* @return Returns 0, as there is no place at which an error can occur
*/
int createInformationFrame(unsigned char* frame, unsigned char controlField, unsigned char* infoField, int infoFieldLength);
/**
* Function to read a supervision frame, sent according to the serial port file transfer protocol
* @param frame Address where the frame will be stored
* @param fd File descriptor from which to read the frame
* @param wantedBytes Array containing the possible expected control bytes of the frame
* @param wantedBytesLength Number of possible expected control bytes of the frame
* @param addressByte Address from which a frame is expected
* @return Index of the wanted byte found, in the wantedBytes array
*/
int readSupervisionFrame(unsigned char* frame, int fd, unsigned char* wantedBytes, int wantedBytesLength, unsigned char addressByte);
/**
* Function to read an information frame, sent according to the serial port file transfer protocol
* @param frame Address where the frame will be stored
* @param fd File descriptor from which to read the frame
* @param wantedBytes Array containing the possible expected control bytes of the frame
* @param wantedBytesLength Number of possible expected control bytes of the frame
* @param addressByte Address from which a frame is expected
* @return Length of the data packet sent, including byte stuffing and BCC2
*/
int readInformationFrame(unsigned char* frame, int fd, unsigned char* wantedBytes, int wantedBytesLength, unsigned char addressByte);
/**
* Function to send a frame to the designated file descriptor
 * @param frame Start address of the frame to be sent
* @param fd File descriptor to which to write the information
* @param length Size of the frame to be sent (size of information to be written)
* @return Number of bytes written if successful; negative if an error occurs
*/
int sendFrame(unsigned char* frame, int fd, int length);
/**
* Function to read a byte from the designated file descriptor
* @param byte Address to which to store the byte
* @param fd File descriptor from which to read the byte
* @return Return value of the read() call if successful; negative if an error occurs
*/
int readByte(unsigned char* byte, int fd);
/**
* Function to open the file descriptor through which to execute the serial port communications,
* in the non-canonical mode, according to the serial port file transfer protocol
* @param port Name of the port to be opened
* @param oldtio Struct where the pre-open port settings will be stored
* @param vtime Value to be assigned to the VTIME field of the new settings - time between bytes read
* @param vmin Value to be assigned to the VMIN field of the new settings - minimum amount of bytes to read
* @return File descriptor that was opened with the given port
*/
int openNonCanonical(char* port, struct termios* oldtio, int vtime, int vmin);
/**
* Function to close the file descriptor through which the serial port communications were executed
* @param fd File descriptor where the port has been opened
 * @param oldtio Struct containing the original port settings that have been saved, so they can be restored
* @return 0 if successful; negative if an error occurs
*/
int closeNonCanonical(int fd, struct termios* oldtio);
/**
* Function to install the alarm handler, using sigaction
*/
void alarmHandlerInstaller();
// ------------------------------
/**
* Auxiliary function to convert a decimal value into two (max. 8 bits) values, for hexadecimal representation
* @param k Decimal value to be converted
* @param l1 Least significant bits of the converted value
* @param l2 Most significant bits of the converted value
*/
void convertValueInTwo(int k, int* l1, int* l2);
/**
* Auxiliary function to convert two (max. 8 bits) values, from hexadecimal representation, into one single decimal
* @param l1 Least significant bits of the value to be converted
* @param l2 Most significant bits of the value to be converted
* @return Decimal converted value
*/
int convertValueInOne(int l1, int l2);
|
<reponame>j-v-a/Beasts-challenges
/*
The myIsPrototypeOf() method checks if an object exists in another object's prototype chain.
** Parameters:
prototypeObj: The prototype object which will be searched for.
object: The object whose prototype chain will be searched.
** Return value:
A Boolean indicating whether the calling object lies in the prototype chain of the specified object.
*/
/**
 * Check whether `prototypeObj` appears anywhere in `object`'s prototype
 * chain (re-implementation of Object.prototype.isPrototypeOf).
 *
 * FIX: the original returned true whenever the walk reached
 * Object.prototype, regardless of `prototypeObj` -- e.g.
 * myIsPrototypeOf(Array.prototype, {}) wrongly returned true. Only an
 * identity match with `prototypeObj` may return true.
 *
 * @param {Object} prototypeObj - The prototype object which will be searched for.
 * @param {Object} object - The object whose prototype chain will be searched.
 * @returns {boolean} true iff prototypeObj lies in object's prototype chain.
 */
function myIsPrototypeOf(prototypeObj, object) {
  let proto = Object.getPrototypeOf(object);
  // Walk up the chain until it terminates at null.
  while (proto !== null) {
    if (proto === prototypeObj) {
      return true;
    }
    proto = Object.getPrototypeOf(proto);
  }
  return false;
}
|
import Entity from '../containers/Entity';
import Request from '../containers/Request';
class LocalSerializer {
  /*
  Serializer used when running local transforms.
  */

  /**
   * Build a Request containing a single 'maltego.Local' entity from
   * command-line style arguments: [value, 'key=val', ...].
   */
  static serialize(maltegoArgs) {
    const request = new Request();
    const [value, ...properties] = maltegoArgs;
    const inputEntity = new Entity('maltego.Local', value);
    LocalSerializer.addPropertiesToEntity(properties, inputEntity);
    request.addEntity(inputEntity);
    return request;
  }

  /**
   * Parse 'key=value' strings and add each as a property on the entity.
   *
   * FIX: the original used prop.split('=', 2); in JavaScript the limit
   * truncates the output, so a value containing '=' (e.g.
   * 'url=http://x?a=b') lost everything after the second '='. Split on the
   * FIRST '=' only and keep the remainder as the value. A string without
   * '=' still yields an undefined value, as before.
   */
  static addPropertiesToEntity(properties, entity) {
    properties.forEach(prop => {
      const idx = prop.indexOf('=');
      const key = idx === -1 ? prop : prop.slice(0, idx);
      const val = idx === -1 ? undefined : prop.slice(idx + 1);
      entity.addProperty(key, val);
    });
  }
}

export default LocalSerializer;
|
from django.db import models
from django.urls import reverse
from django.utils.translation import ugettext_lazy as __
from django_business_rules.model_mixins import SoftDeleteAbstractMixin
class BusinessRuleModel(SoftDeleteAbstractMixin):
    """A stored business rule: unique name, description and serialized
    rule payloads."""

    # Human-readable unique identifier for the rule.
    name = models.CharField(
        unique=True, verbose_name=__('name'), max_length=150)
    description = models.TextField(blank=True, verbose_name=__('description'))
    # Serialized rule-builder data.
    rule_data = models.TextField(verbose_name=__('rule data'))
    # Serialized rules. FIX: the default was the mutable dict literal {},
    # which is the wrong type for a TextField and shared across instances;
    # the equivalent JSON string keeps the stored value identical.
    rules = models.TextField(verbose_name=__('rules'), default='{}')

    def get_absolute_url(self):
        """URL of the edit form for this rule instance."""
        return reverse('django_business_rules:business-rule-form',
                       kwargs={'pk': self.pk})

    def __str__(self):
        return self.name
|
<filename>lib/index.ts
// Native addon compiled from C/C++, located via the `bindings` helper.
const playcap = require('bindings')('playcap.node')
// Whether a device is opened for playback, capture, or both.
export enum DeviceType {
  Playback = 1,
  Capture = 2,
  Duplex = 3,
};
// Host audio backend identifiers. NOTE(review): the numeric values
// presumably mirror the native library's backend enum -- keep in sync
// with the addon when either side changes.
export enum Backend {
  Wasapi = 0,
  Dsound = 1,
  Winmm = 2,
  Coreaudio = 3,
  Sndio = 4,
  Audio4 = 5,
  Oss = 6,
  Pulseaudio = 7,
  Alsa = 8,
  Jack = 9,
  Aaudio = 10,
  Opensl = 11,
  Webaudio = 12,
  Custom = 13,
  Null = 14,
};
// Description of an available audio device as reported by getDevices().
export type DeviceInfo = {
  name: string;
  isDefault: boolean;
}
// Options accepted when opening a device; all fields are optional and
// fall back to the addon's defaults.
export type DeviceOptions = {
  playbackChannels?: number;
  captureChannels?: number;
  sampleRate?: number;
  playbackDeviceIndex?: number;
  captureDeviceIndex?: number;
  deviceType?: number;
}
// Handle returned by createDevice: start streaming / release resources.
type Device = {
  start: () => void;
  destroy: () => void;
}
// Audio callback invoked by the device with per-channel sample buffers.
export type DataCallback = (inputs: Float32Array[], outputs: Float32Array[]) => void;
export interface Context {
  getDevices(): DeviceInfo[];
  refreshDevices(): void;
  createDevice(options: DeviceOptions, cb: DataCallback): Device;
}
// Concrete Context constructor implemented by the native addon.
export var Context: {
  new(): Context
} = playcap.Context;
|
<?php include_once(__DIR__ . '/fzf/Action.sh'); ?>
<?php include_once(__DIR__ . '/all/Action.sh'); ?>
<?php include_once(__DIR__ . '/demo/Action.sh'); ?>
|
#!/bin/bash
## CAUTIOUS:
## Ensure that environment variable exports should not have the user name printed in the export script
## Use single quotes to ensure the environment variables do not get expanded
## This config can also be used to override the default environment variables only for the current shell
## by including in the main executing script
## NOTE(review): `local` is used at file scope below; in bash that is only
## legal when this file is sourced from inside a function -- confirm callers.
## Sub-directories created/expected under the config base path.
declare -a AI_CFG_DIRS=(
"model"
"model/release"
"dataset"
"arch"
)
## Sub-directories created/expected under the data base path.
declare -a AI_DATA_DIRS=(
"data-gaze"
"data-gaze/AIML_Annotation"
"data-gaze/AIML_Database"
"data-gaze/AIML_Aids"
"data-gaze/AIML_Database_Test"
"data-gaze/AIML_Aids_Test"
"data-mongodb"
"data-mongodb/db"
"data-mongodb/logs"
"data-mongodb/key"
"data-mongodb/configdb"
"data-mobile"
"data-public"
"logs"
"logs/www"
"logs/www/uploads"
"release"
"samples"
"tfrecords"
)
## ---------------------------------##
## Names of environment variables consumed by the Python tooling.
declare -a AI_PY_ENVVARS=(
'AI_APP'
'AI_HOME_EXT'
'MASK_RCNN'
'FASTER_RCNN'
'CAFFE_ROOT'
'AI_LANENET_ROOT'
)
## Host identifiers of the remote machines known to this setup.
declare -a AI_REMOTE_MACHINE_IDS=(
"alpha"
"jarvis"
"ultron"
"venom"
"flash"
"samba-100"
)
## ---------------------------------##
## NOTE(review): "MONOGODB" is presumably a typo for "MONGODB"; the names
## are kept as-is because other scripts may already reference them.
local MONOGODB_USER=mongodb
local MONOGODB_GROUP=mongodb
## ---------------------------------##
local AI_BASEPATH=""
local AI_CODE_BASE_PATH="/codehub"
local AI_DIR_PREFIX="aimldl"
local AI_GOOGLE_APPLICATION_CREDENTIALS_FILE=""
local AI_MOUNT_MACHPREFIX='vtq' ## possible values: 'vtd' or 'mmi'
local AI_VM_BASE="virtualmachines"
# local AI_VM_HOME=${AI_CODE_BASE_PATH}/${AI_VM_BASE}
local AI_VM_HOME=/${AI_VM_BASE}
## Python virtualenv locations (virtualenvwrapper layout).
local AI_PY_VENV_PATH=${AI_VM_HOME}/virtualenvs
local WORKON_HOME=${AI_PY_VENV_PATH}
local AI_VIRTUALENVWRAPPER=/usr/local/bin/virtualenvwrapper.sh
## What Is /dev/shm And Its Practical Usage?
## https://www.cyberciti.biz/tips/what-is-devshm-and-its-practical-usage.html
## AI Top Level Directories
local AI_CFG_BASE_PATH="${AI_CODE_BASE_PATH}/cfg"
local AI_DATA_BASE_PATH="/${AI_DIR_PREFIX}-dat"
local AI_MOUNT_BASE_PATH="/${AI_DIR_PREFIX}-mnt"
local AI_DOC_BASE_PATH="/${AI_DIR_PREFIX}-doc"
local AI_RPT_BASE_PATH="/${AI_DIR_PREFIX}-rpt"
local AI_KBANK_BASE_PATH="/${AI_DIR_PREFIX}-kbank"
## for tensorflow object detection api
local AI_TF_OD_API_PATH="${AI_CODE_BASE_PATH}/external/tensorflow/models/research"
## ----------IMP--------------------##
## This has to be changed manually, and aimldl.setup.sh needs to be executed again!
local AI_PYVER=3
## NOTE(review): AI_PY_VENV_NAME is assigned several times below; only the
## last uncommented assignment takes effect.
local AI_PY_VENV_NAME="py_3-6-9_2019-12-21"
## for 'alpha' system
local AI_PY_VENV_NAME="py_3-6-8_2019-12-25"
## master
local AI_PY_VENV_NAME="py_3-6-9_2020-01-28"
# local AI_PY_VENV_NAME="py_3_20200128_1342"
## 'alpha' - docker container env
# local AI_PY_VENV_NAME="py_3_20200122_1504"
## mod_wsgi paths derived from the selected virtualenv.
local AI_WSGIPythonPath="${AI_PY_VENV_NAME}/bin"
local AI_WSGIPythonHome="${AI_PY_VENV_NAME}/lib/python3.6/site-packages/"
## ---------------------------------##
local AI_DATA_GAZE="/aimldl-dat/data-gaze"
local AI_ANNON_DATA_HOME="/data/samba/Bangalore/prod/Bangalore_Maze_Exported_Data/ANNOTATIONS"
local AI_ANNON_DB="/aimldl-dat/data-gaze/AIML_Database"
local AI_ANNON_DB_TEST="/aimldl-dat/data-gaze/AIML_Database_Test"
local AI_AIDS_DB="/aimldl-dat/data-gaze/AIML_Aids"
# local AI_ANNON_DATA_HOME_LOCAL="/aimldl-dat/data-gaze/AIML_Annotation"
# local AI_ANNON_DATA_HOME_LOCAL="/aimldl-dat/data-gaze/AIML_Annotation/*/annotations"
local AI_ANNON_DATA_HOME_LOCAL="/aimldl-dat/data-gaze/AIML_Annotation/ods_merged_on_050719"
local AI_ANNON_DATA_HOME_LOCAL="/aimldl-dat/data-gaze/AIML_Annotation/ods_merged_on_290719"
local AI_ANNON_DATA_HOME_LOCAL="/aimldl-dat/data-gaze/AIML_Annotation/ods_merged_on_240919_121321"
local AI_ANNON_DATA_HOME_LOCAL="/aimldl-dat/data-gaze/AIML_Annotation/ods_job_181219"
local AI_ANNON_DATA_HOME_LOCAL="/aimldl-dat/data-gaze/AIML_Annotation/ods_merged_on_281219_125647"
# local AI_ANNON_DATA_HOME="${AI_ANNON_DATA_HOME_LOCAL}"
## Inspired by:
## https://github.com/ApolloAuto/apollo/tree/master/scripts
## absolute path will always be /aimldl-dat
local AI_WEIGHTS_PATH="release" ## default value for production/CBR release
## uncomment and give custom relative path for model - should be used for development work
# local AI_WEIGHTS_PATH="logs"
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.