text stringlengths 1 1.05M |
|---|
<filename>sql_dump/ashu_yii2.sql
-- phpMyAdmin SQL Dump
-- version 4.6.6deb5
-- https://www.phpmyadmin.net/
--
-- Host: localhost:3306
-- Generation Time: Jul 17, 2020 at 03:17 PM
-- Server version: 5.7.30-0ubuntu0.18.04.1
-- PHP Version: 7.2.24-0ubuntu0.18.04.6
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET time_zone = "+00:00";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;
--
-- Database: `ashu_yii2`
--
-- --------------------------------------------------------
--
-- Table structure for table `new_user`
--
CREATE TABLE `new_user` (
`id` int(11) NOT NULL,
`username` varchar(100) DEFAULT NULL,
`email` varchar(100) DEFAULT NULL,
`password` varchar(100) DEFAULT NULL,
`authKey` varchar(255) DEFAULT NULL,
`accessToken` varchar(255) DEFAULT NULL,
`role` tinyint(4) DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `post`
--
CREATE TABLE `post` (
`id` int(11) NOT NULL,
`title` varchar(80) DEFAULT NULL,
`description` text,
`created_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updated_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Indexes for dumped tables
--
--
-- Indexes for table `new_user`
--
ALTER TABLE `new_user`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `post`
--
ALTER TABLE `post`
ADD PRIMARY KEY (`id`);
--
-- AUTO_INCREMENT for dumped tables
--
--
-- AUTO_INCREMENT for table `new_user`
--
ALTER TABLE `new_user`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `post`
--
ALTER TABLE `post`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
|
YUI.add('aui-toggler-accessibility-tests', function(Y) {

    //--------------------------------------------------------------------------
    // TogglerAccessibility Tests
    //--------------------------------------------------------------------------

    var suite = new Y.Test.Suite('aui-toggler-accessibility');

    // Header node that receives the simulated keyboard events below.
    var togglerNode = Y.one('#toggler');

    suite.add(new Y.Test.Case({
        name: 'TogglerAccessibility',

        // Build a fresh, non-animated toggler per test so no state leaks
        // between cases (animation off keeps assertions synchronous).
        setUp: function() {
            this.toggler = new Y.Toggler({
                animated: false,
                content: '.content',
                expanded: true,
                header: '.btn'
            });
        },

        tearDown: function() {
            this.toggler.destroy();
        },

        // Asserts that aria-pressed on the header and aria-hidden on the
        // content both agree with the expected expanded/collapsed state.
        _assertARIAAttributeValues: function(header, content, expanded) {
            Y.Assert.isTrue(header.getAttribute('aria-pressed') === (expanded ? 'true' : 'false'));
            Y.Assert.isTrue(content.getAttribute('aria-hidden') === (expanded ? 'false' : 'true'));
        },

        // Fires a keydown on the toggler header; defaults to Enter (13).
        _simulateToggleEvent: function(keyCode) {
            togglerNode.simulate('keydown', { keyCode: keyCode || 13 });
        },

        'test toggler has correct aria attributes': function() {
            var toggler = this.toggler,
                content = toggler.get('content'),
                header = toggler.get('header');

            // ARIA attributes must exist and start in the expanded state.
            Y.Assert.isTrue(header.hasAttribute('aria-pressed'));
            Y.Assert.isTrue(header.hasAttribute('aria-controls'));
            Y.Assert.isTrue(content.hasAttribute('aria-hidden'));
            this._assertARIAAttributeValues(header, content, true);

            // aria-controls must point at the content node's generated id.
            Y.Assert.areSame(content.guid(), header.getAttribute('aria-controls'));

            // Enter toggles collapsed, then expanded again.
            this._simulateToggleEvent();
            this._assertARIAAttributeValues(header, content, false);
            this._simulateToggleEvent();
            this._assertARIAAttributeValues(header, content, true);

            // Simulate down arrow
            this._simulateToggleEvent(40);
            this._assertARIAAttributeValues(header, content, true);

            // Simulate up arrow
            this._simulateToggleEvent(38);
            this._assertARIAAttributeValues(header, content, false);

            // Simulate down arrow
            this._simulateToggleEvent(40);
            this._assertARIAAttributeValues(header, content, true);
        }
    }));

    Y.Test.Runner.add(suite);

}, '', {
    requires: ['aui-toggler-accessibility', 'node-event-simulate', 'test']
});
|
package org.c_base.ampelcontrol;
import android.content.Context;
/**
 * Immutable pairing of a {@link TrafficLight} with the Android {@code Context}
 * needed to resolve its localized display name.
 */
public class TrafficLightHolder {

    /** Context used to resolve the traffic light's name string resource. */
    private final Context context;

    /** The wrapped traffic light. */
    private final TrafficLight trafficLight;

    TrafficLightHolder(Context context, TrafficLight trafficLight) {
        this.context = context;
        this.trafficLight = trafficLight;
    }

    /** Returns the host of the wrapped traffic light. */
    public String getHost() {
        return trafficLight.getHost();
    }

    /**
     * Returns the localized name resolved from the traffic light's
     * name resource id.
     */
    @Override
    public String toString() {
        return context.getString(trafficLight.getNameResId());
    }
}
|
package kfs.csv;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
*
* @author pavedrim
*/
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface Csv {

    /** Inner/nested mapping expression; empty when unused. */
    String inner() default "";

    /** Sorting directive for this column; empty when unused. */
    String sorting() default "";

    /** CSV column name; empty means derive it elsewhere (e.g. from the field). */
    String name() default "";

    /** SQL column name counterpart; empty when unused. */
    String sqlname() default "";

    /**
     * Converter applied between the CSV string and the field value.
     * Defaults to the base {@link CsvStrConvertor} class (i.e. no custom
     * converter configured).
     */
    Class<? extends CsvStrConvertor> conv() default CsvStrConvertor.class;
}
|
<gh_stars>1-10
package bytesurl
import (
"bytes"
"sort"
)
// Values maps a string key to a list of values.
// It is typically used for query parameters and form values.
// Unlike in the http.Header map, the keys in a Values map
// are case-sensitive.
type Values map[string][][]byte
// Get gets the first value associated with the given key.
// If there are no values associated with the key, Get returns
// the empty string. To access multiple values, use the map
// directly.
func (v Values) Get(key string) []byte {
	if v != nil {
		// Indexing a map with a missing key yields a nil slice, so the
		// length check covers both "absent" and "present but empty".
		if vs := v[key]; len(vs) > 0 {
			return vs[0]
		}
	}
	return EmptyByte
}
// Set sets the key to value. It replaces any existing
// values.
// NOTE: panics if v is a nil map, since it writes to it.
func (v Values) Set(key string, value []byte) {
	v[key] = [][]byte{value}
}

// Add adds the value to key. It appends to any existing
// values associated with key.
// NOTE: panics if v is a nil map, since it writes to it.
func (v Values) Add(key string, value []byte) {
	v[key] = append(v[key], value)
}

// Del deletes the values associated with key.
// Deleting a missing key is a no-op.
func (v Values) Del(key string) {
	delete(v, key)
}
// ParseQuery parses the URL-encoded query string and returns
// a map listing the values specified for each key.
// ParseQuery always returns a non-nil map containing all the
// valid query parameters found; err describes the first decoding error
// encountered, if any.
func ParseQuery(query []byte) (m Values, err error) {
	m = make(Values)
	// parseQuery fills m in place; invalid pairs are skipped and only
	// the first error is reported.
	err = parseQuery(m, query)
	return
}
// parseQuery splits query into '&'- or ';'-separated key[=value] pairs,
// unescapes both halves and appends each value to m under its key.
// Invalid pairs are skipped; only the first decoding error is returned.
func parseQuery(m Values, query []byte) (err error) {
	// Idiomatic emptiness checks: len(x) replaces the previous
	// bytes.Compare/bytes.Equal calls against EmptyByte (equivalent,
	// since Compare/Equal treat nil and empty slices identically).
	for len(query) > 0 {
		key := query
		if i := bytes.IndexAny(key, "&;"); i >= 0 {
			key, query = key[:i], key[i+1:]
		} else {
			query = EmptyByte
		}
		if len(key) == 0 {
			// Skip empty segments such as "a=1&&b=2".
			continue
		}
		value := EmptyByte
		if i := bytes.Index(key, EqualByte); i >= 0 {
			key, value = key[:i], key[i+1:]
		}
		key, err1 := QueryUnescape(key)
		if err1 != nil {
			// Remember only the first decoding error; keep parsing the rest.
			if err == nil {
				err = err1
			}
			continue
		}
		value, err1 = QueryUnescape(value)
		if err1 != nil {
			if err == nil {
				err = err1
			}
			continue
		}
		indexKey := string(key)
		m[indexKey] = append(m[indexKey], value)
	}
	return err
}
// Encode encodes the values into ``URL encoded'' form
// ("bar=baz&foo=quux") sorted by key.
func (v Values) Encode() string {
	// A nil Values encodes to the empty string.
	if v == nil {
		return ""
	}
	var buf bytes.Buffer
	// Collect and sort keys for deterministic output.
	keys := make([]string, 0, len(v))
	for k := range v {
		keys = append(keys, k)
	}
	sort.Strings(keys)
	for _, k := range keys {
		vs := v[k]
		// prefix is the escaped key followed by '='; reused for every
		// value stored under this key.
		prefix := append(QueryEscape([]byte(k)), EqualByte...)
		for _, v := range vs {
			if buf.Len() > 0 {
				buf.WriteByte('&')
			}
			buf.Write(prefix)
			buf.Write(QueryEscape(v))
		}
	}
	return buf.String()
}
|
package io.opensphere.core.util.javafx.input.view.behavior;
import javafx.scene.control.Control;
import javafx.scene.input.KeyCode;
import javafx.scene.input.KeyEvent;
/**
* A behavior extension used to handle key events.
*
* @param <C> the control type for which the behavior is implemented.
*/
public interface KeyBehavior<C extends Control> extends Behavior<C>
{
/**
* Adds the supplied binding to the behavior.
*
* @param pBinding the binding to add.
*/
void addBinding(KeyActionBinding pBinding);
/**
* Removes the supplied binding from the behavior, if present. If not present, no action is taken.
*
* @param pBinding the binding to remove.
*/
void removeBinding(KeyActionBinding pBinding);
/**
* Creates a new binding and adds it to the behavior, binding the key to the action.
*
* @param pCode the key code to bind to the action.
* @param pAction the action to be bound to the key.
*/
void bind(KeyCode pCode, String pAction);
/**
* Creates a new binding and adds it to the behavior, binding the key to the action.
*
* @param pCode the key code to bind to the action.
* @param pAction the action to be bound to the key.
* @param pModifiers the set of modifier keys to apply to the binding.
* @throws IllegalArgumentException if one or more of the supplied modifiers is represented by a {@link KeyCode} that returns
* false for a call to the {@link KeyCode#isModifierKey()} method.
*/
void bind(KeyCode pCode, String pAction, KeyCode... pModifiers);
/**
* Removes the binding from the behavior, if present.
*
* @param pCode the binding to remove.
*/
void removeBinding(KeyCode pCode);
/**
* Called when a key event is fired. The event will be distributed to all bound actions, if present.
*
* @param pEvent the event to process.
*/
void keyEvent(KeyEvent pEvent);
/**
* Invokes the action. Callers should override this method to process actions in which they are interested.
*
* @param pAction the action to invoke.
*/
void actionPerformed(String pAction);
}
|
def capitalizeCharacter(s, pos):
    """Return ``s`` with the character at index ``pos`` upper-cased.

    Fixes the negative-index bug in the original: for ``pos == -1`` the
    slice ``s[pos + 1:]`` becomes ``s[0:]`` and appends the whole string
    again.  Negative positions are therefore normalized first.

    :param s: the input string.
    :param pos: character index; negative values count from the end.
    :raises IndexError: if ``pos`` is out of range for ``s``.
    """
    if pos < 0:
        # Normalize so the slice arithmetic below is consistent.
        pos += len(s)
        if pos < 0:
            raise IndexError("string index out of range")
    result = s[:pos] + s[pos].upper() + s[pos + 1:]
    return result
require 'securerandom'
require 'timers'
require 'thread'
require 'json'
require 'aws_iot_device/mqtt_shadow_client/token_creator'
require 'aws_iot_device/mqtt_shadow_client/json_payload_parser'
module AwsIotDevice
module MqttShadowClient
class ShadowActionManager
### This the main AWS action manager
### It enables the AWS IoT actions (get, update, delete)
### It manages the timeout countdown after an action has been started
### Action requests are sent on the general actions topic and the answer is retrieved from the accepted/refused/delta topics
attr_accessor :logger
def initialize(shadow_name, mqtt_client, persistent_subscribe=false)
@shadow_name = shadow_name
@topic_manager = ShadowTopicManager.new(mqtt_client, shadow_name)
@payload_parser = JSONPayloadParser.new
@is_subscribed = {}
@is_subscribed[:get] = false
@is_subscribed[:update] = false
@is_subscribed[:delete] = false
@token_handler = TokenCreator.new(shadow_name, mqtt_client.client_id)
@persistent_subscribe = persistent_subscribe
@last_stable_version = -1 #Mean no currentely stable
@topic_subscribed_callback = {}
@topic_subscribed_callback[:get] = nil
@topic_subscribed_callback[:update] = nil
@topic_subscribed_callback[:delta] = nil
@topic_subscribed_task_count = {}
@topic_subscribed_task_count[:get] = 0
@topic_subscribed_task_count[:update] = 0
@topic_subscribed_task_count[:delete] = 0
@token_pool = {}
@token_callback = {}
@task_count_mutex = Mutex.new
@token_mutex = Mutex.new
@parser_mutex = Mutex.new
set_basic_callback
end
### Send and publish packet with an empty payload contains in a valid JSON format.
### A unique token is generate and send in the packet in order to trace the action.
### Subscribes to the get/accepted and get/rejected topics of the corresponding shadow.
### If the request is accepted, the answer is sent on the get/accepted topic.
### It contains all the details of the shadow state in a JSON document.
### A specific callback may be passed as a Proc parameter.
### Before exit, the function start a timer count down in the separate thread.
### If the time ran out, the timer_handler function is called and the get action is cancelled using the token.
###
### Parameter:
### > callback: the Proc to execute when the answer to the get request is received.
### It should accept three different parameters:
### - payload : the answer content
### - response_status : among ['accepted', 'refused', 'delta']
### - token : the token associated with the get request
###
### > timeout: the period after which the request should be canceled and timer_handler is called
###
### Returns :
### > the token associate to the current action (which also store in @token_pool)
def shadow_get(timeout=5, callback=nil, &block)
shadow_action(:get, "", timeout, callback, &block)
end
def shadow_update(payload, timeout=5, callback=nil, &block)
shadow_action(:update, payload, timeout, callback, &block)
end
def shadow_delete(timeout=5, callback=nil, &block)
shadow_action(:delete, "", timeout, callback, &block)
end
def register_get_callback(callback, &block)
register_action_callback(:get, callback, &block)
end
def register_update_callback(callback, &block)
register_action_callback(:update, callback, &block)
end
def register_delete_callback(callback, &block)
register_action_callback(:delete, callback, &block)
end
def register_shadow_delta_callback(callback, &block)
if callback.is_a?(Proc)
@topic_subscribed_callback[:delta] = callback
elsif block_given?
@topic_subscribed_callback[:delta] = block
end
@topic_manager.shadow_topic_subscribe("delta", @default_callback)
end
def remove_get_callback
remove_action_callback(:get)
end
def remove_update_callback
remove_action_callback(:update)
end
def remove_delete_callback
remove_action_callback(:delete)
end
# Drop the registered delta callback and unsubscribe from the delta topic.
# Fix: the original referenced the misspelled @topic_subscribe_callback and
# used `.delete[:delta]` (indexing the return of a no-arg delete), which
# raised NoMethodError at runtime.
def remove_shadow_delta_callback
  @topic_subscribed_callback.delete(:delta)
  @topic_manager.shadow_topic_unsubscribe("delta")
end
def logger?
!@logger.nil? && @logger.is_a?(Logger)
end
private
# Publish one shadow action (:get, :update or :delete) with a fresh client
# token, arm a timeout timer for it, and register the answer callback.
#
# Returns the token associated with this request (also stored in @token_pool
# so do_message_callback / timeout_manager can find the pending request).
def shadow_action(action, payload="", timeout=5, callback=nil, &block)
  # Fix: was `current_token = Symbol`, which assigned the Symbol *class*
  # as a placeholder value; nil expresses "not yet created" honestly.
  current_token = nil
  timer = Timers::Group.new
  json_payload = ""
  @token_mutex.synchronize {
    current_token = @token_handler.create_next_token
  }
  # If no answer arrives in time, cancel the request via its token.
  timer.after(timeout) { timeout_manager(action, current_token) }
  @parser_mutex.synchronize {
    @payload_parser.set_message(payload) unless payload == ""
    @payload_parser.set_attribute_value("clientToken", current_token)
    json_payload = @payload_parser.get_json
  }
  # Subscribe to the accepted/rejected topics before publishing so the
  # answer cannot be missed.
  handle_subscription(action, timeout) unless @is_subscribed[action]
  @topic_manager.shadow_topic_publish(action.to_s, json_payload)
  @task_count_mutex.synchronize {
    @topic_subscribed_task_count[action] += 1
  }
  @token_pool[current_token] = timer
  register_token_callback(current_token, callback, &block)
  # Run the countdown in a separate thread so this call returns immediately.
  Thread.new { timer.wait }
  current_token
end
### Cancels a request whose timer expired: forgets its token/callback and
### releases the topic subscription once no other task still needs it.
def timeout_manager(action_name, token)
  return unless @token_pool.has_key?(token)
  action = action_name.to_sym
  @token_pool.delete(token)
  @token_callback.delete(token)
  @logger.warn("The #{action_name} request with the token #{token} has timed out!\n") if logger?
  @task_count_mutex.synchronize {
    @topic_subscribed_task_count[action] -= 1
    # Fix: the guard was inverted (`unless ... <= 0`), which unsubscribed
    # while tasks were still pending and never cleaned up at zero. This now
    # mirrors decresase_task_count, including passing the topic name as a
    # String to shadow_topic_unsubscribe.
    if @topic_subscribed_task_count[action] <= 0
      @topic_subscribed_task_count[action] = 0
      unless @persistent_subscribe
        @topic_manager.shadow_topic_unsubscribe(action.to_s)
        @is_subscribed[action] = false
      end
    end
  }
end
# Installs the default inbound-message handler and wires the broker's
# SUBACK/UNSUBACK acknowledgements to the @is_subscribed bookkeeping.
def set_basic_callback
  @default_callback = proc { |message| do_message_callback(message) }
  @topic_manager.on_suback = lambda do |topics|
    action = @topic_manager.retrieve_action(topics[0])
    # Mark the action subscribed once the broker acknowledges it.
    @is_subscribed[action] = true unless action.nil?
  end
  @topic_manager.on_unsuback = lambda do |topics|
    action = @topic_manager.retrieve_action(topics[0])
    # Fix: the modifier was `if action.nil?`, which only ever wrote under a
    # nil key and left real actions permanently marked as subscribed.
    @is_subscribed[action] = false unless action.nil?
  end
end
def register_token_callback(token, callback, &block)
if callback.is_a?(Proc)
@token_callback[token] = callback
elsif block_given?
@token_callback[token] = block
end
end
def remove_token_callback(token)
@token_callback.delete(token)
end
def register_action_callback(action, callback, &block)
if callback.is_a?(Proc)
@topic_subscribed_callback[action] = callback
elsif block_given?
@topic_subscribed_callback[action] = block
end
end
def remove_action_callback(action)
@topic_subscribed_callback[action] = nil
end
# Decrements the in-flight task counter for +action+; once it reaches zero,
# drops the topic subscription unless subscriptions are persistent.
# NOTE(review): the name misspells "decrease"; kept as-is so call sites
# elsewhere in this class keep working.
def decresase_task_count(action)
  @topic_subscribed_task_count[action] -= 1
  if @topic_subscribed_task_count[action] <= 0
    # Clamp at zero so a stray extra decrement cannot go negative.
    @topic_subscribed_task_count[action] = 0
    unless @persistent_subscribe
      @topic_manager.shadow_topic_unsubscribe(action.to_s)
      @is_subscribed[action] = false
    end
  end
end
### The default callback that is called by every actions
### It acknowledge the accepted status if action success
### Call a specific callback for each actions if it defined have been register previously
def do_message_callback(message)
topic = message.topic
action = parse_action(topic)
type = parse_type(topic)
payload = message.payload
token = nil
new_version = -1
@parser_mutex.synchronize() {
@payload_parser.set_message(payload)
new_version = @payload_parser.get_attribute_value("version")
token = @payload_parser.get_attribute_value("clientToken")
}
if %w(get update delete).include?(action)
if @token_pool.has_key?(token)
@token_pool[token].cancel
@token_pool.delete(token)
if type.eql?("accepted")
do_accepted(message, action.to_sym, token, type, new_version)
else
do_rejected(token, action, new_version)
end
@task_count_mutex.synchronize {
decresase_task_count(action.to_sym)
}
end
elsif %w(delta).include?(action)
do_delta(message, new_version)
end
end
def do_accepted(message, action, token, type, new_version)
if new_version && new_version >= @last_stable_version
@logger.info("The #{action} action with the token #{token} have been accepted.") if logger?
type.eql?("delete") ? @last_stable_version = -1 : @last_stable_version = new_version
Thread.new do
accepted_tasks(message, action, token)
end
else
@logger.warn("CATCH AN ACCEPTED #{action} BUT OUTDATED/INVALID VERSION (= #{new_version})\n") if logger?
end
end
def accepted_tasks(message, action, token)
@topic_subscribed_callback[action].call(message) unless @topic_subscribed_callback[action].nil?
@token_callback[token].call(message) if @token_callback.has_key?(token)
@token_callback.delete(token)
end
def do_rejected(token, action, new_version)
if new_version && new_version >= @last_stable_version
@logger.info("The #{action} action with the token #{token} have been rejected.") if logger?
@token_callback.delete(token)
else
@logger.warn("CATCH AN REJECTED #{action} BUT OUTDATED/INVALID VERSION (= #{new_version})\n") if logger?
end
end
def do_delta(message, new_version)
if new_version && new_version >= @last_stable_version
@logger.info("A delta action have been accepted.") if logger?
@last_stable_version = new_version
Thread.new { @topic_subscribed_callback[:delta].call(message) } unless @topic_subscribed_callback[:delta].nil?
else
@logger.warn("CATCH A DELTA BUT OUTDATED/INVALID VERSION (= #{new_version})\n") if logger?
end
end
def handle_subscription(action, timeout)
@topic_manager.shadow_topic_subscribe(action.to_s, @default_callback)
if @topic_manager.paho_client?
ref = Time.now + timeout
while !@is_subscribed[action] && handle_timeout(ref) do
sleep 0.0001
end
else
sleep 2
end
end
def handle_timeout(ref)
Time.now <= ref
end
def parse_shadow_name(topic)
topic.split('/')[2]
end
def parse_action(topic)
if topic.split('/')[5] == "delta"
topic.split('/')[5]
else
topic.split('/')[4]
end
end
def parse_type(topic)
topic.split('/')[5]
end
end
end
end
|
# Read two integer values
a = int(input())
b = int(input())

# Swap their values using tuple unpacking (Python idiom; no temporary needed)
a, b = b, a

# Print the variables
print("a =", a)
print("b =", b)
<gh_stars>1-10
# Test double for Indago::RelationsPopulator whose #process accepts any
# arguments and does nothing.
class MockRelationsPopulator < Indago::RelationsPopulator
  def process(*); end
end
|
package com.example.lsireneva.todoapp.fragments;
import android.content.Context;
import android.net.Uri;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.WindowManager;
import com.example.lsireneva.todoapp.R;
/**
* Created by <NAME>
*/
public class EditItemFragment extends Fragment {
// TODO: Rename parameter arguments, choose names that match
// the fragment initialization parameters, e.g. ARG_ITEM_NUMBER
private static final String ARG_PARAM1 = "param1";
private static final String ARG_PARAM2 = "param2";
// TODO: Rename and change types of parameters
private String mParam1;
private String mParam2;
View editFragmentView;
private OnFragmentInteractionListener mListener;
public EditItemFragment() {
// Required empty public constructor
}
public static EditItemFragment newInstance(String param1, String param2) {
EditItemFragment fragment = new EditItemFragment();
Bundle args = new Bundle();
args.putString(ARG_PARAM1, param1);
args.putString(ARG_PARAM2, param2);
fragment.setArguments(args);
return fragment;
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if (getArguments() != null) {
mParam1 = getArguments().getString(ARG_PARAM1);
mParam2 = getArguments().getString(ARG_PARAM2);
}
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
// Inflate the layout for this fragment
editFragmentView = inflater.inflate(R.layout.fragment_edit_item, container, false);
getActivity().getWindow().setSoftInputMode(
WindowManager.LayoutParams.SOFT_INPUT_STATE_ALWAYS_HIDDEN);
return editFragmentView;
}
// TODO: Rename method, update argument and hook method into UI event
public void onButtonPressed(Uri uri) {
if (mListener != null) {
mListener.onFragmentInteraction(uri);
}
}
@Override
public void onAttach(Context context) {
super.onAttach(context);
}
@Override
public void onDetach() {
super.onDetach();
mListener = null;
}
public interface OnFragmentInteractionListener {
// TODO: Update argument type and name
void onFragmentInteraction(Uri uri);
}
}
|
/** Configuration for WPS plugin. */
package io.opensphere.wps.config.v1;
|
<gh_stars>1-10
var webpack = require('webpack');
var path = require('path');
// Start from the development configuration and adapt it for production.
var devConfig = require('./webpack.config.js');
var BUILD_DIR = path.resolve(__dirname, 'dist');
var CleanWebpackPlugin = require('clean-webpack-plugin');

// Cheaper source maps for production builds.
devConfig.devtool = 'cheap-module-source-map';

// Remove the Hot Module Replacement entry points
// NOTE(review): assumes the first two entries are the HMR entry points —
// confirm against webpack.config.js.
devConfig.entry.splice(0,2);

// Remove the HMR plugin
devConfig.plugins.splice(0, 1);

// Add code optimisations
devConfig.plugins = devConfig.plugins.concat([
    // Empty the output directory before each build.
    new CleanWebpackPlugin(BUILD_DIR),
    new webpack.DefinePlugin({
        'process.env': {
            // Lets libraries strip their development-only code paths.
            'NODE_ENV': JSON.stringify('production')
        }
    }),
    new webpack.optimize.UglifyJsPlugin({minimize: true}),
    new webpack.optimize.DedupePlugin()
]);

module.exports = devConfig;
// Singly linked list node; each node acts as the head of the list that
// follows it.
class Node {
public:
    // Constructor
    Node(int value);

    // Member Variables
    int value;   // payload stored in this node
    Node* next;  // next node in the list, or nullptr at the tail

    // Member Functions
    void insert (int value);    // insert a new node directly after this one
    bool contains (int value);  // true if value appears from this node onward
    void remove (int value);    // unlink/free the first later node holding value

    // NOTE(review): no destructor is declared, so nodes allocated by
    // insert() are only reclaimed via remove() — confirm ownership
    // expectations with callers.
};
// Constructor: a detached single-element list holding `value`.
Node::Node(int value) : value(value), next(nullptr) {}
// Insert a node with given value after this
// (the new node takes ownership of this node's former successor).
void Node::insert(int value) {
    Node* newNode = new Node(value);
    newNode->next = this->next;
    this->next = newNode;
}
// Check if linked list contains given value
bool Node::contains(int value) {
if (this->value == value) {
return true;
}
if (this->next == nullptr) {
return false;
}
return this->next->contains(value);
}
// Remove given value from linked list if it exists
// Only the first matching node *after* this one is unlinked and freed.
// NOTE(review): a match stored in this head node itself cannot be removed
// (the node cannot unlink itself) — confirm callers account for that.
void Node::remove(int value) {
    Node* node = this;
    while (node->next != nullptr) {
        if (node->next->value == value) {
            // Unlink the match, then free it.
            Node* temp = node->next;
            node->next = temp->next;
            delete temp;
            break;
        }
        node = node->next;
    }
}
#!/bin/bash
# random_test is a tool to check the sanity of media-server, including origin
# server and edge server. And it also checks the status of server, such as
# cpu and memory usage.
# Author: Jiashun Zhu(zhujiashun2010@gmail.com)
if [ $# -eq 0 ]
then
echo "Usgae: bash script.sh <test_duration_time> [<cpu_threshold> <mem_threshold>]"
exit 1
fi
ms=./media_server
rt=./random_test
host=127.0.0.1
inner_port=8900
outer_port=8901
publishcdn_port=8906
playcdn1_port=8907
playcdn2_port=8904
playcdn3_port=8905
pid_list=()
port_list=()
export TCMALLOC_SAMPLE_PARAMETER=524288 # required by heap profiler
nohup $ms -port=$inner_port &
pid_list+=(`echo $!`)
port_list+=($inner_port)
nohup $ms -port=$publishcdn_port -proxy_to=$host:$inner_port &
pid_list+=(`echo $!`)
port_list+=($publishcdn_port)
nohup $ms -port=$outer_port -proxy_to=$host:$inner_port &
pid_list+=(`echo $!`)
port_list+=($outer_port)
nohup $ms -port=$playcdn1_port -proxy_to=$host:$outer_port &
pid_list+=(`echo $!`)
port_list+=($playcdn1_port)
nohup $ms -port=$playcdn2_port -proxy_to=$host:$outer_port &
pid_list+=(`echo $!`)
port_list+=($playcdn2_port)
nohup $ms -proxy_to=list://$host:$playcdn1_port,$host:$playcdn2_port -proxy_lb=rr -port=$playcdn3_port &
pid_list+=(`echo $!`)
port_list+=($playcdn3_port)
kill_all() {
for ((i=0; i<${#pid_list[@]}; i++));
do
kill -9 ${pid_list[$i]}
done
}
test_time=$1
echo "Testting..."
# pressure testing
nohup $rt -play_server=$host:$inner_port -push_server=$host:$inner_port --test_duration_s=$test_time -dummy_port=8910 &
nohup $rt -play_server=$host:$outer_port -push_server=$host:$inner_port --test_duration_s=$test_time -dummy_port=8911 &
nohup $rt -play_server=$host:$playcdn1_port -push_server=$host:$publishcdn_port --test_duration_s=$test_time -dummy_port=8912 &
nohup $rt -play_server=$host:$playcdn3_port -push_server=$host:$inner_port --test_duration_s=$test_time -dummy_port=8913 &
# functional testing
./rtmp_press --push_server=127.0.0.1:8906 --play_server=127.0.0.1:8905 --push_vhost=test.com --play_vhost=test.com --match_duration=$test_time
if [ $? -ne 0 ]; then
echo "Fail to pass functional testing"
kill_all
exit 1
fi
echo "[OK] Passing functional testing..."
# cpu and mem testing
for ((i=0; i<${#pid_list[@]}; i++));
do
t=`ps -p ${pid_list[$i]} -o %cpu,rss | tail -1`
IFS=' ' read -r -a array <<< $t
cpu_threshold=35 # it means 35% cpu usage
if [ -n "$2" ]; then
cpu_threshold=$2
fi
cpu_use=`echo ${array[0]}'>'$cpu_threshold | bc -l`
if [ $cpu_use -eq 1 ]; then
echo "CPU usage of process is greater than $cpu_threshold%, cpu profile:"
curl 127.0.0.1:${port_list[$i]}/hotspots/cpu
kill_all
exit 1
fi
mem_threshold=400000 #400M
if [ -n "$3" ]; then
mem_threshold=$3
fi
mem_use=`echo ${array[1]}'>'$mem_threshold | bc -l`
if [ $mem_use -eq 1 ]; then
echo "Mem usage of process is greater than $mem_threshold, mem profile:"
curl 127.0.0.1:${port_list[$i]}/hotspots/heap
kill_all
exit 1
fi
done
echo "[OK] Passing cpu and mem testing..."
# stability testing
core_rc=0
modules=(inner publish_cdn outer play_cdn1 play_cdn2 play_cdn3)
for ((i=0; i<${#pid_list[@]}; i++));
do
if ps -p ${pid_list[$i]} > /dev/null
then
echo "${modules[$i]} is running"
else
echo "${modules[$i]} is not running"
core_rc=1
fi
done
if [ $core_rc -ne 0 ]; then
echo "Fail to pass stability testing, one process has failed"
kill_all
exit $core_rc
fi
echo "[OK] Passing stability testing..."
echo "Passing all tests!"
kill_all
exit 0
|
#!/usr/bin/env bash
# Install Homebrew via its official bootstrap script.
# Fix: `set -e pipefail` passed "pipefail" as a *positional argument*
# instead of enabling the option; -o (or the combined -eo) is required.
set -eo pipefail
curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install.sh | bash
|
// Return true when `el` is present in `arr`.
// Array.prototype.includes (ES2016; the file already uses `let`) replaces
// the indexOf sentinel check and also matches NaN correctly.
function checkExists(arr, el) {
  return arr.includes(el);
}

let array = ["apple", "banana", "cherry", "grapes"];
let element = "grapes";

let exists = checkExists(array, element);
console.log(exists);
#!/bin/bash
set -o pipefail
# config
default_semvar_bump=${DEFAULT_BUMP:-minor}
with_v=${WITH_V:-false}
release_branches=${RELEASE_BRANCHES:-master,main}
custom_tag=${CUSTOM_TAG}
source=${SOURCE:-.}
dryrun=${DRY_RUN:-false}
initial_version=${INITIAL_VERSION:-0.0.0}
tag_context=${TAG_CONTEXT:-repo}
suffix=${PRERELEASE_SUFFIX:-beta}
verbose=${VERBOSE:-true}
cd ${GITHUB_WORKSPACE}/${source}
echo "*** CONFIGURATION ***"
echo -e "\tDEFAULT_BUMP: ${default_semvar_bump}"
echo -e "\tWITH_V: ${with_v}"
echo -e "\tRELEASE_BRANCHES: ${release_branches}"
echo -e "\tCUSTOM_TAG: ${custom_tag}"
echo -e "\tSOURCE: ${source}"
echo -e "\tDRY_RUN: ${dryrun}"
echo -e "\tINITIAL_VERSION: ${initial_version}"
echo -e "\tTAG_CONTEXT: ${tag_context}"
echo -e "\tPRERELEASE_SUFFIX: ${suffix}"
echo -e "\tVERBOSE: ${verbose}"
current_branch=$(git rev-parse --abbrev-ref HEAD)
pre_release="true"
IFS=',' read -ra branch <<< "$release_branches"
for b in "${branch[@]}"; do
echo "Is $b a match for ${current_branch}"
if [[ "${current_branch}" =~ $b ]]
then
pre_release="false"
fi
done
echo "pre_release = $pre_release"
# fetch tags
git fetch --tags
# get latest tag that looks like a semver (with or without v)
case "$tag_context" in
*repo*)
tag=$(git for-each-ref --sort=-v:refname --format '%(refname:lstrip=2)' | grep -E "^v?[0-9]+\.[0-9]+\.[0-9]+$" | head -n1)
pre_tag=$(git for-each-ref --sort=-v:refname --format '%(refname:lstrip=2)' | grep -E "^v?[0-9]+\.[0-9]+\.[0-9]+(-$suffix\.[0-9]+)?$" | head -n1)
;;
*branch*)
tag=$(git tag --list --merged HEAD --sort=-v:refname | grep -E "^v?[0-9]+\.[0-9]+\.[0-9]+$" | head -n1)
pre_tag=$(git tag --list --merged HEAD --sort=-v:refname | grep -E "^v?[0-9]+\.[0-9]+\.[0-9]+(-$suffix\.[0-9]+)?$" | head -n1)
;;
* ) echo "Unrecognised context"; exit 1;;
esac
# if there are none, start tags at INITIAL_VERSION which defaults to 0.0.0
if [ -z "$tag" ]
then
log=$(git log --pretty='%B')
tag="$initial_version"
if [ -z "$pre_tag" ] && $pre_release
then
pre_tag="$initial_version"
fi
else
log=$(git log $tag..HEAD --pretty='%B')
fi
# get current commit hash for tag
tag_commit=$(git rev-list -n 1 $tag)
# get current commit hash
commit=$(git rev-parse HEAD)
if [ "$tag_commit" == "$commit" ]; then
echo "No new commits since previous tag. Skipping..."
echo ::set-output name=tag::$tag
exit 0
fi
# echo log if verbose is wanted
if $verbose
then
echo $log
fi
case "$log" in
*#major* ) new=$(semver -i major $tag); part="major";;
*#minor* ) new=$(semver -i minor $tag); part="minor";;
*#patch* ) new=$(semver -i patch $tag); part="patch";;
*#none* )
echo "Default bump was set to none. Skipping..."; echo ::set-output name=new_tag::$tag; echo ::set-output name=tag::$tag; exit 0;;
* )
if [ "$default_semvar_bump" == "none" ]; then
echo "Default bump was set to none. Skipping..."; echo ::set-output name=new_tag::$tag; echo ::set-output name=tag::$tag; exit 0
else
new=$(semver -i "${default_semvar_bump}" $tag); part=$default_semvar_bump
fi
;;
esac
if $pre_release
then
# Already a prerelease available, bump it
if [[ "$pre_tag" == *"$new"* ]]; then
new=$(semver -i prerelease $pre_tag --preid $suffix); part="pre-$part"
else
new="$new-$suffix.1"; part="pre-$part"
fi
fi
echo $part
# did we get a new tag?
if [ ! -z "$new" ]
then
# prefix with 'v'
if $with_v
then
new="v$new"
fi
fi
if [ ! -z $custom_tag ]
then
new="$custom_tag"
fi
if $pre_release
then
echo -e "Bumping tag ${pre_tag}. \n\tNew tag ${new}"
else
echo -e "Bumping tag ${tag}. \n\tNew tag ${new}"
fi
# set outputs
echo ::set-output name=new_tag::$new
echo ::set-output name=part::$part
# use dry run to determine the next tag
if $dryrun
then
echo ::set-output name=tag::$tag
exit 0
fi
echo ::set-output name=tag::$new
# create local git tag
git tag $new
# push new tag ref to github
dt=$(date '+%Y-%m-%dT%H:%M:%SZ')
full_name=$GITHUB_REPOSITORY
git_refs_url=$(jq .repository.git_refs_url $GITHUB_EVENT_PATH | tr -d '"' | sed 's/{\/sha}//g')
echo "$dt: **pushing tag $new to repo $full_name"
git_refs_response=$(
curl -s -X POST $git_refs_url \
-H "Authorization: token $GITHUB_TOKEN" \
-d @- << EOF
{
"ref": "refs/tags/$new",
"sha": "$commit"
}
EOF
)
git_ref_posted=$( echo "${git_refs_response}" | jq .ref | tr -d '"' )
echo "::debug::${git_refs_response}"
if [ "${git_ref_posted}" = "refs/tags/${new}" ]; then
exit 0
else
echo "::error::Tag was not created properly."
exit 1
fi
|
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
import numpy as np
from matplotlib.collections import LineCollection

# Weekly random-walk time series to color by date.
dates = pd.date_range("2017-01-01", "2017-06-20", freq="7D" )
y = np.cumsum(np.random.normal(size=len(dates)))
s = pd.Series(y, index=dates)

fig, ax = plt.subplots()

#convert dates to numbers first
inxval = mdates.date2num(s.index.to_pydatetime())
# Build (N-1) two-point segments so each segment can carry its own color.
points = np.array([inxval, s.values]).T.reshape(-1,1,2)
segments = np.concatenate([points[:-1],points[1:]], axis=1)

# Debug output left in place; remove once segment shapes are verified.
print("point: ",points)
print(points.shape)
print("segment: ", segments)
print(segments.shape)

lc = LineCollection(segments, cmap="plasma", linewidth=3)
# set color to date values
lc.set_array(inxval)
# note that you could also set the colors according to y values
# lc.set_array(s.values)
# add collection to axes
ax.add_collection(lc)

# Month ticks with day minor ticks, labeled by abbreviated month name.
ax.xaxis.set_major_locator(mdates.MonthLocator())
ax.xaxis.set_minor_locator(mdates.DayLocator())
monthFmt = mdates.DateFormatter("%b")
ax.xaxis.set_major_formatter(monthFmt)
# LineCollection does not update data limits automatically; rescale manually.
ax.autoscale_view()
plt.show()
def handle_path(hmap, path, next_element_type):
    """Resolve one step of ``path`` inside ``hmap``, creating containers as needed.

    BUGFIX: the original annotated the return type as ``dict``, but the
    function actually returns either a ``(container, key)`` tuple (terminal
    case) or the container itself, which may be a dict or a list; the wrong
    annotation has been removed.

    Args:
        hmap: Current container (dict, or list when indexed by an int).
        path: Remaining path components; ``path[0]`` is applied to ``hmap``.
        next_element_type: Constructor (``dict`` or ``list``) for a missing
            child container at ``path[0]``.

    Returns:
        ``(hmap, path[0])`` when a single component remains, otherwise
        ``hmap`` after ensuring a child container exists at ``path[0]``.
    """
    # Terminal case: hand back the container together with the final key.
    if len(path) == 1:
        return hmap, path[0]
    # An integer next component means the child must be a list.
    if isinstance(path[1], int):
        next_element_type = list
    if isinstance(path[0], int) and isinstance(hmap, list):
        # Pad the list with None so index path[0] becomes addressable.
        while len(hmap) < path[0]:
            hmap.append(None)
        # Only append a new container when the slot does not exist yet; an
        # existing slot (even one holding None) is left untouched.
        if len(hmap) == path[0]:
            hmap.append(next_element_type())
    else:
        if path[0] not in hmap:
            hmap[path[0]] = next_element_type()
    return hmap
#!/bin/sh -l
# Entry point of the "add new issues/PRs to a project board" GitHub Action.
# INPUT_PROJECT (the project board URL) is supplied by the action runner.
PROJECT_URL="$INPUT_PROJECT"
if [ -z "$PROJECT_URL" ]; then
echo "PROJECT_URL is not defined." >&2
exit 1
fi
# Classify a project-board URL as "org", "user", or "repo".
# Prints the type on stdout; exits 1 for an unrecognized URL.
# NOTE: pattern order matters — the generic */projects/* pattern would also
# match org/user URLs, so it must be tested last.
get_project_type() {
  _URL="$1"
  case "$_URL" in
    https://github.com/orgs/*) echo "org" ;;
    https://github.com/users/*) echo "user" ;;
    https://github.com/*/projects/*) echo "repo" ;;
    *)
      echo "Invalid PROJECT_URL: $_URL" >&2
      exit 1
      ;;
  esac
  unset _URL
}
# Resolve the numeric project id for a project-board URL.
#   $1: project type ("org" | "user" | "repo")
#   $2: project URL
# Prints ONLY the id on stdout (callers capture it via $(...)); exits 1 when
# no matching project is found.
find_project_id() {
  _PROJECT_TYPE="$1"
  _PROJECT_URL="$2"
  case "$_PROJECT_TYPE" in
    org)
      # Extract the org/user name from the URL to build the list endpoint.
      _ORG_NAME=$(echo "$_PROJECT_URL" | sed -e 's@https://github.com/orgs/\([^/]\+\)/projects/[0-9]\+@\1@')
      _ENDPOINT="https://api.github.com/orgs/$_ORG_NAME/projects"
      ;;
    user)
      _USER_NAME=$(echo "$_PROJECT_URL" | sed -e 's@https://github.com/users/\([^/]\+\)/projects/[0-9]\+@\1@')
      _ENDPOINT="https://api.github.com/users/$_USER_NAME/projects"
      ;;
    repo)
      _ENDPOINT="https://api.github.com/repos/$GITHUB_REPOSITORY/projects"
      ;;
  esac
  _PROJECTS=$(curl -s -X GET -u "$GITHUB_ACTOR:$TOKEN" --retry 3 \
    -H 'Accept: application/vnd.github.inertia-preview+json' \
    "$_ENDPOINT")
  # BUGFIX: this debug dump previously went to stdout, so the caller's
  # PROJECT_ID=$(find_project_id ...) captured the whole JSON payload in
  # addition to the id, producing malformed API URLs later. Send it to
  # stderr so only the id is emitted on stdout.
  echo "$_PROJECTS" >&2
  _PROJECTID=$(echo "$_PROJECTS" | jq -r ".[] | select(.html_url == \"$_PROJECT_URL\").id")
  if [ "$_PROJECTID" != "" ]; then
    echo "$_PROJECTID"
  else
    echo "No project was found." >&2
    exit 1
  fi
  unset _PROJECT_TYPE _PROJECT_URL _ORG_NAME _USER_NAME _ENDPOINT _PROJECTS _PROJECTID
}
# Print the id of the column named $2 inside project $1.
# Output is empty when no column of that name exists.
find_column_id() {
  _PID="$1"
  _COLNAME="$2"
  _COLUMNS=$(curl -s -X GET -u "$GITHUB_ACTOR:$TOKEN" --retry 3 \
    -H 'Accept: application/vnd.github.inertia-preview+json' \
    "https://api.github.com/projects/$_PID/columns")
  echo "$_COLUMNS" | jq -r ".[] | select(.name == \"$_COLNAME\").id"
  unset _PID _COLNAME _COLUMNS
}
# Classify the board URL; aborts inside the substitution on a bad URL.
PROJECT_TYPE=$(get_project_type "${PROJECT_URL:?<Error> required this environment variable}")
# Org/user boards need a personal access token; repo boards can use the
# workflow-scoped GITHUB_TOKEN.
if [ "$PROJECT_TYPE" = org ] || [ "$PROJECT_TYPE" = user ]; then
  if [ -z "$MY_GITHUB_TOKEN" ]; then
    echo "MY_GITHUB_TOKEN not defined" >&2
    exit 1
  fi
  TOKEN="$MY_GITHUB_TOKEN" # It's User's personal access token. It should be secret.
else
  if [ -z "$GITHUB_TOKEN" ]; then
    echo "GITHUB_TOKEN not defined" >&2
    exit 1
  fi
  TOKEN="$GITHUB_TOKEN" # GitHub sets. The scope in only the repository containing the workflow file.
fi
INITIAL_COLUMN_NAME="$INPUT_COLUMN_NAME"
if [ -z "$INITIAL_COLUMN_NAME" ]; then
  # assign the column name by default
  INITIAL_COLUMN_NAME='To do'
  # BUGFIX: "==" is a bashism that fails under /bin/sh; POSIX test uses "=".
  if [ "$GITHUB_EVENT_NAME" = "pull_request" ] || [ "$GITHUB_EVENT_NAME" = "pull_request_target" ]; then
    echo "changing col name for PR event"
    INITIAL_COLUMN_NAME='In progress'
  fi
fi
PROJECT_ID=$(find_project_id "$PROJECT_TYPE" "$PROJECT_URL")
INITIAL_COLUMN_ID=$(find_column_id "$PROJECT_ID" "${INITIAL_COLUMN_NAME:?<Error> required this environment variable}")
if [ -z "$INITIAL_COLUMN_ID" ]; then
  echo "INITIAL_COLUMN_ID is not found." >&2
  exit 1
fi
case "$GITHUB_EVENT_NAME" in
  issues)
    ISSUE_ID=$(jq -r '.issue.id' < "$GITHUB_EVENT_PATH")
    # Add this issue to the project column
    curl -s -X POST -u "$GITHUB_ACTOR:$TOKEN" --retry 3 \
      -H 'Accept: application/vnd.github.inertia-preview+json' \
      -d "{\"content_type\": \"Issue\", \"content_id\": $ISSUE_ID}" \
      "https://api.github.com/projects/columns/$INITIAL_COLUMN_ID/cards"
    ;;
  pull_request|pull_request_target)
    PULL_REQUEST_ID=$(jq -r '.pull_request.id' < "$GITHUB_EVENT_PATH")
    # Add this pull_request to the project column
    curl -s -X POST -u "$GITHUB_ACTOR:$TOKEN" --retry 3 \
      -H 'Accept: application/vnd.github.inertia-preview+json' \
      -d "{\"content_type\": \"PullRequest\", \"content_id\": $PULL_REQUEST_ID}" \
      "https://api.github.com/projects/columns/$INITIAL_COLUMN_ID/cards"
    ;;
  *)
    echo "Nothing to be done on this action: $GITHUB_EVENT_NAME" >&2
    exit 1
    ;;
esac
// Builds a per-value star tally (index i holds stars for value i+1, up to
// the data maximum) and logs the ""-joined row. Values that never occur
// keep their single-space placeholder, exactly as before.
function histogram(data) {
  const max = Math.max(...data);
  const slots = Array.from({ length: max }, () => " ");
  data.forEach((value) => {
    const idx = value - 1;
    slots[idx] = slots[idx] === " " ? "*" : slots[idx] + "*";
  });
  console.log(slots.join(""));
}
histogram([20, 40, 30, 70, 10, 40, 40]);
#!/bin/bash
#
# Run unit test and generate test coverage report
#
# absolute directory of this script
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
BASE_PKG_DIR="github.com/kafkaesque-io/pulsar-beam/src/"
ALL_PKGS=""
# Collect every package directory under src/ except test-only/docs dirs.
# BUGFIX: cd and path expansions were unquoted (break on spaces) and cd
# failures were silently ignored, letting go test run in the wrong dir.
cd "$DIR/../src" || exit 1
for d in */ ; do
    if [[ ${d} != "unit-test/" && ${d} != "e2e/" && ${d} != "docs/" ]] # exclude unit-test for test coverage
    then
        pkg=${d%/}
        ALL_PKGS=${ALL_PKGS}","${BASE_PKG_DIR}${pkg}
    fi
done
# Drop the leading comma left by the accumulation loop.
ALL_PKGS=$(echo "$ALL_PKGS" | sed 's/^,//')
echo "$ALL_PKGS"
cd "$DIR/../src/unit-test" || exit 1
go test ./... -coverpkg="$ALL_PKGS" -covermode=count -coverprofile coverage.out
# to be uploaded to codecov
cp coverage.out "$DIR/../coverage.txt"
go tool cover -func coverage.out > /tmp/coverage2.txt
coverPercent=$(grep total: /tmp/coverage2.txt | awk '{print $3}' | sed 's/%$//g')
echo "Current test coverage is at ${coverPercent}%"
echo "TODO add code coverage verdict"
angular
    .module('pulse')
    .controller('marketingActCtl', marketingActCtl);

/**
 * Controller for the marketing activity list page: loads the activity list
 * through the API service and initializes the begin/end datetime pickers.
 * BUGFIX: removed three leftover `debugger;` statements, which halt
 * execution whenever devtools are open.
 */
function marketingActCtl($scope, API, $location) {
    API.market(function (data) {
        $scope.actList = data;
    }, function (s) {
        // Request failed; message text kept exactly as before (user-facing).
        alert("get data error");
    });

    /*-== datetime pickers: start ==-*/
    var beginTime = $("#datetime_begin");
    var endTime = $("#datetime_end");
    // Initialize the datetime picker widgets (hour precision).
    beginTime.datetimepicker({
        format: 'Y-m-d H:00',
        lang: 'ch',
        onShow: function () {
        }
    });
    endTime.datetimepicker({
        format: 'Y-m-d H:00',
        lang: 'ch',
        onShow: function () {
        }
    });
    /*-== datetime pickers: end ==-*/
}
<reponame>OpenSundsvall/api-service-messaging
package se.sundsvall.messaging.integration.smssender;
import java.time.Duration;
import org.springframework.boot.context.properties.ConfigurationProperties;
import se.sundsvall.messaging.integration.AbstractRestIntegrationProperties;
import lombok.Getter;
import lombok.Setter;
/**
 * Configuration properties for the SMS sender integration, bound from the
 * "integration.sms-sender" prefix. Getters/setters are generated by Lombok.
 */
@Getter
@Setter
@ConfigurationProperties(prefix = "integration.sms-sender")
public class SmsSenderIntegrationProperties extends AbstractRestIntegrationProperties {
// Delay between poll attempts (default: 5 seconds).
private Duration pollDelay = Duration.ofSeconds(5);
// Maximum number of retries (default: 3).
private int maxRetries = 3;
}
|
module HardcodedEnumeration
  # Base class for hard-coded (in-source) enumerations. Subclasses declare
  # members with `enumerates :a, :b, ...`; each member gets a stable 1-based
  # id, lookup by symbol/id/string, and a generated `<member>?` predicate.
  class Base
    attr_reader :symbol
    attr_reader :id

    # Declares the enumeration members. Ids are assigned in declaration
    # order starting at 1; a `<symbol>?` predicate is defined per member.
    def self.enumerates(*symbol_list)
      @instances = []
      @instances_by_symbol = {}
      @instances_by_id = {}
      symbol_list.each_with_index do |symbol, id|
        instance = new(symbol, id + 1)
        @instances << instance
        @instances_by_id[instance.id] = instance
        @instances_by_symbol[instance.symbol] = instance
        define_method("#{symbol}?") do
          self == instance
        end
      end
    end

    # All declared members, in declaration order.
    def self.instances
      @instances
    end

    # Looks up a member by instance, symbol, integer id, or numeric string.
    # Returns nil when not found (or for unsupported key types).
    def self.find_by_key(key)
      case key
      when self then key
      when Symbol then @instances_by_symbol[key]
      when Integer then @instances_by_id[key]
      when String then @instances_by_id[key.to_i]
      end
    end

    # Like [], except that `find(:all)` returns every member.
    def self.find(key)
      key == :all ? @instances : self[key]
    end

    # Strict lookup: raises when no member matches.
    def self.[](key)
      # BUGFIX: the message used `self.class`, which inside a class method
      # is always `Class`; `self` is the enumeration class actually wanted.
      find_by_key(key) or raise ActiveRecord::RecordNotFound.new("No #{self} instance for #{key} #{key.class}")
    end

    # Equality against a symbol or another member of the same enumeration.
    def ==(key)
      # BUGFIX: RAILS_DEFAULT_LOGGER only exists in legacy Rails; without
      # the guard every comparison raised NameError elsewhere.
      RAILS_DEFAULT_LOGGER.debug "Comparing #{self.inspect} to #{key}(#{key.class})" if defined?(RAILS_DEFAULT_LOGGER)
      case key
      when Symbol then @symbol == key
      when self.class then @id == key.id
      else
        false
      end
    end

    def inspect
      "#<#{self.class}:#{self.object_id}(id: #{@id}, symbol: #{@symbol})>"
    end

    # Rails URL-parameter representation (the numeric id).
    def to_param
      id
    end

    def to_s
      symbol.to_s
    end

    def to_sym
      symbol
    end

    protected

    def initialize(symbol, id)
      @symbol = symbol
      @id = id
    end
  end
end
<filename>src/app/components/board/directive.js
import ctrl from './controller';
/**
 * <board> element directive: renders the board view in its own child scope.
 */
let Directive = () => {
  return {
    restrict: 'E',
    template: require('./view.html'),
    scope: true,
    link: (scope, element) => {
    },
    // BUGFIX: the controller was registered as ['', ctrl], which asks
    // Angular's injector for a service literally named '' and fails at
    // runtime. Register the controller function directly so its
    // parameters are resolved by name.
    controller: ctrl
  };
};

export default Directive;
|
# ---
# Category: String
# Since: 0.1.0
# Usage: echo <string> | l.trim_end.p [chars=[[:space:]]]
# Description: The pipeline version of l.trim_end
# Dependent: trim_end
# ---
l.trim_end.p() {
  local str
  IFS='' read -r str
  # "$@" expands to nothing when no extra arguments were given, so one
  # call covers both the default-chars and explicit-chars cases.
  l.trim_end "$str" "$@"
}
|
var React = require('react');
module.exports = React.createClass({
getInitialState: function() {
return (
{
phone: this.props.phone,
pin: "",
sendCodePressed: false,
verified: this.props.verified,
errors: []
});
},
handleInputChange: function(e) {
var obj = {},
key = e.target.id,
val = e.target.value;
obj[key] = val;
if (key === "phone")
obj["verified"] = false;
this.setState(obj);
},
handleSendCodeClicked: function(){
// regex phone
var phone = this.state.phone,
errors = [];
if (!/^\d+$/.test(phone))
errors.push("Phone must be digits only");
if (phone.length !== 10)
errors.push("Phone must be 10 digits only");
this.setState({errors: errors});
if (errors.length > 0)
return;
var api_resp = api_call("kitchenuser", {
method: "updatePhone",
session: Cookies.get("session"),
phone: phone
});
if (api_resp.Success)
this.setState({sendCodePressed: true});
},
handleVerifyClicked: function() {
var pin = this.state.pin;
if (!pin || pin.length < 1) {
this.setState({errors: ["Code cannot be empty"]});
return;
}
var api_resp = api_call("kitchenuser", {
method: "verifyPhone",
session: Cookies.get("session"),
pin: pin
});
var errors = [];
if (api_resp.Success)
this.props.success("phoneAdded");
else
errors.push(api_resp.Error);
this.setState({
verified: api_resp.Success,
errors: errors,
});
},
render: function() {
var s = this.state;
var fontsize = {"fontSize":"24px"};
var verifyButton =
<button className="c-blue transparent-bg"
onClick={this.handleVerifyClicked}
disabled={s.verified}
style={fontsize}>{
(s.verified)?
<i className="fa fa-check-circle-o active-green"></i> :
<i className="fa fa-arrow-circle-o-right"></i>
}</button>
var verifyInput = (s.sendCodePressed)?
<div className="input-group">
<input type="text"
placeholder="Verify Code"
value={s.pin}
id="pin"
className="text-field"
onChange={this.handleInputChange}></input>
<span className="input-group-btn">
{verifyButton}
</span>
</div> : "";
var sendPinText;
if (s.verified)
sendPinText = "Verified";
else if (s.sendCodePressed)
sendPinText = "Resend PIN";
else
sendPinText = "Send PIN";
return (
<div>
<div className="input-group">
<input type="text"
placeholder="1234567890"
value={s.phone}
id="phone"
className="text-field"
onChange={this.handleInputChange}></input>
<span className="input-group-btn">
<button className="brand-btn"
onClick={this.handleSendCodeClicked}
disabled={s.verified}>{sendPinText}</button>
</span>
</div>
{verifyInput}
<ul className="error-field text-left">
{(s.errors)?
s.errors.map(function(error){return <li>{error}</li>}) : ""}
</ul>
</div>);
}
}); |
// Localized string bundle for the PropertiesValidators web part.
declare interface IPropertiesValidatorsStrings {
PropertyPaneDescription: string;
BasicGroupName: string;
DescriptionFieldLabel: string;
}
// Ambient module mapping the bundle name to the typed strings object.
declare module 'propertiesValidatorsStrings' {
const stringsValidators: IPropertiesValidatorsStrings;
export = stringsValidators;
}
|
<reponame>MarceloFigueira/erxes-widgets
import gql from "graphql-tag";
import * as React from "react";
import { ChildProps, graphql } from "react-apollo";
import { Articles as DumbArticles } from "../components";
import { connection } from "../connection";
import { IKbArticle } from "../types";
import { AppConsumer } from "./AppContext";
import queries from "./graphql";
// Shape of the GraphQL query result consumed below.
type QueryResponse = {
knowledgeBaseArticles: IKbArticle[];
};
// Presentational wrapper: renders nothing while the query is loading or when
// no data is available, otherwise forwards the fetched articles.
const Articles = (props: ChildProps<{}, QueryResponse>) => {
const { data } = props;
if (!data || data.loading) {
return null;
}
const extendedProps = {
articles: data.knowledgeBaseArticles || []
};
return <DumbArticles {...extendedProps} />;
};
// Binds Articles to the KB article search query; "network-only" so results
// always reflect the latest search string rather than the cache.
const WithData = graphql<{ searchString: string }, QueryResponse>(
gql(queries.kbSearchArticlesQuery),
{
options: ownProps => ({
fetchPolicy: "network-only",
variables: {
topicId: connection.setting.topic_id,
searchString: ownProps.searchString
}
})
}
)(Articles);
// Supplies the current search string from the application context.
const WithContext = () => (
<AppConsumer>
{({ searchString }) => <WithData searchString={searchString} />}
</AppConsumer>
);
export default WithContext;
|
import babel from 'rollup-plugin-babel';
import url from '@rollup/plugin-url';
import clear from 'rollup-plugin-clear';
import postcss from 'rollup-plugin-postcss';
import localResolve from 'rollup-plugin-local-resolve';
import {base64} from '@webex/common';
// Rollup build configuration: bundles src/index.js into ES modules under
// ./es, with CSS modules, inlined small assets, and Babel (React presets
// plus legacy decorator support).
export default {
plugins: [
// Clears the destination directory before building
clear({
// required, point out which directories should be clear.
targets: ['es']
}),
// Finds the index.js file when importing via folder
localResolve(),
// Convert css to css modules
postcss({
modules: {
generateScopedName: (name, filename, css) => {
let componentName;
// Short content hash keeps scoped class names stable per stylesheet.
const cssHash = base64.encode(css).substring(0, 8);
const paths = filename.split('/');
// Prefer the package name under @ciscospark/, falling back to @webex/.
let index = paths.indexOf('@ciscospark');
if (index === -1) {
index = paths.indexOf('@webex');
}
if (index !== -1) {
componentName = paths[index + 1];
}
else {
componentName = filename;
}
return `${componentName}__${name}__${cssHash}`;
}
},
// Don't use sass loader due to momentum-ui issues
use: [],
config: false
}),
// Inline images
url({
limit: 100 * 1024 // inline files < 100k, copy files > 100k
}),
// Audio files for ringtones
url({
limit: 0,
include: ['**/*.mp3']
}),
babel({
babelrc: false,
exclude: 'node_modules/**',
plugins: [
[
// Support for @autobind decorators
'@babel/plugin-proposal-decorators',
{
legacy: true
}
],
'@babel/plugin-proposal-object-rest-spread',
'@babel/plugin-transform-exponentiation-operator'
],
presets: [
'@babel/preset-react'
]
})
],
input: 'src/index.js',
output: [{
dir: 'es',
format: 'esm',
sourcemap: true
}],
// Peer dependencies are left unbundled for consumers to provide.
external: ['react', 'react-dom', 'prop-types', 'classnames', '@momentum-ui/react']
};
|
// Holds SIP transport configuration and optionally registers with a proxy.
public class SIPHandler {
// Local port the SIP side listens on.
private int localSipPort;
// NAT-facing IP; presumably substituted into SDP when fixSdpForNat is set — TODO confirm.
private String natIp;
// SIP transport protocol identifier (semantics depend on callers — not shown here).
private String transport;
// Whether SDP bodies should be rewritten for NAT traversal.
private boolean fixSdpForNat;
// Proxy address; 0.0.0.0 is a sentinel meaning "do not register".
private NetworkAddress proxyAddr;
/**
 * Creates a SIP handler with the given transport/NAT/proxy settings.
 * The proxy address string is parsed into a NetworkAddress immediately.
 */
public SIPHandler(int _localSipPort, String _natIp, String _transport, boolean fix_sdp_for_nat, String _proxyAddr) {
localSipPort = _localSipPort;
natIp = _natIp;
transport = _transport;
fixSdpForNat = fix_sdp_for_nat;
proxyAddr = new NetworkAddress(_proxyAddr);
// NOTE: It's ok for proxyAddr to be 0.0.0.0, just means we won't try to register!
}
/**
 * Starts registration with the configured proxy, unless the proxy address
 * is the 0.0.0.0 sentinel (in which case registration is skipped).
 */
public void registerWithProxy() {
if (!proxyAddr.getIpAddress().equals("0.0.0.0")) {
// Initiate registration process with the proxy server
// Add your registration logic here
System.out.println("Registering with proxy server at: " + proxyAddr.getIpAddress());
} else {
System.out.println("Proxy address is 0.0.0.0, skipping registration");
}
}
// Add other methods and member variables as needed for the SIPHandler class
}
#!/bin/sh
# Abort on errors and on use of unset variables.
set -eu
# Compile a field-arithmetic implementation with curve parameters injected
# via -D macros (modulus 2^285 - 9, 5 limbs of 57 bits, a24 = 12345 per the
# macro values below). All extra arguments (sources, -o, ...) pass through
# to clang via "$@".
clang -fbracket-depth=999999 -march=native -mtune=native -std=gnu11 -O3 -flto -fuse-ld=lld -fomit-frame-pointer -fwrapv -Wno-attributes -fno-strict-aliasing -Da24_hex='0x3039' -Da24_val='12345' -Da_minus_two_over_four_array='{0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x30,0x39}' -Dbitwidth='64' -Dlimb_weight_gaps_array='{57,57,57,57,57}' -Dmodulus_array='{0x1f,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xf7}' -Dmodulus_bytes_val='36' -Dmodulus_limbs='5' -Dq_mpz='(1_mpz<<285) - 9' "$@"
class Funnel:
    """A leaky funnel: holds up to ``capacity`` units of liquid and drains
    at ``leaking_rate`` units per unit of time."""

    def __init__(self, capacity, leaking_rate):
        self.capacity = capacity          # maximum liquid the funnel can hold
        self.leaking_rate = leaking_rate  # drain speed (units per time unit)
        self.liquid_level = 0             # current contents, kept in [0, capacity]

    def leak(self, time):
        """Drain the funnel for ``time`` time units; the level never drops below 0."""
        leaked_amount = self.leaking_rate * time
        self.liquid_level = max(0, self.liquid_level - leaked_amount)

    def fill(self, amount):
        """Add ``amount`` of liquid, clamped to the funnel's capacity."""
        self.liquid_level = min(self.capacity, self.liquid_level + amount)

    def get_level(self):
        """Return the current liquid level."""
        return self.liquid_level


funnels = {}  # Pre-initialized dictionary to store funnels, keyed by caller key


def create_or_get_funnel(key, capacity, leaking_rate):
    """Return the funnel registered under ``key``, creating it on first use.

    Note: ``capacity``/``leaking_rate`` only apply when the funnel is first
    created; an existing funnel's parameters are left untouched.
    """
    funnel = funnels.get(key)
    # BUGFIX(idiom): `if not funnel` would also treat a falsy-but-present
    # entry as missing; test for absence explicitly.
    if funnel is None:
        funnel = Funnel(capacity, leaking_rate)
        funnels[key] = funnel
    return funnel
package com.github.paolorotolo.gitty_reporter;
import android.app.Activity;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.os.AsyncTask;
import android.os.Build;
import androidx.appcompat.app.AlertDialog;
import org.eclipse.egit.github.core.Issue;
import org.eclipse.egit.github.core.client.GitHubClient;
import org.eclipse.egit.github.core.service.IssueService;
import java.io.IOException;
// AsyncTask that files a GitHub issue off the UI thread while showing a
// modal "please wait" dialog. Params (in order): user, password, bug title,
// bug description, device info, target user, target repository, extra info,
// OAuth token. Result String: "ok" on success, otherwise the exception's
// toString(), which onPostExecute matches against known error strings.
class reportIssue extends AsyncTask<String, Integer, String> {
private final Context mContext;
private final GittyReporter mActivity;
private ProgressDialog progress;
reportIssue(Context context, GittyReporter activity){
mContext = context;
mActivity = activity;
}
// Runs in UI before background thread is called
@Override
protected void onPreExecute() {
super.onPreExecute();
// Indeterminate progress dialog shown for the duration of the upload.
progress = ProgressDialog.show(mContext, mContext.getResources().getString(R.string.dialog_progress_pleaseWait_title),
mContext.getString(R.string.dialog_progress_pleaseWait_message), true);
}
// This is run in a background thread
@Override
protected String doInBackground(String... params) {
// get the string from params, which is an array
String user = params[0];
String password = params[1];
String bugTitle = params[2];
String bugDescription = params[3];
String deviceInfo = params[4];
String targetUser = params[5];
String targetRepository = params[6];
String extraInfo = params[7];
String gitToken = params[8];
IssueService service;
// An empty user name means "authenticate with the OAuth token" instead of
// username/password credentials.
if (user.equals("")) {
service = new IssueService(new GitHubClient().setOAuth2Token(gitToken));
} else {
service = new IssueService(new GitHubClient().setCredentials(user, password));
}
Issue issue = new Issue().setTitle(bugTitle).setBody(bugDescription + "\n\n" + deviceInfo + mContext.getString(R.string.issue_extraInfo) + extraInfo);
try {
service.createIssue(targetUser, targetRepository, issue);
//noinspection HardCodedStringLiteral
return "ok";
} catch (IOException e) {
e.printStackTrace();
// The exception text doubles as the result code checked in onPostExecute.
return e.toString();
}
}
// This is called from background thread but runs in UI
@Override
protected void onProgressUpdate(Integer... values) {
super.onProgressUpdate(values);
}
// This runs in UI when background thread finishes
@Override
protected void onPostExecute(String result) {
super.onPostExecute(result);
// NOTE(review): matching the exception's exact toString() for the 401 case
// is fragile — it breaks if the library ever changes its message text.
switch (result) {
//noinspection HardCodedStringLiteral
case "ok":
progress.dismiss();
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
mActivity.showDoneAnimation();
} else {
((Activity) mContext).finish();
}
break;
//noinspection HardCodedStringLiteral
case "org.eclipse.egit.github.core.client.RequestException: Bad credentials (401)":
progress.dismiss();
// Bad credentials: tell the user and let them retry without closing.
new AlertDialog.Builder(mContext)
.setTitle(mContext.getResources().getString(R.string.report_unableToSendReport_title))
.setMessage(mContext.getResources().getString(R.string.report_unableToSendReport_messageBadCredentials))
.setPositiveButton(mContext.getResources().getString(R.string.report_unableToSendReport_button_tryAgain), new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
// do nothing
}
})
.setIcon(R.drawable.gittyreporter_ic_mood_bad_black_24dp)
.show();
break;
default:
progress.dismiss();
// Unexpected failure: inform the user, then close the activity on OK.
new AlertDialog.Builder(mContext)
.setTitle(mContext.getResources().getString(R.string.report_unableToSendReport_title))
.setMessage(R.string.report_unableToSendReport_messageUnexpectedError)
.setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
((Activity) mContext).finish();
}
})
.setIcon(R.drawable.gittyreporter_ic_mood_bad_black_24dp)
.show();
break;
}
}
}
package org.rs2server.rs2.model.map.path.astar;
import com.google.common.collect.ImmutableList;
import org.rs2server.rs2.model.Location;
import org.rs2server.rs2.model.Mob;
import org.rs2server.rs2.model.map.RegionClipping;
import org.rs2server.rs2.model.map.path.ClippingFlag;
import org.rs2server.rs2.model.map.path.PathFinder;
import org.rs2server.rs2.model.map.path.PathPrecondition;
import org.rs2server.rs2.model.map.path.TilePath;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
// A* path finder operating on the 104x104 local-region tile grid, using
// Manhattan-distance heuristics and region clipping masks for walkability.
public class AStarPathFinder implements PathFinder {
/**
 * Represents a node used by the A* algorithm.
 *
 * @author <NAME>
 */
private static class Node implements Comparable<Node> {
/**
 * The cost (initialized high so any discovered route is cheaper).
 */
private int cost = 1000;
/**
 * The parent node.
 */
private Node parent = null;
/**
 * The x coordinate.
 */
private final int x;
/**
 * The y coordinate.
 */
private final int y;
/**
 * Creates a node.
 *
 * @param x The x coordinate.
 * @param y The y coordinate.
 */
public Node(int x, int y) {
this.x = x;
this.y = y;
}
public int getCost() {
return cost;
}
/**
 * Gets the parent node.
 *
 * @return The parent node.
 */
public Node getParent() {
return parent;
}
/**
 * Gets the X coordinate.
 *
 * @return The X coordinate.
 */
public int getX() {
return x;
}
/**
 * Gets the Y coordinate.
 *
 * @return The Y coordinate.
 */
public int getY() {
return y;
}
public void setCost(int cost) {
this.cost = cost;
}
/**
 * Sets the parent.
 *
 * @param parent The parent.
 */
public void setParent(Node parent) {
this.parent = parent;
}
// NOTE(review): this ordering is inconsistent with equals/hashCode (which
// stay identity-based). Harmless here because nodes are canonical
// instances from the nodes[][] array and no sorted collection is used.
@Override
public int compareTo(Node node) {
return cost < node.cost ? 1 : cost > node.cost ? -1 : 0;
}
}
private static final Logger logger = LoggerFactory.getLogger(AStarPathFinder.class);
// Hard cap on the number of tiles a returned path may contain.
private static final int MAX_PATH_SIZE = 512;
// Hard cap on search iterations, bounding worst-case search cost.
private static final int MAX_ITERATIONS = 2056;
// Best (lowest) heuristic distance to the destination seen so far, and the
// node where it was seen — used as a fallback target when the real
// destination turns out to be unreachable.
private int lowestH = 999999;
private Node lowestN;
// Current search destination (reassigned when a precondition fires).
private Node dest;
// Conditions that, when satisfied at a tile, end the search early there.
private final List<PathPrecondition> preconditions;
// Closed set of already-expanded nodes.
private final Set<Node> closed = new HashSet<Node>();
// One canonical Node per local-region tile (104x104 region space).
private final Node[][] nodes = new Node[104][104];
// Open set of discovered-but-not-yet-expanded nodes.
private final Set<Node> open = new HashSet<>();
// Node currently being expanded.
private Node current;
public AStarPathFinder() {
this(ImmutableList.of());
}
public AStarPathFinder(final List<PathPrecondition> preconditions) {
this.preconditions = preconditions;
for (int x = 0; x < 104; x++) {
for (int y = 0; y < 104; y++) {
nodes[x][y] = new Node(x, y);
}
}
}
/**
 * Estimates a distance between the two points (Manhattan distance).
 *
 * @param src The source node.
 * @param dst The distance node.
 * @return The distance.
 */
public int estimateDistance(Node src, Node dst) {
int deltaX = src.getX() - dst.getX();
int deltaY = src.getY() - dst.getY();
return Math.abs(deltaX) + Math.abs(deltaY);
}
/**
 * Opens or closes the given node depending on the cost. Also tracks the
 * expanded node closest (by heuristic) to the destination so that a
 * partial path can be returned when the destination is unreachable.
 *
 * @param n The neighbour node being examined.
 */
private void examineNode(Node n) {
int heuristic = estimateDistance(current, n);
int nextStepCost = current.getCost() + heuristic;
int hEnd = estimateDistance(current, dest);
if (hEnd < lowestH) {
lowestH = hEnd;
lowestN = current;
}
// A cheaper route to n was found: re-open it for re-examination.
if (nextStepCost < n.getCost()) {
open.remove(n);
closed.remove(n);
}
if (!open.contains(n) && !closed.contains(n)) {
n.setParent(current);
n.setCost(nextStepCost);
open.add(n);
}
}
@Override
public List<Location> findPath(Location source, Location destination) {
final int srcX = source.getLocalX();
final int srcY = source.getLocalY();
final int dstX = destination.getLocalX(source);
final int dstY = destination.getLocalY(source);
// Both endpoints must lie inside the 104x104 local region.
if (srcX < 0 || srcY < 0 || srcX >= 104 || srcY >= 104 || dstX < 0 || dstY < 0 || dstX >= 104 || dstY >= 104) {
logger.info("Source or destination coordinates outside of region space!");
return null;
}
if (srcX == dstX && srcY == dstY) {
return ImmutableList.of(source);
}
// The base region coordinates (bottom left of the region?).
Location regionBase = Location.create((source.getRegionX() - 6) << 3, (source.getRegionY() - 6) << 3, source.getZ());
final int z = regionBase.getZ();
Node src = nodes[srcX][srcY];
src.setCost(0);
this.dest = nodes[dstX][dstY];
open.add(src);
int iterations = 0;
final long startTime = System.currentTimeMillis();
search:
while (open.size() > 0 && ++iterations < MAX_ITERATIONS) {
current = getLowestCostOpenNode();
if (current == dest) {
break;
}
open.remove(current);
closed.add(current);
final int x = current.getX();
final int y = current.getY();
final int absX = regionBase.getX() + x;
final int absY = regionBase.getY() + y;
// Terminate the path if any preconditions are met
for (final PathPrecondition precondition : preconditions) {
if (precondition.targetReached(absX, absY, destination.getX(), destination.getY())) {
// Since the precondition is satisfied at the current location, this is now the destination.
dest = current;
open.clear();
break search;
}
}
// Cardinal neighbours: a step is allowed when the target tile's clip
// mask does not block entry from this side.
if (y > 0 && ClippingFlag.BLOCK_NORTH.and(RegionClipping.getClippingMask(absX, absY - 1, z)) == 0) {
Node n = nodes[x][y - 1];
examineNode(n);
}
if (x > 0 && ClippingFlag.BLOCK_EAST.and(RegionClipping.getClippingMask(absX - 1, absY, z)) == 0) {
Node n = nodes[x - 1][y];
examineNode(n);
}
if (y < 104 - 1 && ClippingFlag.BLOCK_SOUTH.and(RegionClipping.getClippingMask(absX, absY + 1, z)) == 0) {
Node n = nodes[x][y + 1];
examineNode(n);
}
if (x < 104 - 1 && ClippingFlag.BLOCK_WEST.and(RegionClipping.getClippingMask(absX + 1, absY, z)) == 0) {
Node n = nodes[x + 1][y];
examineNode(n);
}
// NOTE(review): the masks below are referenced only by the commented-out
// checks inside the diagonal branches; kept to match them.
int mask = RegionClipping.getClippingMask(absX, absY, z);
int maskN = RegionClipping.getClippingMask(absX, absY + 1, z);
int maskNE = RegionClipping.getClippingMask(absX + 1, absY + 1, z);
int maskE = RegionClipping.getClippingMask(absX + 1, absY, z);
int maskSE = RegionClipping.getClippingMask(absX + 1, absY - 1, z);
int maskS = RegionClipping.getClippingMask(absX, absY - 1, z);
int maskSW = RegionClipping.getClippingMask(absX - 1, absY - 1, z);
int maskW = RegionClipping.getClippingMask(absX - 1, absY, z);
int maskNW = RegionClipping.getClippingMask(absX - 1, absY + 1, z);
// Diagonal steps additionally require both adjacent cardinal tiles to
// be enterable, so the path cannot cut corners through walls.
// South-west
if (x > 0 && y > 0) {
if (ClippingFlag.BLOCK_NORTH_EAST.and(RegionClipping.getClippingMask(absX - 1, absY - 1, z)) == 0
&& ClippingFlag.BLOCK_EAST.and(RegionClipping.getClippingMask(absX - 1, absY, z)) == 0
&& ClippingFlag.BLOCK_NORTH.and(RegionClipping.getClippingMask(absX, absY - 1, z)) == 0
/*&& ClippingFlag.BLOCK_SOUTH_WEST.and(mask) == 0
&& ClippingFlag.BLOCK_SOUTH.and(mask) == 0
&& ClippingFlag.BLOCK_WEST.and(mask) == 0*/) {
Node n = nodes[x - 1][y - 1];
examineNode(n);
}
}
// North-west
if (x > 0 && y < 104 - 1) {
if (ClippingFlag.BLOCK_SOUTH_EAST.and(RegionClipping.getClippingMask(absX - 1, absY + 1, z)) == 0
&& ClippingFlag.BLOCK_EAST.and(RegionClipping.getClippingMask(absX - 1, absY, z)) == 0
&& ClippingFlag.BLOCK_SOUTH.and(RegionClipping.getClippingMask(absX, absY + 1, z)) == 0
/*&& ClippingFlag.BLOCK_NORTH_WEST.and(mask) == 0
&& ClippingFlag.BLOCK_NORTH.and(mask) == 0
&& ClippingFlag.BLOCK_WEST.and(mask) == 0*/) {
Node n = nodes[x - 1][y + 1];
examineNode(n);
}
}
// South-east
if (x < 104 - 1 && y > 0) {
if (ClippingFlag.BLOCK_NORTH_WEST.and(RegionClipping.getClippingMask(absX + 1, absY - 1, z)) == 0
&& ClippingFlag.BLOCK_WEST.and(RegionClipping.getClippingMask(absX + 1, absY, z)) == 0
&& ClippingFlag.BLOCK_NORTH.and(RegionClipping.getClippingMask(absX, absY - 1, z)) == 0
/*&& ClippingFlag.BLOCK_SOUTH_EAST.and(mask) == 0
&& ClippingFlag.BLOCK_SOUTH.and(mask) == 0
&& ClippingFlag.BLOCK_EAST.and(mask) == 0*/) {
Node n = nodes[x + 1][y - 1];
examineNode(n);
}
}
// North-east
if (x < 104 - 1 && y < 104 - 1) {
if (ClippingFlag.BLOCK_SOUTH_WEST.and(RegionClipping.getClippingMask(absX + 1, absY + 1, z)) == 0
&& ClippingFlag.BLOCK_WEST.and(RegionClipping.getClippingMask(absX + 1, absY, z)) == 0
&& ClippingFlag.BLOCK_SOUTH.and(RegionClipping.getClippingMask(absX, absY + 1, z)) == 0
/*&& ClippingFlag.BLOCK_NORTH_EAST.and(mask) == 0
&& ClippingFlag.BLOCK_NORTH.and(mask) == 0
&& ClippingFlag.BLOCK_EAST.and(mask) == 0*/) {
Node n = nodes[x + 1][y + 1];
examineNode(n);
}
}
}
final long duration = System.currentTimeMillis() - startTime;
if (duration > 30) {
logger.warn("Path search with {} iterations took {}ms.", iterations, duration);
}
// Destination never reached: fall back to the closest node found.
if (dest.getParent() == null) {
dest = lowestN;
}
if (dest == null || dest.getParent() == null) {
return null;
}
final LinkedList<Location> nodeList = new LinkedList<>();
int prependedNodes = 0;
// Walks the list backwards, from destination to source, and constructs a path.
Node node = dest;
while (node != null && dest != src) {
if (++prependedNodes >= MAX_PATH_SIZE) {
break;
}
int absX = regionBase.getX() + node.getX();
int absY = regionBase.getY() + node.getY();
nodeList.addFirst(Location.create(absX, absY, z));
node = node.getParent();
}
return nodeList;
}
// Not implemented by this path finder; always returns null.
@Override
public TilePath findPath(Mob mob, Location base, int srcX, int srcY, int dstX, int dstY, int z, int radius, boolean running, boolean ignoreLastStep, boolean moveNear) {
return null;
}
// Linear scan of the open set for the cheapest node (no priority queue).
private Node getLowestCostOpenNode() {
Node curLowest = null;
for (Node n : open) {
if (curLowest == null) {
curLowest = n;
} else {
if (n.getCost() < curLowest.getCost()) {
curLowest = n;
}
}
}
return curLowest;
}
}
|
#!/bin/bash
# Copyright 2019 The KubeEdge Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -o errexit
set -o nounset
set -o pipefail
# Repository root (two levels up), resolved without CDPATH interference.
SCRIPT_ROOT=$(unset CDPATH && cd $(dirname "${BASH_SOURCE[0]}")/../.. && pwd)
# Generate deepcopy/client/informer/lister code for the devices and
# reliablesyncs v1alpha1 API groups, stamping the boilerplate header.
${SCRIPT_ROOT}/cloud/hack/generate-groups.sh "deepcopy,client,informer,lister" \
github.com/kubeedge/kubeedge/cloud/pkg/client github.com/kubeedge/kubeedge/cloud/pkg/apis \
"devices:v1alpha1 reliablesyncs:v1alpha1" \
--go-header-file ${SCRIPT_ROOT}/cloud/hack/boilerplate/boilerplate.txt
|
#!/bin/bash
# Regenerates the Go protobuf bindings for every .proto file found under
# <dir>/proto. Usage: script [dir] (defaults to the current directory).
# Exit on any command failure
set -e
DIR=$(pwd)
if [ ! -z "$1" ]; then
DIR="$1"
fi
if [ "$DIR" == "." ]; then
DIR=$(pwd)
fi
# Check DIR exists
# BUGFIX: the error paths below used a bare `exit`, which exits with status
# 0 and hides the failure from callers; exit 1 instead.
if [ ! -d "$DIR" ]; then
echo "Directory not found: $DIR"
exit 1
fi
# Make sure proto folder exists
PROTO_DIR=${DIR}/proto
if [ ! -d "$PROTO_DIR" ]; then
echo "No proto folder found: $PROTO_DIR"
exit 1
fi
# Find SRCPATH (the GOPATH-style src root five levels up)
SRCPATH=$(cd "$DIR" && cd ../../../../.. && pwd)
if [ ! -d "$SRCPATH/github.com/hailocab/" ]; then
echo "Invalid SRCPATH: $SRCPATH"
exit 1
fi
# Wrap our exit with a message
# Clean up when we exit for any reason
function trap_handler {
MYSELF="$0" # equals to my script name
LASTLINE="$1" # argument 1: last line of error occurence
LASTERR="$2" # argument 2: error code of last command
echo "Error: line ${LASTLINE} - exit status of last command: ${LASTERR}"
exit $2
}
# BUGFIX: the handler was installed as 'trap_handler ${LINENO} ${$?}';
# "${$?}" is a bad substitution that made the ERR trap itself fail. Pass $?.
trap 'trap_handler ${LINENO} $?' ERR
# Check we have the dependencies to actually build things
echo "- Checking dependencies..."
which protoc
which protoc-gen-go
echo "- Building protobuf code..."
rm -rf "${PROTO_DIR}/java"
# Delete .pb.go files if they exist
# Turns out xargs on a Mac doesn't have the -r flag as BSD doesn't need it
# FreeBSD does have this to ensure GNU compatibility but ignores it
unamestr=$(uname)
if [[ "$unamestr" == 'Darwin' ]]; then
find "${PROTO_DIR}" -name '*.pb.go' | xargs rm -f
else
find "${PROTO_DIR}" -name '*.pb.go' | xargs -r rm -f
fi
# List, then compile, every .proto file.
find "${PROTO_DIR}" -name '*.proto' -exec echo {} \;
find "${PROTO_DIR}" -name '*.proto' -exec protoc -I"$SRCPATH" --go_out="${SRCPATH}" {} \;
# Report which generated files changed (if any)
CHANGES=$(cd "$PROTO_DIR" && git diff --name-only)
[ ! -z "$CHANGES" ] && echo "- Changes" && echo "$CHANGES" | grep --color=never proto
echo "DONE"
// Debug/demo output only: prints "mostrando" ("showing") to the console.
console.log('mostrando');
/** A human-readable size split into its numeric part and unit (both kept as strings). */
export type HumanSize = {
  size: string
  unit: string
}

/** Identifies a GitHub repository by owner login and repository name. */
export type RepoInfo = {
  owner: string
  name: string
}

/** Fragment of a GitHub GraphQL (v4) response carrying a repository's disk usage. */
export type PartialGitHubRepo = {
  data: {
    repository: {
      diskUsage: number
    }
  }
}

/** Fragment of a GitHub REST (v3) repository response carrying its size. */
export type PartialGitHubRepoRestV3 = {
  size: number
}
|
<reponame>leongaban/redux-saga-exchange<gh_stars>1-10
/** Request body for making a payout. */
export interface IMakePayoutRequest {
  assetId: string;
  amount: number;
  // Only the liquidity-pool payment system is representable here.
  paymentSystem: 'LiquidityPool';
}

/** Request body for generating a PandaDoc document for a user. */
export interface IPandaDocsRequest {
  firstName: string;
  lastName: string;
  email: string;
  amount: number;
  address: string;
}

/** Response carrying the generated PandaDoc document's URL and id. */
export interface IPandaDocsResponse {
  pandaDocUrl: string;
  pandaDocId: string;
}
|
from arango import ArangoClient
class ArangoDBManager:
    """Holds an ArangoDB client/database pair configured from a Flask-style app.

    The connection is only established when both ``ARANGODB_HOST`` and
    ``ARANGODB_DB`` are present in ``app.config``.
    """

    def __init__(self, app):
        self.app = app
        self.client = None
        self.db = None

    def connect(self):
        """Create the client and select the database, if configured."""
        config = self.app.config
        host = config.get("ARANGODB_HOST", None)
        db_name = config.get("ARANGODB_DB", None)
        if not (host and db_name):
            # Missing configuration: silently leave client/db unset,
            # matching the original behavior.
            return
        self.client = ArangoClient(host=host)
        self.db = self.client.db(db_name)
        print("Connected to ArangoDB")

    def teardown(self):
        """Close the client connection if one was opened."""
        if self.client:
            self.client.close()
            print("ArangoDB connection closed")
# Example usage:
# Assuming app is a Flask app instance
# arango_manager = ArangoDBManager(app)
# arango_manager.connect()
# ...
# arango_manager.teardown() |
#!/usr/bin/env bash
#######################################
# Invokes auction.undelegate entry point.
# Globals:
#   NCTL - path to nctl home directory.
# Arguments (name=value form):
#   net       - Network ordinal identifier (int).
#   node      - Node ordinal identifier (int).
#   user      - User (delegator) ordinal identifier (int).
#   validator - Validator node ordinal identifier (int).
#   amount    - Amount to undelegate (motes).
#   gas       - Gas price.
#   payment   - Payment amount.
#######################################

#######################################
# Destructure input args.
#######################################

# Unset to avoid parameter collisions.
unset amount
unset gas
unset net
unset node
unset payment
unset user
unset validator

# Destructure name=value arguments.
for ARGUMENT in "$@"
do
    KEY=$(echo $ARGUMENT | cut -f1 -d=)
    VALUE=$(echo $ARGUMENT | cut -f2 -d=)
    case "$KEY" in
        amount) amount=${VALUE} ;;
        gas) gas=${VALUE} ;;
        net) net=${VALUE} ;;
        node) node=${VALUE} ;;
        payment) payment=${VALUE} ;;
        user) user=${VALUE} ;;
        validator) validator=${VALUE} ;;
        *)
    esac
done

# Set defaults.
amount=${amount:-$NCTL_DEFAULT_AUCTION_DELEGATE_AMOUNT}
payment=${payment:-$NCTL_DEFAULT_GAS_PAYMENT}
gas=${gas:-$NCTL_DEFAULT_GAS_PRICE}
net=${net:-1}
node=${node:-1}
user=${user:-1}
validator=${validator:-1}

#######################################
# Main
#######################################

# Import utils.
source $NCTL/sh/utils/misc.sh

# Set deploy params.
delegator_public_key=$(get_account_key $net $NCTL_ACCOUNT_TYPE_USER $user)
delegator_secret_key=$(get_path_to_secret_key $net $NCTL_ACCOUNT_TYPE_USER $user)
delegator_purse_uref="TODO"
node_address=$(get_node_address_rpc $net $node)
path_contract=$(get_path_to_contract $net "undelegate.wasm")
validator_public_key=$(get_account_key $net $NCTL_ACCOUNT_TYPE_NODE $validator)

# Inform.
log "dispatching deploy -> undelegate.wasm"
log "... network = $net"
log "... node = $node"
log "... node address = $node_address"
log "... contract = $path_contract"
log "... delegator id = $user"
log "... delegator secret key = $delegator_secret_key"
log "... amount = $amount"

# Dispatch deploy.
# Fix: the unbond_purse session arg referenced the undefined variable
# $user_purse_uref and used a malformed type tag "uref-"; it now passes
# $delegator_purse_uref with the "uref" type tag.
deploy_hash=$(
    $(get_path_to_client $net) put-deploy \
        --chain-name casper-net-$net \
        --gas-price $gas \
        --node-address $node_address \
        --payment-amount $payment \
        --secret-key $delegator_secret_key \
        --session-arg "amount:u512='$amount'" \
        --session-arg "delegator:public_key='$delegator_public_key'" \
        --session-arg "validator:public_key='$validator_public_key'" \
        --session-arg "unbond_purse:uref='$delegator_purse_uref'" \
        --session-path $path_contract \
        --ttl "1day" \
        | jq '.result.deploy_hash' \
        | sed -e 's/^"//' -e 's/"$//'
)

# Display deploy hash.
log "deploy dispatched:"
log "... deploy hash = $deploy_hash"
|
#!/bin/bash -ex
# Work in progress: create a list of commands necessary to get Stackage
# up-and-running on a freshly installed Debian-based system (including Ubuntu).
# Quick start:
# wget -O - https://raw.github.com/fpco/stackage/master/debian-bootstrap.sh | bash -ex
# NOTE: Requires that GHC and Cabal are installed and on your PATH. For
# instructions, see:
# http://www.stackage.org/install
add-apt-repository -y ppa:zoogie/sdl2-snapshots
add-apt-repository -y ppa:marutter/rrutter
add-apt-repository -y ppa:openstack-ubuntu-testing/icehouse
apt-get update
apt-get install -y \
build-essential \
freeglut3-dev \
git \
libadns1-dev \
libblas-dev \
libbz2-dev \
libcairo2-dev \
libcurl4-openssl-dev \
libdevil-dev \
libedit-dev \
libedit2 \
libfftw3-dev \
libfreenect-dev \
libgd2-xpm-dev \
libglib2.0-dev \
libglu1-mesa-dev \
libgmp3-dev \
libgsasl7-dev \
libgsl0-dev \
libgtk-3-dev \
libgtk2.0-dev \
libhidapi-dev \
libicu-dev \
libjudy-dev \
liblapack-dev \
libleveldb-dev \
liblzma-dev \
libmagic-dev \
libmysqlclient-dev \
libncurses-dev \
libnotify-dev \
libopenal-dev \
libpango1.0-dev \
libpcap0.8-dev \
libphash0-dev \
libpq-dev \
libsdl2-dev \
libsnappy-dev \
libsndfile1-dev \
libsqlite3-dev \
libssl-dev \
libtagc0-dev \
libtre-dev \
libudev-dev \
libusb-1.0-0-dev \
libxau-dev \
libxml2-dev \
libxss-dev \
libyaml-dev \
libzmq3-dev \
llvm \
m4 \
nodejs \
npm \
r-base \
r-base-dev \
texlive-full \
wget \
zip \
zlib1g-dev
# Build and install GNU nettle 2.7.1 from source, in a throwaway directory.
# The subshell keeps the caller's working directory untouched.
# Fix: mkdir -p so a re-run does not abort the script (it runs under -e).
mkdir -p /tmp/nettle-build
(
    cd /tmp/nettle-build
    wget https://ftp.gnu.org/gnu/nettle/nettle-2.7.1.tar.gz
    tar zxf nettle-2.7.1.tar.gz
    cd nettle-2.7.1
    ./configure --prefix=/usr
    make
    make install
    # Some packages look for the library under the multiarch path.
    mkdir -p /usr/lib/x86_64-linux-gnu/
    ln -sfv /usr/lib/libnettle.so.4.7 /usr/lib/x86_64-linux-gnu/libnettle.so.4
)
rm -rf /tmp/nettle-build
|
#!/usr/bin/env bash
# Airflow setup
# Constants
# airflow_0.sh is expected to define $env_name and $AIRFLOW_HOME used below.
source airflow_0.sh
# Activate conda environment
eval "$(conda shell.bash hook)"
conda activate "$env_name"
# Initialize the database
# NOTE(review): `airflow initdb` is the pre-2.0 command — confirm the pinned
# Airflow version (2.x renamed it to `airflow db init`).
pushd "${AIRFLOW_HOME}" 1>/dev/null
airflow initdb
popd 1>/dev/null
|
# Verify every bundled PhantomJS binary exists; exit 1 on the first missing one.
DATA_DIR="../../src/python/marbles/newsfeed/data"
for binary in phantomjs-linux phantomjs-ubuntu phantomjs-osx; do
    [ -e "$DATA_DIR/$binary" ] || exit 1
done
exit 0
|
//
// User
// (づ。◕‿‿◕。)づ
//
// Related to the user layer.
const SECRET_KEY_LOCATION = "AUTH_SECRET_KEY"
// Crypto
// ======
// Derive a secret key from the user's passphrase, store it in the cache,
// then notify the app under the event's tag (data/error both null on success).
app.ports.fabricateSecretKey.subscribe(event => {
  keyFromPassphrase(event.data)
    .then(data => toCache(SECRET_KEY_LOCATION, data))
    .then(_ => {
      app.ports.fromAlien.send({
        tag: event.tag,
        data: null,
        error: null
      })
    })
    .catch(reportError(event))
})
// Blockstack
// ----------
let bl
// Lazily load the Blockstack vendor bundle and create one shared UserSession;
// subsequent calls return the cached instance.
function bl0ckst4ck() {
  if (!bl) {
    importScripts("../vendor/blockstack.min.js")
    bl = new blockstack.UserSession({
      appConfig: new blockstack.AppConfig({
        appDomain: location.origin
      }),
      sessionStore: BLOCKSTACK_SESSION_STORE
    })
  }
  return bl
}
// Session-store adapter persisting Blockstack session data in our cache
// (implements the get/set/delete contract blockstack.UserSession expects).
const BLOCKSTACK_SESSION_STORE = {
  key: "AUTH_BLOCKSTACK_SESSION",
  getSessionData() { return fromCache(this.key).then(a => a || {}) },
  setSessionData(data) { return toCache(this.key, data) },
  deleteSessionData() { return removeCache(this.key) }
}
app.ports.deconstructBlockstack.subscribe(_ => {
BLOCKSTACK_SESSION_STORE.deleteSessionData()
bl = null
})
app.ports.handlePendingBlockstackSignIn.subscribe(authResponse => {
const session = bl0ckst4ck()
session.handlePendingSignIn(authResponse).then(_ => {
app.ports.fromAlien.send({
tag: "SIGN_IN",
data: { method: "BLOCKSTACK", passphrase: null },
error: null
})
}).catch(
reportError({ tag: "AUTH_BLOCKSTACK" })
)
})
app.ports.redirectToBlockstackSignIn.subscribe(event => {
const session = bl0ckst4ck()
session.generateAndStoreTransitKey().then(transitKey => {
const dir = location.pathname.replace("workers/brain.js", "")
return session.makeAuthRequest(
transitKey,
location.origin + dir + "?action=authenticate/blockstack",
location.origin + dir + "manifest.json",
[ "store_write" ]
)
}).then(authRequest => {
self.postMessage({
action: "REDIRECT_TO_BLOCKSTACK",
data: authRequest
})
}).catch(
reportError(event)
)
})
// Read a file from Blockstack storage and forward its JSON contents to the app.
app.ports.requestBlockstack.subscribe(event => {
  const session = bl0ckst4ck()

  // Fix: use the session returned by bl0ckst4ck() instead of reaching for the
  // module-level `bl`, which left `session` unused and relied on a side effect.
  session
    .getFile(event.data.file)
    .then( sendJsonData(event) )
    .catch( reportError(event) )
})
// Serialize the payload and write it to a file in Blockstack storage.
app.ports.toBlockstack.subscribe(event => {
  const json = JSON.stringify(event.data.data)
  const session = bl0ckst4ck()

  // Fix: use the session returned by bl0ckst4ck() instead of the module-level
  // `bl` (same behavior, no hidden coupling through the global).
  session
    .putFile(event.data.file, json)
    .then( storageCallback(event) )
    .catch( reportError(event) )
})
// Dropbox
// -------
// Read a file from Dropbox. Offline, fall back to the locally cached copy;
// online, download, decrypt if needed, and forward the JSON to the app.
app.ports.requestDropbox.subscribe(event => {
  const params = {
    path: "/" + event.data.file
  }
  const dataPromise =
    !navigator.onLine
    ? fromCache(event.tag + "_" + event.data.file)
    : fetch("https://content.dropboxapi.com/2/files/download", {
        method: "POST",
        headers: {
          "Authorization": "Bearer " + event.data.token,
          "Dropbox-API-Arg": JSON.stringify(params)
        }
      })
        .then(r => r.ok ? r.text() : r.json())
        // Dropbox reports failures as a JSON body with an `error` field.
        .then(r => r.error ? null : r)
        .then(decryptIfNeeded)
  dataPromise
    .then( sendJsonData(event) )
    .catch( reportError(event) )
})
// Write the payload to Dropbox (encrypted) and always mirror it into the
// local cache; when offline, only the cache write reports completion.
app.ports.toDropbox.subscribe(event => {
  const json = JSON.stringify(event.data.data)
  const reporter = reportError(event)
  const params = {
    path: "/" + event.data.file,
    mode: "overwrite",
    mute: true
  }
  // Online path: encrypt, then upload.
  navigator.onLine && encryptWithSecretKey(json)
    .then(data => {
      return fetch("https://content.dropboxapi.com/2/files/upload", {
        method: "POST",
        headers: {
          "Authorization": "Bearer " + event.data.token,
          "Content-Type": "application/octet-stream",
          "Dropbox-API-Arg": JSON.stringify(params)
        },
        body: data
      })
    })
    .then( storageCallback(event) )
    .catch(reporter)
  // Always keep an unencrypted copy in the local cache.
  toCache(event.tag + "_" + event.data.file, event.data.data)
    .then( !navigator.onLine ? storageCallback(event) : identity )
    .catch(reporter)
})
// IPFS
// ----
const IPFS_ROOT = "/Applications/Diffuse/"
app.ports.requestIpfs.subscribe(event => {
const apiOrigin = event.data.apiOrigin
const path = IPFS_ROOT + event.data.file
fetch(apiOrigin + "/api/v0/files/read?arg=" + path)
.then(r => r.ok ? r.text() : r.json())
.then(r => r.Code === 0 ? null : r)
.then(decryptIfNeeded)
.then( sendJsonData(event) )
.catch( reportError(event) )
})
app.ports.toIpfs.subscribe(event => {
const apiOrigin = event.data.apiOrigin
const json = JSON.stringify(event.data.data)
const params = new URLSearchParams({
arg: IPFS_ROOT + event.data.file,
create: true,
offset: 0,
parents: true,
truncate: true
}).toString()
encryptWithSecretKey(json)
.then(data => {
const formData = new FormData()
formData.append("data", data)
return fetch(
apiOrigin + "/api/v0/files/write?" + params,
{ method: "POST", body: formData }
)
})
.then( storageCallback(event) )
.catch( reportError(event) )
})
// Legacy
// ------
app.ports.requestLegacyLocalData.subscribe(event => {
let oldIdx
let key = location.hostname + ".json"
oldIdx = indexedDB.open(key, 1)
oldIdx.onsuccess = _ => {
const old = oldIdx.result
const tra = old.transaction([key], "readwrite")
const req = tra.objectStore(key).get(key)
req.onsuccess = _ => {
if (req.result) sendJsonData(event)(req.result)
}
}
})
// Remote Storage
// --------------
let rs
let rsClient
// Lazily load the remoteStorage vendor bundle, claim read/write access to the
// "diffuse" scope, and connect with the user's address + token.
// Resolves when connected; resolves immediately on later calls.
function remoteStorage(event) {
  if (!rs) {
    importScripts("../vendor/remotestorage.min.js")
    rs = new RemoteStorage({ cache: false })
    rs.access.claim("diffuse", "rw")
    rsClient = rs.scope("/diffuse/")
    return new Promise(resolve => {
      rs.on("connected", resolve)
      rs.connect(event.data.userAddress, event.data.token)
    })
  } else {
    return Promise.resolve()
  }
}
// The account is unreachable when offline, unless the storage provider
// (the host part after "@" in the user address) runs on this machine.
function remoteStorageIsUnavailable(event) {
  return !navigator.onLine &&
         !isLocalHost(event.data.userAddress.replace(/^[^@]*@/, ""))
}
app.ports.deconstructRemoteStorage.subscribe(_ => {
rs = null
rsClient = null
})
app.ports.requestRemoteStorage.subscribe(event => {
const isOffline =
remoteStorageIsUnavailable(event)
const dataPromise =
isOffline
? fromCache(event.tag + "_" + event.data.file)
: remoteStorage(event)
.then(_ => rsClient.getFile(event.data.file))
.then(r => r.data)
.then(decryptIfNeeded)
dataPromise
.then( sendJsonData(event) )
.catch( reportError(event) )
})
app.ports.toRemoteStorage.subscribe(event => {
const json = JSON.stringify(event.data.data)
const doEncryption = _ => encryptWithSecretKey(json)
const isOffline = remoteStorageIsUnavailable(event)
!isOffline && remoteStorage(event)
.then(doEncryption)
.then(data => rsClient.storeFile("application/json", event.data.file, data))
.then( storageCallback(event) )
.catch( reportError(event) )
toCache(event.tag + "_" + event.data.file, event.data.data)
.then( isOffline ? storageCallback(event) : identity )
.catch( reportError(event) )
})
// Textile
// -------
let tt
// Lazily load the Textile helper script exactly once; the handlers below use
// the global `Textile` object it provides.
function textile() {
  if (!tt) {
    importScripts("../textile.js")
    tt = true
  }
}
app.ports.requestTextile.subscribe(event => {
const apiOrigin = event.data.apiOrigin
textile()
Textile.ensureThread
(apiOrigin)
.then(_ => Textile.getFile(apiOrigin, event.data.file))
.then(f => f ? Textile.readFile(apiOrigin, f) : null)
.then( sendJsonData(event) )
.catch( reportError(event) )
})
app.ports.toTextile.subscribe(event => {
const apiOrigin = event.data.apiOrigin
const json = JSON.stringify(event.data.data)
textile()
Textile.ensureThread
(apiOrigin)
.then(_ => Textile.getFile(apiOrigin))
.then(f => f ? Textile.deleteBlock(apiOrigin, f) : null)
.then(_ => Textile.useMill(apiOrigin, event.data.file, json))
.then(m => Textile.addFileToThread(apiOrigin, m))
.then( storageCallback(event) )
.catch( reportError(event) )
})
// 🛠
// Decrypt `data` with the stored secret key, unless it is not a string,
// already looks like plain JSON ("{"/"[" prefix), or is empty.
// Always resolves to the usable payload (null for empty input).
function decryptIfNeeded(data) {
  if (typeof data !== "string") {
    // Non-string payloads (e.g. objects from the cache) pass through as-is.
    return Promise.resolve(data)
  } else if (data.startsWith("{") || data.startsWith("[")) {
    // Unencrypted JSON — nothing to do.
    return Promise.resolve(data)
  } else {
    return data
      ? getSecretKey().then(secretKey => decrypt(secretKey, data))
      : Promise.resolve(null)
  }
}
// Encrypt with the stored secret key; if encryption fails, fall back to the
// unencrypted payload. Fix: the original returned a bare `null` for empty
// input, so callers chaining `encryptWithSecretKey(x).then(...)` crashed;
// it now always returns a promise (resolving null for empty input, matching
// decryptIfNeeded).
function encryptWithSecretKey(unencryptedData) {
  if (!unencryptedData) {
    return Promise.resolve(null)
  }

  return getSecretKey()
    .then(secretKey => encrypt(secretKey, unencryptedData))
    .catch(_ => unencryptedData)
}
// Fetch the user's secret key from the local index; rejects with
// MISSING_SECRET_KEY when no key has been stored yet.
function getSecretKey() {
  return getFromIndex({
    key: SECRET_KEY_LOCATION
  }).then(key => {
    return key ? key : Promise.reject(new Error("MISSING_SECRET_KEY"))
  })
}
// An event tag denotes an auth *service* when it carries the "AUTH_" prefix
// and is not one of the internal bookkeeping tags listed below.
function isAuthMethodService(eventTag) {
  const internalTags = [
    "AUTH_BLOCKSTACK_SESSION",
    "AUTH_ENCLOSED_DATA",
    "AUTH_METHOD",
    "AUTH_SECRET_KEY"
  ]

  return eventTag.startsWith("AUTH_") && !internalTags.includes(eventTag)
}
// True when the given address points at the local machine.
// Fix: the original tested both "localhost" and "127.0.0.1" twice; the
// duplicate conditions are removed (behavior unchanged).
function isLocalHost(url) {
  return (
    url.startsWith("localhost") ||
    url.startsWith("127.0.0.1")
  )
}
// Build a callback that forwards `data` to the app under the event's tag.
// With opts.parseJSON, string payloads are parsed into objects first.
function sendData(event, opts) {
  return data => {
    app.ports.fromAlien.send({
      tag: event.tag,
      data: (opts && opts.parseJSON && typeof data === "string") ? JSON.parse(data) : data,
      error: null
    })
  }
}
// Like sendData, but parses string payloads as JSON before forwarding.
function sendJsonData(event) {
  return sendData(event, { parseJSON: true })
}
// Build a completion callback for storage writes: notifies the app that a
// piece of data was saved. The event and the write result are ignored.
function storageCallback(event) {
  return _ => {
    app.ports.savedHypaethralBit.send()
  }
}
|
#!/bin/bash
#SBATCH -N 1
#SBATCH -p RM
#SBATCH --ntasks-per-node 28
#SBATCH -t 5:00:00
# Input RFAM alignment and the sankoff solver binary.
SEQ="./seqs/RFAM/RF02543.fasta"
CMD="./bin/sankoff"
# Solver options (-t 1: one thread); OUT tags the output file name.
OPT="-t 1"
OUT="cpu1"
module avail cuda
module load cuda
set -x
#run GPU program
# NOTE(review): CUDA modules are loaded above, but OPT="-t 1" and OUT="cpu1"
# suggest a single-threaded CPU run — confirm which build is exercised.
cd $HOME"/hpc_foldalign"
strace -ve wait4 /usr/bin/time -v $CMD $OPT $SEQ >> $SEQ.$OUT.output 2>&1
|
#!/bin/bash
# generate bitwarden .env files
BITWARDEN_PROD_FILE="$1"

# Fix: fail loudly when the output path is missing, instead of silently
# trying to write to an empty filename.
if [ -z "$BITWARDEN_PROD_FILE" ]; then
    echo "usage: $0 <path-to-env-file>" >&2
    exit 1
fi

echo "WEBSOCKET_ENABLED=true" > "$BITWARDEN_PROD_FILE"
echo "SIGNUPS_ALLOWED=true" >> "$BITWARDEN_PROD_FILE"
# Restrict permissions: the env file may hold secrets later.
chmod 600 "$BITWARDEN_PROD_FILE"
def quantized_bias_add(input_val: int, bias: int, int_bits: int, frac_bits: int) -> int:
    """Add a bias to a quantized fixed-point value.

    Both ``input_val`` and ``bias`` are integers already expressed in the same
    Q(int_bits.frac_bits) fixed-point representation, so adding them directly
    yields the correctly scaled sum.

    Args:
        input_val: Quantized input value (fixed-point integer).
        bias: Quantized bias value (fixed-point integer).
        int_bits: Integer bits of the format. Unused; kept for interface
            compatibility. NOTE(review): a saturating add would clamp to the
            range implied by int_bits — confirm whether that was intended.
        frac_bits: Fractional bits of the format. Unused; see note below.

    Returns:
        The quantized sum ``input_val + bias``.
    """
    # The original scaled both operands by 2**frac_bits and floor-divided the
    # sum by the same factor. For integer operands that round-trip is exact,
    # so the whole computation reduces to a plain integer addition.
    return input_val + bias
/// Simple mutable contact record with name, email and phone fields.
class Contact {
    var name: String
    var email: String
    var phone: String

    /// Fix: the original declared non-optional stored properties without any
    /// initializer, which does not compile. Defaulted parameters keep
    /// zero-argument construction available.
    init(name: String = "", email: String = "", phone: String = "") {
        self.name = name
        self.email = email
        self.phone = phone
    }

    /// Sets the contact's name.
    func setName(name: String) {
        self.name = name
    }

    /// Sets the contact's email address.
    func setEmail(email: String) {
        self.email = email
    }

    /// Sets the contact's phone number.
    func setPhone(phone: String) {
        self.phone = phone
    }

    /// Prints all contact fields to standard output.
    func printContact() {
        print("Name: \(name)")
        print("Email: \(email)")
        print("Phone: \(phone)")
    }
}
#!/bin/bash
HEADERS_REPO="https://github.com/KhronosGroup/OpenCL-Headers.git"
HEADERS_COMMIT="d1b936b72b9610626ecab8a991cec18348fba047"
LOADER_REPO="https://github.com/KhronosGroup/OpenCL-ICD-Loader.git"
LOADER_COMMIT="99df8d88f7509739be7849da03a9fb1fb8bcbfa4"
# Module gate for the ffbuild framework: returning 0 (success) marks this
# dependency as enabled.
ffbuild_enabled() {
    return 0
}
# Emit Dockerfile lines that copy this script into the image and run it.
ffbuild_dockerstage() {
    to_df "ADD $SELF /stage.sh"
    to_df "RUN run_stage"
}
# Fetch pinned OpenCL headers + ICD loader, build the static loader, install
# both into the ffbuild prefix, and generate a pkg-config file for FFmpeg.
ffbuild_dockerbuild() {
    mkdir opencl && cd opencl

    # Headers are copied straight into the prefix; only the loader is compiled.
    git-mini-clone "$HEADERS_REPO" "$HEADERS_COMMIT" headers
    mkdir -p "$FFBUILD_PREFIX"/include/CL
    cp -r headers/CL/* "$FFBUILD_PREFIX"/include/CL/.

    git-mini-clone "$LOADER_REPO" "$LOADER_COMMIT" loader
    cd loader
    mkdir build && cd build
    cmake -DCMAKE_TOOLCHAIN_FILE="$FFBUILD_CMAKE_TOOLCHAIN" -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX="$FFBUILD_PREFIX" -DOPENCL_ICD_LOADER_HEADERS_DIR="$FFBUILD_PREFIX"/include -DBUILD_SHARED_LIBS=OFF -DOPENCL_ICD_LOADER_DISABLE_OPENCLON12=ON -DOPENCL_ICD_LOADER_PIC=ON -DOPENCL_ICD_LOADER_BUILD_TESTING=OFF ..
    make -j$(nproc)
    make install

    # Hand-written pkg-config file (the loader build does not install one).
    # The original built this with eleven echo-appends; a single heredoc
    # produces identical contents. $FFBUILD_PREFIX expands now; the escaped
    # \${...} variables stay literal for pkg-config to resolve.
    cat > OpenCL.pc <<EOF
prefix=$FFBUILD_PREFIX
exec_prefix=\${prefix}
libdir=\${exec_prefix}/lib
includedir=\${prefix}/include

Name: OpenCL
Description: OpenCL ICD Loader
Version: 9999
Libs: -L\${libdir} -lOpenCL
Libs.private: -lole32 -lshlwapi -lcfgmgr32
Cflags: -I\${includedir}
EOF
    mkdir -p "$FFBUILD_PREFIX"/lib/pkgconfig
    mv OpenCL.pc "$FFBUILD_PREFIX"/lib/pkgconfig/OpenCL.pc

    # Leave the build tree clean.
    cd ../../..
    rm -rf opencl
}
# FFmpeg configure flag emitted when this module is enabled.
ffbuild_configure() {
    echo --enable-opencl
}

# FFmpeg configure flag emitted when this module is disabled.
ffbuild_unconfigure() {
    echo --disable-opencl
}
|
//index.js
let db = require('../../model/db.js');
//获取应用实例
var app = getApp()
Page({
data: {
topItem: {
content: '最新的笔记会显示在顶部, \n文字会自动保存'
},
noteList: [
],
userInfo: {},
pressStartAt: 0,
pressEndAt: 0,
isPopup: false
},
//事件处理函数
bindViewTap: function () {
wx.navigateTo({
url: '../logs/logs'
})
},
onLoad: function () {
var that = this;
this.updateListUi();
},
onReady: function () {
},
onShow() {
if (db.cache.isNoteModified) {
this.updateListUi();
}
},
onItemClick: function (event) {
if (this.data.isPopup == false) {
if (this.data.pressEndAt - this.data.pressStartAt < 200) {
let note = event.currentTarget.dataset;
if (note.showRemove) {
note.showRemove = false;
let item = this.getNote(note.id);
item.showRemove = false;
this.setData({
noteList: this.data.noteList
});
} else {
getApp().globalData.currentNote = note;
wx.navigateTo({
url: `/pages/editor/editor?id=${note.id}&isEdit=0`,
})
}
}
} else {
this.hidePopup();
}
},
onLongPress: function (event) {
let note = event.currentTarget.dataset;
let item = this.getNote(note.id);
item.showRemove = true;
this.setData({
noteList: this.data.noteList
});
},
onPress(event) {
this.data.pressStartAt = event.timeStamp;
},
onPressEnd(event) {
this.data.pressEndAt = event.timeStamp;
},
onContainerTouch(event) {
this.data.noteList.forEach(item => {
item.showRemove = false;
});
this.setData({
noteList: this.data.noteList
});
this.hidePopup();
},
onMore(event) {
if (this.data.isPopup == true) {
this.hidePopup();
} else {
this.data.isPopup = true;
let note = event.currentTarget.dataset;
let item = this.getNote(note.id);
this.currentId = note.id;
item.showMore = true;
this.setData({
noteList: this.data.noteList,
showMore: true
});
}
},
onRemove: function (event) {
this.currentId = 0;
let id = event.currentTarget.dataset.id;
let note = this.getNote(id);
note.isRemoved = true;
this.setData({
noteList: this.data.noteList
});
let that = this;
if (note) {
db.removeNote(id)
.then(result => {
let temp;
that.data.noteList = that.data.noteList.filter(item => {
return item.id != id;
});
that.setData({
noteList: that.data.noteList
});
})
.catch(error => {
wx.showToast({
title: '' + error,
})
});
}
},
onAddNote(event) {
if (this.data.isPopup == false) {
wx.navigateTo({
url: `/pages/editor/editor?isEdit=1`,
})
} else {
this.hidePopup();
}
},
// Find a note in the current list by id; returns undefined when absent.
getNote(id) {
  for (let i = 0, size = this.data.noteList.length; i < size; i++) {
    let note = this.data.noteList[i];
    if (note && note.id == id) {
      return note;
    }
  }
},
updateListUi() {
console.debug(this.data);
let data = this.data;
db.getAllNote()
.then(notes => {
if (!notes || notes.length == 0) {
this.checkNoteListEmpty();
} else {
let temp = new Date();
const INTERVAL = (8 * 60 * 60 * 1000);
notes.forEach(item => {
temp.setTime(item.createdAt + INTERVAL);
item.formatDate = temp.getUTCFullYear() + "." + (1+ temp.getMonth()) + "." +
temp.getUTCDate();
});
data.noteList = notes;
this.updateUi();
}
}, error => {
this.checkNoteListEmpty();
})
.catch(error => {
this.checkNoteListEmpty();
});
},
updateUi() {
this.setData(this.data);
},
checkNoteListEmpty() {
if (this.data.noteList.length == 0) {
this.data.noteList.push({
id: 'id_' + 0,
content: '最新的笔记会显示在顶部, \n\n文字会自动保存',
status: 0
});
this.setData({
noteList: this.data.noteList
})
}
},
hidePopup(event) {
this.data.isPopup = false;
if (this.currentId && this.currentId != 0) {
let note = this.getNote(this.currentId);
note.showMore = false;
this.setData({
noteList: this.data.noteList,
showMore: false
});
this.currentId = 0;
}
},
onShareAppMessage() {
let path = this.currentId > 0 ? `/pages/editor/editor?id=${this.currentId}&isEdit=0` :
'/pages/index/index';
return {
title: '我的笔记书',
path,
success() {
},
fail() {
},
complete() {
}
};
}
})
|
#!/bin/bash
# Deactivate testing virtualenv and reactivate original one
# Runs only during the Travis "Deploy" build stage; $ORIG_ENV is expected to
# hold the path to the original environment's activate script.
if [[ $TRAVIS_BUILD_STAGE_NAME == 'Deploy' ]]; then
    deactivate
    echo "Reactivating virtualenv:"
    echo $ORIG_ENV
    source $ORIG_ENV
fi
|
#!/usr/bin/env bash
# Provision a Debian/Ubuntu host to run the profiles REST API behind
# supervisor + nginx + uwsgi.
set -e

# TODO: Set to URL of git repo.
PROJECT_GIT_URL='https://github.com/coding-eunbong73/profiles-rest-api.git'
PROJECT_BASE_PATH='/usr/local/apps/profiles-rest-api'

echo "Installing dependencies..."
apt-get update
apt-get install -y python3-dev python3-venv sqlite python-pip supervisor nginx git

# Create project directory
mkdir -p $PROJECT_BASE_PATH
git clone $PROJECT_GIT_URL $PROJECT_BASE_PATH

# Create virtual environment
mkdir -p $PROJECT_BASE_PATH/env
python3 -m venv $PROJECT_BASE_PATH/env

# Install python packages
$PROJECT_BASE_PATH/env/bin/pip install -r $PROJECT_BASE_PATH/requirements.txt
$PROJECT_BASE_PATH/env/bin/pip install uwsgi==2.0.18

# Run migrations and collectstatic
cd $PROJECT_BASE_PATH
$PROJECT_BASE_PATH/env/bin/python manage.py migrate
$PROJECT_BASE_PATH/env/bin/python manage.py collectstatic --noinput

# Configure supervisor
cp $PROJECT_BASE_PATH/deploy/supervisor_profiles_api.conf /etc/supervisor/conf.d/profiles_api.conf
supervisorctl reread
supervisorctl update
supervisorctl restart profiles_api

# Configure nginx
cp $PROJECT_BASE_PATH/deploy/nginx_profiles_api.conf /etc/nginx/sites-available/profiles_api.conf
# Fix: -f so a re-run (or an image without the default site) does not abort
# the script under `set -e`.
rm -f /etc/nginx/sites-enabled/default
# Fix: -sf so re-running does not fail when the symlink already exists.
ln -sf /etc/nginx/sites-available/profiles_api.conf /etc/nginx/sites-enabled/profiles_api.conf
systemctl restart nginx.service

echo "DONE! :)"
|
#!/bin/bash
# Regenerate interop code for the library and its test project.
set -e

SLNDIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"

# Fix: quote the paths so the script works when the solution directory
# contains spaces.
cd "$SLNDIR/RutokenPkcs11Interop"
./regeneratecode.sh
cd "$SLNDIR/RutokenPkcs11Interop.Tests"
./regeneratecode.sh
|
<reponame>yinfuquan/spring-boot-examples
package com.yin.springboot.user.center.config;
import com.yin.springboot.user.center.config.loginconfig.LoginSuccessHandler;
import com.yin.springboot.user.center.config.service.UserDetailsServiceImpl;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder;
import org.springframework.security.config.annotation.method.configuration.EnableGlobalMethodSecurity;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.builders.WebSecurity;
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
import org.springframework.security.web.util.matcher.AntPathRequestMatcher;
@Configuration
@EnableWebSecurity
@EnableGlobalMethodSecurity(prePostEnabled = true, securedEnabled = true, jsr250Enabled = true)
public class WebSecurityConfiguration extends WebSecurityConfigurerAdapter {

    // ====================== In-memory authentication (unused; accounts are configured elsewhere) ======================
    /* @Override
    protected void configure(AuthenticationManagerBuilder auth) throws Exception {
        auth.inMemoryAuthentication()
                // Create users in memory and encode their passwords
                .withUser("user").password(passwordEncoder().encode("<PASSWORD>")).roles("USER")
                .and()
                .withUser("admin").password(passwordEncoder().encode("<PASSWORD>")).roles("ADMIN");
    }
    */

    // ====================== JDBC-backed authentication ======================
//    @Override
//    public void configure(WebSecurity web) throws Exception {
//        // Expose check_token; otherwise the resource server gets a 403 when calling it
//        web.ignoring().antMatchers("/oauth/check_token");
//        web.ignoring().antMatchers("static/**");
//    }

    /**
     * Password encoder bean used throughout the security configuration.
     *
     * @return BCrypt encoder as the default password hashing scheme.
     */
    @Bean
    public BCryptPasswordEncoder passwordEncoder() {
        // Use BCrypt as the default password hashing scheme
        return new BCryptPasswordEncoder();
    }

//    @Override
//    protected void configure(AuthenticationManagerBuilder auth) throws Exception {
//
//        auth.inMemoryAuthentication()
//                // Create users in memory and encode their passwords
//                .withUser("user").password(passwordEncoder().encode("<PASSWORD>")).roles("USER").authorities("add")
//                .and()
//                .withUser("admin").password(passwordEncoder().encode("<PASSWORD>")).roles("ADMIN");
//
//    }

    // Customizes the response after a successful form login.
    @Autowired
    private LoginSuccessHandler loginSuccessHandler;

    /**
     * HTTP security rules: URL authorization, CSRF, logout and form login.
     */
    @Override
    protected void configure(HttpSecurity http) throws Exception {
        http
                .authorizeRequests()
                // Authorization rules per path
                .antMatchers("/nacos/**").permitAll()
                .antMatchers("/addUser").hasAnyAuthority("add_user")
                .antMatchers("/login").permitAll()
                .antMatchers("/css/**", "/images/**", "/js/**", "/layui/**").permitAll()
                //.anyRequest().hasRole("USER")
                .anyRequest().authenticated()
                .and()
                // Disable CSRF protection
                .csrf()
                // Specific paths to exempt from CSRF, were it re-enabled:
                // .requireCsrfProtectionMatcher(new AntPathRequestMatcher("/oauth/authorize"))
                .disable()
                .logout()
                .logoutUrl("/logout")
                .logoutSuccessUrl("/login")
                .and()
                .formLogin()
                .successHandler(loginSuccessHandler)
                // .loginProcessingUrl("/my/login")
                .loginPage("/login");
    }

//    @Bean
//    public BCryptPasswordEncoder passwordEncoder() {
//        // Use BCrypt as the default password hashing scheme
//        return new BCryptPasswordEncoder();
//    }

    /**
     * Custom user lookup backed by the application's user store.
     */
    @Bean
    @Override
    public UserDetailsService userDetailsService() {
        return new UserDetailsServiceImpl();
    }

    @Override
    protected void configure(AuthenticationManagerBuilder auth) throws Exception {
        // Use custom authentication and authorization
        auth.userDetailsService(userDetailsService());
    }

    @Override
    public void configure(WebSecurity web) throws Exception {
        // Expose check_token; otherwise the resource server gets a 403 when calling it
        web.ignoring().antMatchers("/oauth/check_token");
        web.ignoring().antMatchers("static/**");
    }
}
|
# Controller generated by Typus, use it to extend admin functionality.
# Inherits all CRUD behaviour for taggings from Admin::ResourcesController;
# add overrides here as needed.
class Admin::TaggingsController < Admin::ResourcesController
end
|
def assert_not_equals(val1, val2):
    """Raise AssertionError when the two values compare equal."""
    if val1 != val2:
        return
    raise AssertionError(f"{val1} is equal to {val2}")
package org.nem.core.model;
import org.nem.core.utils.StringUtils;
import java.util.Properties;
/**
 * A strongly typed NEM property bag.
 */
public class NemProperties {
    private final Properties properties;

    /**
     * Creates a new property bag.
     *
     * @param properties The java properties.
     */
    public NemProperties(final Properties properties) {
        this.properties = properties;
    }

    /**
     * Gets the value of a required string property.
     *
     * @param name The property name.
     * @return The property value.
     */
    public String getString(final String name) {
        final String value = this.properties.getProperty(name);
        if (null == value) {
            throw new RuntimeException(String.format("property %s must not be null", name));
        }

        return value;
    }

    /**
     * Gets the value of a required integer property.
     *
     * @param name The property name.
     * @return The property value.
     */
    public int getInteger(final String name) {
        // parseInt avoids the needless boxing of Integer.valueOf
        return Integer.parseInt(this.getString(name));
    }

    /**
     * Gets the value of a required long property.
     *
     * @param name The property name.
     * @return The property value.
     */
    public long getLong(final String name) {
        // parseLong avoids the needless boxing of Long.valueOf
        return Long.parseLong(this.getString(name));
    }

    /**
     * Gets the value of a required boolean property.
     *
     * @param name The property name.
     * @return The property value.
     */
    public boolean getBoolean(final String name) {
        return parseBoolean(this.getString(name));
    }

    /**
     * Gets the value of an optional string property.
     *
     * @param name The property name.
     * @param defaultValue The default value to use in case there is no property value.
     * @return The property value.
     */
    public String getOptionalString(final String name, final String defaultValue) {
        final String value = this.properties.getProperty(name);
        return null == value ? defaultValue : value;
    }

    /**
     * Gets the value of an optional integer property.
     *
     * @param name The property name.
     * @param defaultValue The default value to use in case there is no property value.
     *        NOTE(review): a null default is unboxed to int and would throw a
     *        NullPointerException when the property is absent — confirm callers
     *        never pass null.
     * @return The property value.
     */
    public int getOptionalInteger(final String name, final Integer defaultValue) {
        final String value = this.properties.getProperty(name);
        return null == value ? defaultValue : Integer.parseInt(value);
    }

    /**
     * Gets the value of an optional long property.
     *
     * @param name The property name.
     * @param defaultValue The default value to use in case there is no property value.
     *        NOTE(review): same null-unboxing caveat as getOptionalInteger.
     * @return The property value.
     */
    public long getOptionalLong(final String name, final Long defaultValue) {
        final String value = this.properties.getProperty(name);
        return null == value ? defaultValue : Long.parseLong(value);
    }

    /**
     * Gets the value of an optional boolean property.
     *
     * @param name The property name.
     * @param defaultValue The default value to use in case there is no property value.
     * @return The property value.
     */
    public boolean getOptionalBoolean(final String name, final Boolean defaultValue) {
        final String value = this.properties.getProperty(name);
        return null == value ? defaultValue : parseBoolean(value);
    }

    /**
     * Gets the value of an optional string array property.
     * The raw value is split on the '|' character; a null or whitespace-only
     * value yields an empty array.
     *
     * @param name The property name.
     * @param defaultValue The default value to use in case there is no property value.
     * @return The property value.
     */
    public String[] getOptionalStringArray(final String name, final String defaultValue) {
        final String stringArray = this.getOptionalString(name, defaultValue);
        return StringUtils.isNullOrWhitespace(stringArray) ? new String[]{} : stringArray.split("\\|");
    }

    /**
     * Parses a boolean strictly: only "true" and "false" (case-insensitive)
     * are accepted; anything else throws.
     */
    private static boolean parseBoolean(final String s) {
        // constrain false values to "false"
        if (s.equalsIgnoreCase("true")) {
            return true;
        } else if (s.equalsIgnoreCase("false")) {
            return false;
        }

        throw new RuntimeException(new NumberFormatException("string must be either true or false"));
    }
}
|
#!/usr/bin/env bash
# Copyright 2019 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Runs prow/pj-on-kind.sh with config arguments specific to the prow.k8s.io instance.
set -o errexit
set -o nounset
set -o pipefail
# Resolve absolute paths to this instance's prow config and job configs,
# relative to this script's location.
export CONFIG_PATH="$(readlink -f $(dirname "${BASH_SOURCE[0]}")/../prow/config.yaml)"
export JOB_CONFIG_PATH="$(readlink -f $(dirname "${BASH_SOURCE[0]}")/jobs)"
# Delegate to the generic runner with this repo's configuration.
../prow/pj-on-kind.sh "$@"
# Swap the above command for the following one for use outside kubernetes/test-infra.
# bash <(curl -s https://raw.githubusercontent.com/kubernetes/test-infra/master/prow/pj-on-kind.sh) "$@"
|
/**
* @fileoverview This file is generated by the Angular 2 template compiler.
* Do not edit.
* @suppress {suspiciousCode,uselessCode,missingProperties}
*/
/* tslint:disable */
import * as import0 from '../../../app/head/head.component';
import * as import1 from '@angular/core/src/linker/view';
import * as import2 from '@angular/core/src/render/api';
import * as import3 from '@angular/core/src/linker/view_utils';
import * as import4 from '@angular/core/src/metadata/view';
import * as import5 from '@angular/core/src/linker/view_type';
import * as import6 from '@angular/core/src/change_detection/constants';
import * as import7 from '@angular/core/src/linker/component_factory';
import * as import8 from '@angular/core/src/animation/animation_transition';
import * as import9 from '@angular/core/src/animation/animation_sequence_player';
import * as import10 from '@angular/core/src/animation/animation_styles';
import * as import11 from '@angular/core/src/animation/animation_style_util';
import * as import12 from '@angular/core/src/animation/animation_keyframe';
import * as import13 from '@angular/core/src/animation/animation_player';
import * as import14 from '@angular/core/src/linker/view_container';
import * as import15 from '../../node_modules/@angular/common/src/directives/ng_if.ngfactory';
import * as import16 from '@angular/core/src/change_detection/change_detection_util';
import * as import17 from '@angular/core/src/linker/template_ref';
import * as import18 from '@angular/common/src/directives/ng_if';
/**
 * Change-detection wrapper around HeadComponent produced by the Angular 2
 * template compiler. It owns the component instance (`context`), tracks a
 * dirty flag consumed by the generated views, and exposes the lifecycle
 * hooks the compiled view code invokes. Generated code — do not hand-edit.
 */
export class Wrapper_HeadComponent {
  /*private*/ _eventHandler:Function;
  context:import0.HeadComponent;
  /*private*/ _changed:boolean;
  constructor() {
    this._changed = false;
    this.context = new import0.HeadComponent();
  }
  ngOnDetach(view:import1.AppView<any>,componentView:import1.AppView<any>,el:any):void {
  }
  ngOnDestroy():void {
  }
  ngDoCheck(view:import1.AppView<any>,el:any,throwOnChange:boolean):boolean {
    // Report (and consume) the dirty flag accumulated since the last check.
    const wasChanged:any = this._changed;
    this._changed = false;
    return wasChanged;
  }
  checkHost(view:import1.AppView<any>,componentView:import1.AppView<any>,el:any,throwOnChange:boolean):void {
  }
  handleEvent(eventName:string,$event:any):boolean {
    // No bound outputs were compiled for this component, so every event
    // leaves default handling enabled.
    return true;
  }
  subscribe(view:import1.AppView<any>,_eventHandler:any):void {
    this._eventHandler = _eventHandler;
  }
}
// Render metadata for the host view: empty template URL, no styles, no
// animations, default (None) view encapsulation.
var renderType_HeadComponent_Host:import2.RenderComponentType = import3.createRenderComponentType('',0,import4.ViewEncapsulation.None,([] as any[]),{});
/**
 * Compiler-generated host view for HeadComponent: selects or creates the
 * 'head_box' host element, instantiates the component view plus its wrapper,
 * and forwards injection, change detection and destruction to them.
 * Generated code — do not hand-edit; creation order matters to the framework.
 */
class View_HeadComponent_Host0 extends import1.AppView<any> {
_el_0:any;
compView_0:import1.AppView<import0.HeadComponent>;
_HeadComponent_0_3:Wrapper_HeadComponent;
constructor(viewUtils:import3.ViewUtils,parentView:import1.AppView<any>,parentIndex:number,parentElement:any) {
super(View_HeadComponent_Host0,renderType_HeadComponent_Host,import5.ViewType.HOST,viewUtils,parentView,parentIndex,parentElement,import6.ChangeDetectorStatus.CheckAlways);
}
// Builds the host element and the component view; returns the ComponentRef
// handed back to whoever bootstrapped the factory.
createInternal(rootSelector:string):import7.ComponentRef<any> {
this._el_0 = import3.selectOrCreateRenderHostElement(this.renderer,'head_box',import3.EMPTY_INLINE_ARRAY,rootSelector,(null as any));
this.compView_0 = new View_HeadComponent0(this.viewUtils,this,0,this._el_0);
this._HeadComponent_0_3 = new Wrapper_HeadComponent();
this.compView_0.create(this._HeadComponent_0_3.context);
this.init(this._el_0,((<any>this.renderer).directRenderer? (null as any): [this._el_0]),(null as any));
return new import7.ComponentRef_<any>(0,this,this._el_0,this._HeadComponent_0_3.context);
}
// Resolves injection of the component instance itself at node index 0.
injectorGetInternal(token:any,requestNodeIndex:number,notFoundResult:any):any {
if (((token === import0.HeadComponent) && (0 === requestNodeIndex))) { return this._HeadComponent_0_3.context; }
return notFoundResult;
}
detectChangesInternal(throwOnChange:boolean):void {
this._HeadComponent_0_3.ngDoCheck(this,this._el_0,throwOnChange);
this.compView_0.internalDetectChanges(throwOnChange);
}
destroyInternal():void {
this.compView_0.destroy();
}
visitRootNodesInternal(cb:any,ctx:any):void {
cb(this._el_0,ctx);
}
}
// Public factory for instantiating HeadComponent on a 'head_box' element.
export const HeadComponentNgFactory:import7.ComponentFactory<import0.HeadComponent> = new import7.ComponentFactory<import0.HeadComponent>('head_box',View_HeadComponent_Host0,import0.HeadComponent);
// No component styles were compiled in for this component.
const styles_HeadComponent:any[] = ([] as any[]);
// Style states for the 'heroState' animation trigger; '*' is the wildcard
// fallback used for any state without an explicit entry.
var HeadComponent_heroState_states:any = {
'inactive': {
'top': '-100%',
'bottom': '100%'
}
,
'active': {
'top': '0',
'bottom': '64px'
}
,
'*': {}
}
;
/**
 * Compiled transition factory for the 'heroState' trigger: plays a 300ms
 * 'ease-in' sequence for inactive<->active changes, or a no-op player when no
 * transition matches. Generated code — statement order is significant.
 */
function HeadComponent_heroState_factory(view:import1.AppView<any>,element:any,currentState:any,nextState:any):import8.AnimationTransition {
// Players already running on this element for the trigger (all of them when
// transitioning to 'void'), so they can be merged into / cancelled by this one.
var previousPlayers:any = view.animationContext.getAnimationPlayers(element,((nextState == 'void')? (null as any): 'heroState'));
var collectedStyles:any = {};
var player:any = (null as any);
var totalTime:any = 0;
// States without an explicit entry fall back to the wildcard '*' styles.
var defaultStateStyles:any = HeadComponent_heroState_states['*'];
var startStateStyles:any = HeadComponent_heroState_states[currentState];
if ((startStateStyles == (null as any))) { (startStateStyles = defaultStateStyles); }
var endStateStyles:any = HeadComponent_heroState_states[nextState];
if ((endStateStyles == (null as any))) { (endStateStyles = defaultStateStyles); }
// inactive -> active: 300ms ease-in.
if (((player == (null as any)) && ((currentState == 'inactive') && (nextState == 'active')))) {
player = new import9.AnimationSequencePlayer([view.renderer.animate(element,new import10.AnimationStyles(import11.collectAndResolveStyles(collectedStyles,[startStateStyles])),import11.balanceAnimationKeyframes(collectedStyles,endStateStyles,[
new import12.AnimationKeyframe(0,new import10.AnimationStyles(import11.collectAndResolveStyles(collectedStyles,[{}]))),
new import12.AnimationKeyframe(1,new import10.AnimationStyles(import11.collectAndResolveStyles(collectedStyles,[{}])))
]
),300,0,'ease-in',previousPlayers)]);
totalTime = 300;
}
// active -> inactive: 300ms ease-in.
if (((player == (null as any)) && ((currentState == 'active') && (nextState == 'inactive')))) {
player = new import9.AnimationSequencePlayer([view.renderer.animate(element,new import10.AnimationStyles(import11.collectAndResolveStyles(collectedStyles,[startStateStyles])),import11.balanceAnimationKeyframes(collectedStyles,endStateStyles,[
new import12.AnimationKeyframe(0,new import10.AnimationStyles(import11.collectAndResolveStyles(collectedStyles,[{}]))),
new import12.AnimationKeyframe(1,new import10.AnimationStyles(import11.collectAndResolveStyles(collectedStyles,[{}])))
]
),300,0,'ease-in',previousPlayers)]);
totalTime = 300;
}
// No transition matched: use a no-op player so lifecycle callbacks still fire.
if ((player == (null as any))) { (player = new import13.NoOpAnimationPlayer()); }
// After the transition finishes, apply the final styles and release the player.
player.onDone(():void => {
player.destroy();
import11.renderStyles(element,view.renderer,import11.prepareFinalAnimationStyles(startStateStyles,endStateStyles));
});
new import9.AnimationSequencePlayer(previousPlayers).destroy();
import11.renderStyles(element,view.renderer,import11.clearStyles(startStateStyles));
view.animationContext.queueAnimation(element,'heroState',player);
return new import8.AnimationTransition(player,currentState,nextState,totalTime);
}
// Style states for the 'heroState2' trigger (collapsed vs. natural height);
// '*' is the wildcard fallback.
var HeadComponent_heroState2_states:any = {
'inactive': {'height': '0'},
'active': {'height': '*'},
'*': {}
}
;
/**
 * Compiled transition factory for the 'heroState2' trigger: plays a 500ms
 * 'ease-in' sequence for inactive<->active changes, or a no-op player when no
 * transition matches. Generated code — statement order is significant.
 */
function HeadComponent_heroState2_factory(view:import1.AppView<any>,element:any,currentState:any,nextState:any):import8.AnimationTransition {
// Players already running on this element for the trigger (all of them when
// transitioning to 'void'), so they can be merged into / cancelled by this one.
var previousPlayers:any = view.animationContext.getAnimationPlayers(element,((nextState == 'void')? (null as any): 'heroState2'));
var collectedStyles:any = {};
var player:any = (null as any);
var totalTime:any = 0;
// States without an explicit entry fall back to the wildcard '*' styles.
var defaultStateStyles:any = HeadComponent_heroState2_states['*'];
var startStateStyles:any = HeadComponent_heroState2_states[currentState];
if ((startStateStyles == (null as any))) { (startStateStyles = defaultStateStyles); }
var endStateStyles:any = HeadComponent_heroState2_states[nextState];
if ((endStateStyles == (null as any))) { (endStateStyles = defaultStateStyles); }
// inactive -> active: 500ms ease-in.
if (((player == (null as any)) && ((currentState == 'inactive') && (nextState == 'active')))) {
player = new import9.AnimationSequencePlayer([view.renderer.animate(element,new import10.AnimationStyles(import11.collectAndResolveStyles(collectedStyles,[startStateStyles])),import11.balanceAnimationKeyframes(collectedStyles,endStateStyles,[
new import12.AnimationKeyframe(0,new import10.AnimationStyles(import11.collectAndResolveStyles(collectedStyles,[{}]))),
new import12.AnimationKeyframe(1,new import10.AnimationStyles(import11.collectAndResolveStyles(collectedStyles,[{}])))
]
),500,0,'ease-in',previousPlayers)]);
totalTime = 500;
}
// active -> inactive: 500ms ease-in.
if (((player == (null as any)) && ((currentState == 'active') && (nextState == 'inactive')))) {
player = new import9.AnimationSequencePlayer([view.renderer.animate(element,new import10.AnimationStyles(import11.collectAndResolveStyles(collectedStyles,[startStateStyles])),import11.balanceAnimationKeyframes(collectedStyles,endStateStyles,[
new import12.AnimationKeyframe(0,new import10.AnimationStyles(import11.collectAndResolveStyles(collectedStyles,[{}]))),
new import12.AnimationKeyframe(1,new import10.AnimationStyles(import11.collectAndResolveStyles(collectedStyles,[{}])))
]
),500,0,'ease-in',previousPlayers)]);
totalTime = 500;
}
// No transition matched: use a no-op player so lifecycle callbacks still fire.
if ((player == (null as any))) { (player = new import13.NoOpAnimationPlayer()); }
// After the transition finishes, apply the final styles and release the player.
player.onDone(():void => {
player.destroy();
import11.renderStyles(element,view.renderer,import11.prepareFinalAnimationStyles(startStateStyles,endStateStyles));
});
new import9.AnimationSequencePlayer(previousPlayers).destroy();
import11.renderStyles(element,view.renderer,import11.clearStyles(startStateStyles));
view.animationContext.queueAnimation(element,'heroState2',player);
return new import8.AnimationTransition(player,currentState,nextState,totalTime);
}
// Style states for the 'topchange' trigger: three horizontal positions
// (slides by whole widths via 'left'); '*' is the wildcard fallback.
var HeadComponent_topchange_states:any = {
'toptype1': {'left': '0'},
'toptype2': {'left': '-100%'},
'toptype3': {'left': '-200%'},
'*': {}
}
;
/**
 * Compiled transition factory for the 'topchange' trigger: 500ms 'ease-in'
 * sequences between adjacent toptype states (1<->2 and 2<->3), or a no-op
 * player when no transition matches. Generated code — order is significant.
 */
function HeadComponent_topchange_factory(view:import1.AppView<any>,element:any,currentState:any,nextState:any):import8.AnimationTransition {
// Players already running on this element for the trigger (all of them when
// transitioning to 'void'), so they can be merged into / cancelled by this one.
var previousPlayers:any = view.animationContext.getAnimationPlayers(element,((nextState == 'void')? (null as any): 'topchange'));
var collectedStyles:any = {};
var player:any = (null as any);
var totalTime:any = 0;
// States without an explicit entry fall back to the wildcard '*' styles.
var defaultStateStyles:any = HeadComponent_topchange_states['*'];
var startStateStyles:any = HeadComponent_topchange_states[currentState];
if ((startStateStyles == (null as any))) { (startStateStyles = defaultStateStyles); }
var endStateStyles:any = HeadComponent_topchange_states[nextState];
if ((endStateStyles == (null as any))) { (endStateStyles = defaultStateStyles); }
// toptype1 -> toptype2: 500ms ease-in.
if (((player == (null as any)) && ((currentState == 'toptype1') && (nextState == 'toptype2')))) {
player = new import9.AnimationSequencePlayer([view.renderer.animate(element,new import10.AnimationStyles(import11.collectAndResolveStyles(collectedStyles,[startStateStyles])),import11.balanceAnimationKeyframes(collectedStyles,endStateStyles,[
new import12.AnimationKeyframe(0,new import10.AnimationStyles(import11.collectAndResolveStyles(collectedStyles,[{}]))),
new import12.AnimationKeyframe(1,new import10.AnimationStyles(import11.collectAndResolveStyles(collectedStyles,[{}])))
]
),500,0,'ease-in',previousPlayers)]);
totalTime = 500;
}
// toptype2 -> toptype3: 500ms ease-in.
if (((player == (null as any)) && ((currentState == 'toptype2') && (nextState == 'toptype3')))) {
player = new import9.AnimationSequencePlayer([view.renderer.animate(element,new import10.AnimationStyles(import11.collectAndResolveStyles(collectedStyles,[startStateStyles])),import11.balanceAnimationKeyframes(collectedStyles,endStateStyles,[
new import12.AnimationKeyframe(0,new import10.AnimationStyles(import11.collectAndResolveStyles(collectedStyles,[{}]))),
new import12.AnimationKeyframe(1,new import10.AnimationStyles(import11.collectAndResolveStyles(collectedStyles,[{}])))
]
),500,0,'ease-in',previousPlayers)]);
totalTime = 500;
}
// toptype3 -> toptype2: 500ms ease-in.
if (((player == (null as any)) && ((currentState == 'toptype3') && (nextState == 'toptype2')))) {
player = new import9.AnimationSequencePlayer([view.renderer.animate(element,new import10.AnimationStyles(import11.collectAndResolveStyles(collectedStyles,[startStateStyles])),import11.balanceAnimationKeyframes(collectedStyles,endStateStyles,[
new import12.AnimationKeyframe(0,new import10.AnimationStyles(import11.collectAndResolveStyles(collectedStyles,[{}]))),
new import12.AnimationKeyframe(1,new import10.AnimationStyles(import11.collectAndResolveStyles(collectedStyles,[{}])))
]
),500,0,'ease-in',previousPlayers)]);
totalTime = 500;
}
// toptype2 -> toptype1: 500ms ease-in.
if (((player == (null as any)) && ((currentState == 'toptype2') && (nextState == 'toptype1')))) {
player = new import9.AnimationSequencePlayer([view.renderer.animate(element,new import10.AnimationStyles(import11.collectAndResolveStyles(collectedStyles,[startStateStyles])),import11.balanceAnimationKeyframes(collectedStyles,endStateStyles,[
new import12.AnimationKeyframe(0,new import10.AnimationStyles(import11.collectAndResolveStyles(collectedStyles,[{}]))),
new import12.AnimationKeyframe(1,new import10.AnimationStyles(import11.collectAndResolveStyles(collectedStyles,[{}])))
]
),500,0,'ease-in',previousPlayers)]);
totalTime = 500;
}
// No transition matched: use a no-op player so lifecycle callbacks still fire.
if ((player == (null as any))) { (player = new import13.NoOpAnimationPlayer()); }
// After the transition finishes, apply the final styles and release the player.
player.onDone(():void => {
player.destroy();
import11.renderStyles(element,view.renderer,import11.prepareFinalAnimationStyles(startStateStyles,endStateStyles));
});
new import9.AnimationSequencePlayer(previousPlayers).destroy();
import11.renderStyles(element,view.renderer,import11.clearStyles(startStateStyles));
view.animationContext.queueAnimation(element,'topchange',player);
return new import8.AnimationTransition(player,currentState,nextState,totalTime);
}
// Render metadata for the component view itself, registering the three
// compiled animation trigger factories under their trigger names.
var renderType_HeadComponent:import2.RenderComponentType = import3.createRenderComponentType('',0,import4.ViewEncapsulation.None,styles_HeadComponent,{
heroState: HeadComponent_heroState_factory,
heroState2: HeadComponent_heroState2_factory,
topchange: HeadComponent_topchange_factory
}
);
export class View_HeadComponent0 extends import1.AppView<import0.HeadComponent> {
_text_0:any;
_el_1:any;
_text_2:any;
_el_3:any;
_text_4:any;
_anchor_5:any;
/*private*/ _vc_5:import14.ViewContainer;
_TemplateRef_5_5:any;
_NgIf_5_6:import15.Wrapper_NgIf;
_text_6:any;
_anchor_7:any;
/*private*/ _vc_7:import14.ViewContainer;
_TemplateRef_7_5:any;
_NgIf_7_6:import15.Wrapper_NgIf;
_text_8:any;
_text_9:any;
_el_10:any;
_text_11:any;
_el_12:any;
_text_13:any;
_el_14:any;
_text_15:any;
_el_16:any;
_text_17:any;
_text_18:any;
_el_19:any;
_text_20:any;
_el_21:any;
_text_22:any;
_text_23:any;
_el_24:any;
_text_25:any;
_el_26:any;
_text_27:any;
_text_28:any;
_text_29:any;
_el_30:any;
_text_31:any;
_el_32:any;
_text_33:any;
_el_34:any;
_text_35:any;
_el_36:any;
_text_37:any;
_el_38:any;
_text_39:any;
_text_40:any;
_el_41:any;
_text_42:any;
_text_43:any;
_el_44:any;
_text_45:any;
_text_46:any;
_el_47:any;
_text_48:any;
_text_49:any;
_text_50:any;
_el_51:any;
_text_52:any;
_el_53:any;
_text_54:any;
_el_55:any;
_text_56:any;
_text_57:any;
_text_58:any;
_el_59:any;
_text_60:any;
_el_61:any;
_text_62:any;
_text_63:any;
_text_64:any;
_text_65:any;
_el_66:any;
_text_67:any;
_el_68:any;
_text_69:any;
_el_70:any;
_text_71:any;
_text_72:any;
_el_73:any;
_text_74:any;
_text_75:any;
_el_76:any;
_text_77:any;
_text_78:any;
_el_79:any;
_text_80:any;
_text_81:any;
_el_82:any;
_text_83:any;
_text_84:any;
_el_85:any;
_text_86:any;
_text_87:any;
_el_88:any;
_text_89:any;
_text_90:any;
_el_91:any;
_text_92:any;
_text_93:any;
_el_94:any;
_text_95:any;
_text_96:any;
_el_97:any;
_text_98:any;
_text_99:any;
_el_100:any;
_text_101:any;
_text_102:any;
_el_103:any;
_text_104:any;
_text_105:any;
_text_106:any;
_el_107:any;
_text_108:any;
_el_109:any;
_text_110:any;
_el_111:any;
_text_112:any;
_text_113:any;
_el_114:any;
_text_115:any;
_text_116:any;
_el_117:any;
_text_118:any;
_text_119:any;
_text_120:any;
_text_121:any;
/*private*/ _expr_128:any;
/*private*/ _expr_129:any;
/*private*/ _expr_130:any;
/*private*/ _expr_131:any;
/*private*/ _expr_132:any;
/*private*/ _expr_133:any;
/*private*/ _expr_134:any;
constructor(viewUtils:import3.ViewUtils,parentView:import1.AppView<any>,parentIndex:number,parentElement:any) {
super(View_HeadComponent0,renderType_HeadComponent,import5.ViewType.COMPONENT,viewUtils,parentView,parentIndex,parentElement,import6.ChangeDetectorStatus.CheckAlways);
this._expr_128 = import16.UNINITIALIZED;
this._expr_129 = import16.UNINITIALIZED;
this._expr_130 = import16.UNINITIALIZED;
this._expr_131 = import16.UNINITIALIZED;
this._expr_132 = import16.UNINITIALIZED;
this._expr_133 = import16.UNINITIALIZED;
this._expr_134 = import16.UNINITIALIZED;
}
createInternal(rootSelector:string):import7.ComponentRef<any> {
const parentRenderNode:any = this.renderer.createViewRoot(this.parentElement);
this._text_0 = this.renderer.createText(parentRenderNode,'\n ',(null as any));
this._el_1 = import3.createRenderElement(this.renderer,parentRenderNode,'div',new import3.InlineArray2(2,'class','head_box'),(null as any));
this._text_2 = this.renderer.createText(this._el_1,'\n ',(null as any));
this._el_3 = import3.createRenderElement(this.renderer,this._el_1,'div',new import3.InlineArray2(2,'class','head_top'),(null as any));
this._text_4 = this.renderer.createText(this._el_3,'\n ',(null as any));
this._anchor_5 = this.renderer.createTemplateAnchor(this._el_3,(null as any));
this._vc_5 = new import14.ViewContainer(5,3,this,this._anchor_5);
this._TemplateRef_5_5 = new import17.TemplateRef_(this,5,this._anchor_5);
this._NgIf_5_6 = new import15.Wrapper_NgIf(this._vc_5.vcRef,this._TemplateRef_5_5);
this._text_6 = this.renderer.createText(this._el_3,'\n ',(null as any));
this._anchor_7 = this.renderer.createTemplateAnchor(this._el_3,(null as any));
this._vc_7 = new import14.ViewContainer(7,3,this,this._anchor_7);
this._TemplateRef_7_5 = new import17.TemplateRef_(this,7,this._anchor_7);
this._NgIf_7_6 = new import15.Wrapper_NgIf(this._vc_7.vcRef,this._TemplateRef_7_5);
this._text_8 = this.renderer.createText(this._el_3,'\n ',(null as any));
this._text_9 = this.renderer.createText(this._el_1,'\n ',(null as any));
this._el_10 = import3.createRenderElement(this.renderer,this._el_1,'div',new import3.InlineArray2(2,'class','head_bottom'),(null as any));
this._text_11 = this.renderer.createText(this._el_1,'\n ',(null as any));
this._el_12 = import3.createRenderElement(this.renderer,this._el_1,'ul',new import3.InlineArray2(2,'class','head_ul_list'),(null as any));
this._text_13 = this.renderer.createText(this._el_12,'\n ',(null as any));
this._el_14 = import3.createRenderElement(this.renderer,this._el_12,'li',import3.EMPTY_INLINE_ARRAY,(null as any));
this._text_15 = this.renderer.createText(this._el_14,'page1',(null as any));
this._el_16 = import3.createRenderElement(this.renderer,this._el_14,'div',new import3.InlineArray2(2,'class','head_list_bt'),(null as any));
this._text_17 = this.renderer.createText(this._el_16,'page1下部显示',(null as any));
this._text_18 = this.renderer.createText(this._el_12,'\n ',(null as any));
this._el_19 = import3.createRenderElement(this.renderer,this._el_12,'li',import3.EMPTY_INLINE_ARRAY,(null as any));
this._text_20 = this.renderer.createText(this._el_19,'page2',(null as any));
this._el_21 = import3.createRenderElement(this.renderer,this._el_19,'div',new import3.InlineArray2(2,'class','head_list_bt'),(null as any));
this._text_22 = this.renderer.createText(this._el_21,'page2下部显示',(null as any));
this._text_23 = this.renderer.createText(this._el_12,'\n ',(null as any));
this._el_24 = import3.createRenderElement(this.renderer,this._el_12,'li',import3.EMPTY_INLINE_ARRAY,(null as any));
this._text_25 = this.renderer.createText(this._el_24,'page3',(null as any));
this._el_26 = import3.createRenderElement(this.renderer,this._el_24,'div',new import3.InlineArray2(2,'class','head_list_bt'),(null as any));
this._text_27 = this.renderer.createText(this._el_26,'page3下部显示',(null as any));
this._text_28 = this.renderer.createText(this._el_12,'\n ',(null as any));
this._text_29 = this.renderer.createText(this._el_1,'\n ',(null as any));
this._el_30 = import3.createRenderElement(this.renderer,this._el_1,'div',new import3.InlineArray2(2,'class','head_bottom_box'),(null as any));
this._text_31 = this.renderer.createText(this._el_30,'\n ',(null as any));
this._el_32 = import3.createRenderElement(this.renderer,this._el_30,'ul',import3.EMPTY_INLINE_ARRAY,(null as any));
this._text_33 = this.renderer.createText(this._el_32,'\n ',(null as any));
this._el_34 = import3.createRenderElement(this.renderer,this._el_32,'li',new import3.InlineArray2(2,'class','diandian_li dian_li_1'),(null as any));
this._text_35 = this.renderer.createText(this._el_32,'\n ',(null as any));
this._el_36 = import3.createRenderElement(this.renderer,this._el_32,'li',new import3.InlineArray2(2,'class','diandian_li dian_li_2'),(null as any));
this._text_37 = this.renderer.createText(this._el_32,'\n ',(null as any));
this._el_38 = import3.createRenderElement(this.renderer,this._el_32,'li',new import3.InlineArray2(2,'class','diandian_li dian_li_3'),(null as any));
this._text_39 = this.renderer.createText(this._el_32,'\n ',(null as any));
this._text_40 = this.renderer.createText(this._el_30,'\n ',(null as any));
this._el_41 = import3.createRenderElement(this.renderer,this._el_30,'div',import3.EMPTY_INLINE_ARRAY,(null as any));
this._text_42 = this.renderer.createText(this._el_41,'\n ',(null as any));
this._text_43 = this.renderer.createText(this._el_30,'\n ',(null as any));
this._el_44 = import3.createRenderElement(this.renderer,this._el_30,'div',import3.EMPTY_INLINE_ARRAY,(null as any));
this._text_45 = this.renderer.createText(this._el_44,'\n ',(null as any));
this._text_46 = this.renderer.createText(this._el_30,'\n ',(null as any));
this._el_47 = import3.createRenderElement(this.renderer,this._el_30,'div',new import3.InlineArray2(2,'class','head_bottom_up'),(null as any));
this._text_48 = this.renderer.createText(this._el_30,'\n ',(null as any));
this._text_49 = this.renderer.createText(this._el_1,'\n ',(null as any));
this._text_50 = this.renderer.createText(parentRenderNode,'\n ',(null as any));
this._el_51 = import3.createRenderElement(this.renderer,parentRenderNode,'div',new import3.InlineArray2(2,'class','head_start'),(null as any));
this._text_52 = this.renderer.createText(this._el_51,'\n ',(null as any));
this._el_53 = import3.createRenderElement(this.renderer,this._el_51,'div',new import3.InlineArray2(2,'class','head_button_start'),(null as any));
this._text_54 = this.renderer.createText(this._el_53,'\n ',(null as any));
this._el_55 = import3.createRenderElement(this.renderer,this._el_53,'div',new import3.InlineArray2(2,'class','head_button_text'),(null as any));
this._text_56 = this.renderer.createText(this._el_55,'START',(null as any));
this._text_57 = this.renderer.createText(this._el_53,'\n ',(null as any));
this._text_58 = this.renderer.createText(this._el_51,'\n ',(null as any));
this._el_59 = import3.createRenderElement(this.renderer,this._el_51,'div',new import3.InlineArray2(2,'class','head_button_stop'),(null as any));
this._text_60 = this.renderer.createText(this._el_59,'\n ',(null as any));
this._el_61 = import3.createRenderElement(this.renderer,this._el_59,'div',new import3.InlineArray2(2,'class','head_button_text'),(null as any));
this._text_62 = this.renderer.createText(this._el_61,'STOP',(null as any));
this._text_63 = this.renderer.createText(this._el_59,'\n ',(null as any));
this._text_64 = this.renderer.createText(this._el_51,'\n ',(null as any));
this._text_65 = this.renderer.createText(parentRenderNode,'\n ',(null as any));
this._el_66 = import3.createRenderElement(this.renderer,parentRenderNode,'div',new import3.InlineArray4(4,'class','head_box_more','id','head_box_more'),(null as any));
this._text_67 = this.renderer.createText(this._el_66,'\n ',(null as any));
this._el_68 = import3.createRenderElement(this.renderer,this._el_66,'div',new import3.InlineArray2(2,'class','head_box_list'),(null as any));
this._text_69 = this.renderer.createText(this._el_68,'\n ',(null as any));
this._el_70 = import3.createRenderElement(this.renderer,this._el_68,'div',import3.EMPTY_INLINE_ARRAY,(null as any));
this._text_71 = this.renderer.createText(this._el_70,'123123',(null as any));
this._text_72 = this.renderer.createText(this._el_68,'\n ',(null as any));
this._el_73 = import3.createRenderElement(this.renderer,this._el_68,'div',import3.EMPTY_INLINE_ARRAY,(null as any));
this._text_74 = this.renderer.createText(this._el_73,'123123',(null as any));
this._text_75 = this.renderer.createText(this._el_68,'\n ',(null as any));
this._el_76 = import3.createRenderElement(this.renderer,this._el_68,'div',import3.EMPTY_INLINE_ARRAY,(null as any));
this._text_77 = this.renderer.createText(this._el_76,'123123',(null as any));
this._text_78 = this.renderer.createText(this._el_68,'\n ',(null as any));
this._el_79 = import3.createRenderElement(this.renderer,this._el_68,'div',import3.EMPTY_INLINE_ARRAY,(null as any));
this._text_80 = this.renderer.createText(this._el_79,'123123',(null as any));
this._text_81 = this.renderer.createText(this._el_68,'\n ',(null as any));
this._el_82 = import3.createRenderElement(this.renderer,this._el_68,'div',import3.EMPTY_INLINE_ARRAY,(null as any));
this._text_83 = this.renderer.createText(this._el_82,'123123',(null as any));
this._text_84 = this.renderer.createText(this._el_68,'\n ',(null as any));
this._el_85 = import3.createRenderElement(this.renderer,this._el_68,'div',import3.EMPTY_INLINE_ARRAY,(null as any));
this._text_86 = this.renderer.createText(this._el_85,'123123',(null as any));
this._text_87 = this.renderer.createText(this._el_68,'\n ',(null as any));
this._el_88 = import3.createRenderElement(this.renderer,this._el_68,'div',import3.EMPTY_INLINE_ARRAY,(null as any));
this._text_89 = this.renderer.createText(this._el_88,'123123',(null as any));
this._text_90 = this.renderer.createText(this._el_68,'\n ',(null as any));
this._el_91 = import3.createRenderElement(this.renderer,this._el_68,'div',import3.EMPTY_INLINE_ARRAY,(null as any));
this._text_92 = this.renderer.createText(this._el_91,'123123',(null as any));
this._text_93 = this.renderer.createText(this._el_68,'\n ',(null as any));
this._el_94 = import3.createRenderElement(this.renderer,this._el_68,'div',import3.EMPTY_INLINE_ARRAY,(null as any));
this._text_95 = this.renderer.createText(this._el_94,'123123',(null as any));
this._text_96 = this.renderer.createText(this._el_68,'\n ',(null as any));
this._el_97 = import3.createRenderElement(this.renderer,this._el_68,'div',import3.EMPTY_INLINE_ARRAY,(null as any));
this._text_98 = this.renderer.createText(this._el_97,'123123',(null as any));
this._text_99 = this.renderer.createText(this._el_68,'\n ',(null as any));
this._el_100 = import3.createRenderElement(this.renderer,this._el_68,'div',import3.EMPTY_INLINE_ARRAY,(null as any));
this._text_101 = this.renderer.createText(this._el_100,'123123',(null as any));
this._text_102 = this.renderer.createText(this._el_68,'\n ',(null as any));
this._el_103 = import3.createRenderElement(this.renderer,this._el_68,'div',import3.EMPTY_INLINE_ARRAY,(null as any));
this._text_104 = this.renderer.createText(this._el_103,'123123',(null as any));
this._text_105 = this.renderer.createText(this._el_68,'\n ',(null as any));
this._text_106 = this.renderer.createText(this._el_66,'\n ',(null as any));
this._el_107 = import3.createRenderElement(this.renderer,this._el_66,'div',new import3.InlineArray2(2,'class','headmorebox'),(null as any));
this._text_108 = this.renderer.createText(this._el_107,'\n ',(null as any));
this._el_109 = import3.createRenderElement(this.renderer,this._el_107,'div',new import3.InlineArray2(2,'class','headmoreall'),(null as any));
this._text_110 = this.renderer.createText(this._el_109,'\n ',(null as any));
this._el_111 = import3.createRenderElement(this.renderer,this._el_109,'div',new import3.InlineArray2(2,'class','headmoretop'),(null as any));
this._text_112 = this.renderer.createText(this._el_111,'STATUS',(null as any));
this._text_113 = this.renderer.createText(this._el_109,'\n ',(null as any));
this._el_114 = import3.createRenderElement(this.renderer,this._el_109,'div',new import3.InlineArray2(2,'class','headmoremid'),(null as any));
this._text_115 = this.renderer.createText(this._el_114,'Electronic fan has stopped working',(null as any));
this._text_116 = this.renderer.createText(this._el_109,'\n ',(null as any));
this._el_117 = import3.createRenderElement(this.renderer,this._el_109,'div',import3.EMPTY_INLINE_ARRAY,(null as any));
this._text_118 = this.renderer.createText(this._el_109,'\n ',(null as any));
this._text_119 = this.renderer.createText(this._el_107,'\n ',(null as any));
this._text_120 = this.renderer.createText(this._el_66,'\n ',(null as any));
this._text_121 = this.renderer.createText(parentRenderNode,'\n ',(null as any));
var disposable_0:Function = import3.subscribeToRenderElement(this,this._el_47,new import3.InlineArray2(2,'click',(null as any)),this.eventHandler(this.handleEvent_47));
var disposable_1:Function = import3.subscribeToRenderElement(this,this._el_107,new import3.InlineArray2(2,'click',(null as any)),this.eventHandler(this.handleEvent_107));
this.init((null as any),((<any>this.renderer).directRenderer? (null as any): [
this._text_0,
this._el_1,
this._text_2,
this._el_3,
this._text_4,
this._anchor_5,
this._text_6,
this._anchor_7,
this._text_8,
this._text_9,
this._el_10,
this._text_11,
this._el_12,
this._text_13,
this._el_14,
this._text_15,
this._el_16,
this._text_17,
this._text_18,
this._el_19,
this._text_20,
this._el_21,
this._text_22,
this._text_23,
this._el_24,
this._text_25,
this._el_26,
this._text_27,
this._text_28,
this._text_29,
this._el_30,
this._text_31,
this._el_32,
this._text_33,
this._el_34,
this._text_35,
this._el_36,
this._text_37,
this._el_38,
this._text_39,
this._text_40,
this._el_41,
this._text_42,
this._text_43,
this._el_44,
this._text_45,
this._text_46,
this._el_47,
this._text_48,
this._text_49,
this._text_50,
this._el_51,
this._text_52,
this._el_53,
this._text_54,
this._el_55,
this._text_56,
this._text_57,
this._text_58,
this._el_59,
this._text_60,
this._el_61,
this._text_62,
this._text_63,
this._text_64,
this._text_65,
this._el_66,
this._text_67,
this._el_68,
this._text_69,
this._el_70,
this._text_71,
this._text_72,
this._el_73,
this._text_74,
this._text_75,
this._el_76,
this._text_77,
this._text_78,
this._el_79,
this._text_80,
this._text_81,
this._el_82,
this._text_83,
this._text_84,
this._el_85,
this._text_86,
this._text_87,
this._el_88,
this._text_89,
this._text_90,
this._el_91,
this._text_92,
this._text_93,
this._el_94,
this._text_95,
this._text_96,
this._el_97,
this._text_98,
this._text_99,
this._el_100,
this._text_101,
this._text_102,
this._el_103,
this._text_104,
this._text_105,
this._text_106,
this._el_107,
this._text_108,
this._el_109,
this._text_110,
this._el_111,
this._text_112,
this._text_113,
this._el_114,
this._text_115,
this._text_116,
this._el_117,
this._text_118,
this._text_119,
this._text_120,
this._text_121
]
),[
disposable_0,
disposable_1
]
);
return (null as any);
}
injectorGetInternal(token:any,requestNodeIndex:number,notFoundResult:any):any {
if (((token === import17.TemplateRef) && (5 === requestNodeIndex))) { return this._TemplateRef_5_5; }
if (((token === import18.NgIf) && (5 === requestNodeIndex))) { return this._NgIf_5_6.context; }
if (((token === import17.TemplateRef) && (7 === requestNodeIndex))) { return this._TemplateRef_7_5; }
if (((token === import18.NgIf) && (7 === requestNodeIndex))) { return this._NgIf_7_6.context; }
return notFoundResult;
}
// Compiler-generated change detection for the component template: animation
// trigger bindings first, then the two *ngIf directives and their nested
// views, then interpolated [class] bindings.
detectChangesInternal(throwOnChange:boolean):void {
// Animation triggers: invoke the transition factory only when the bound
// state changed; UNINITIALIZED is mapped to the 'void' state.
const currVal_128:any = this.context.style1;
if (import3.checkBinding(throwOnChange,this._expr_128,currVal_128)) {
var animationTransition_heroState:any = this.componentType.animations['heroState'](this,this._el_1,((this._expr_128 == import16.UNINITIALIZED)? 'void': this._expr_128),((currVal_128 == import16.UNINITIALIZED)? 'void': currVal_128));
this._expr_128 = currVal_128;
}
const currVal_129:any = this.context.topchange;
if (import3.checkBinding(throwOnChange,this._expr_129,currVal_129)) {
var animationTransition_topchange:any = this.componentType.animations['topchange'](this,this._el_12,((this._expr_129 == import16.UNINITIALIZED)? 'void': this._expr_129),((currVal_129 == import16.UNINITIALIZED)? 'void': currVal_129));
this._expr_129 = currVal_129;
}
const currVal_133:any = this.context.style2;
if (import3.checkBinding(throwOnChange,this._expr_133,currVal_133)) {
var animationTransition_heroState2:any = this.componentType.animations['heroState2'](this,this._el_68,((this._expr_133 == import16.UNINITIALIZED)? 'void': this._expr_133),((currVal_133 == import16.UNINITIALIZED)? 'void': currVal_133));
this._expr_133 = currVal_133;
}
// *ngIf bindings: left/right header buttons show when the bound strings are
// non-empty (anchors 5 and 7).
const currVal_5_0_0:any = (this.context.leftbtn != '');
this._NgIf_5_6.check_ngIf(currVal_5_0_0,throwOnChange,false);
this._NgIf_5_6.ngDoCheck(this,this._anchor_5,throwOnChange);
const currVal_7_0_0:any = (this.context.rightbtn != '');
this._NgIf_7_6.check_ngIf(currVal_7_0_0,throwOnChange,false);
this._NgIf_7_6.ngDoCheck(this,this._anchor_7,throwOnChange);
this._vc_5.detectChangesInNestedViews(throwOnChange);
this._vc_7.detectChangesInNestedViews(throwOnChange);
// Interpolated className bindings — re-applied only when the value changed.
const currVal_130:any = import3.inlineInterpolate(1,'head_bottom_dian ',this.context.dianhover,'');
if (import3.checkBinding(throwOnChange,this._expr_130,currVal_130)) {
this.renderer.setElementProperty(this._el_32,'className',currVal_130);
this._expr_130 = currVal_130;
}
const currVal_131:any = import3.inlineInterpolate(1,'up_or_down_icon head_up_or_downleft ',this.context.rotate_1801,'');
if (import3.checkBinding(throwOnChange,this._expr_131,currVal_131)) {
this.renderer.setElementProperty(this._el_41,'className',currVal_131);
this._expr_131 = currVal_131;
}
const currVal_132:any = import3.inlineInterpolate(1,'up_or_down_icon head_up_or_downright ',this.context.rotate_1801,'');
if (import3.checkBinding(throwOnChange,this._expr_132,currVal_132)) {
this.renderer.setElementProperty(this._el_44,'className',currVal_132);
this._expr_132 = currVal_132;
}
const currVal_134:any = import3.inlineInterpolate(1,'headmorebottom ',this.context.rotate_180,'');
if (import3.checkBinding(throwOnChange,this._expr_134,currVal_134)) {
this.renderer.setElementProperty(this._el_117,'className',currVal_134);
this._expr_134 = currVal_134;
}
}
// Tears down the embedded views held by the two *ngIf view containers.
destroyInternal():void {
this._vc_5.destroyNestedViews();
this._vc_7.destroyNestedViews();
}
// Runs the leave ('void') transition for each animation trigger when the
// view is detached; the returned transition objects are intentionally unused.
detachInternal():void {
var animationTransition_heroState:any = this.componentType.animations['heroState'](this,this._el_1,this._expr_128,'void');
var animationTransition_topchange:any = this.componentType.animations['topchange'](this,this._el_12,this._expr_129,'void');
var animationTransition_heroState2:any = this.componentType.animations['heroState2'](this,this._el_68,this._expr_133,'void');
}
// Factory for the embedded views backing the two *ngIf templates
// (anchor nodes 5 and 7).
createEmbeddedViewInternal(nodeIndex:number):import1.AppView<any> {
if ((nodeIndex == 5)) { return new View_HeadComponent1(this.viewUtils,this,5,this._anchor_5,this._vc_5); }
if ((nodeIndex == 7)) { return new View_HeadComponent2(this.viewUtils,this,7,this._anchor_7,this._vc_7); }
return (null as any);
}
// Click handler bound on template node 47; delegates to the component's
// changestyle(). Returns false only when the handler explicitly returns false
// (preventDefault semantics).
handleEvent_47(eventName:string,$event:any):boolean {
this.markPathToRootAsCheckOnce();
var result:boolean = true;
if ((eventName == 'click')) {
const pd_sub_0:any = ((<any>this.context.changestyle()) !== false);
result = (pd_sub_0 && result);
}
return result;
}
// Click handler bound on template node 107; delegates to the component's
// changestyle2(). Mirrors handleEvent_47.
handleEvent_107(eventName:string,$event:any):boolean {
this.markPathToRootAsCheckOnce();
var result:boolean = true;
if ((eventName == 'click')) {
const pd_sub_0:any = ((<any>this.context.changestyle2()) !== false);
result = (pd_sub_0 && result);
}
return result;
}
}
// Compiler-generated embedded view for the left header button's *ngIf
// template: a single <button class="head_top_btnleft"> whose text is bound to
// the parent component's `leftbtn` and whose click calls changeheadleft().
class View_HeadComponent1 extends import1.AppView<any> {
_el_0:any;
_text_1:any;
/*private*/ _expr_2:any;
constructor(viewUtils:import3.ViewUtils,parentView:import1.AppView<any>,parentIndex:number,parentElement:any,declaredViewContainer:import14.ViewContainer) {
super(View_HeadComponent1,renderType_HeadComponent,import5.ViewType.EMBEDDED,viewUtils,parentView,parentIndex,parentElement,import6.ChangeDetectorStatus.CheckAlways,declaredViewContainer);
this._expr_2 = import16.UNINITIALIZED;
}
createInternal(rootSelector:string):import7.ComponentRef<any> {
this._el_0 = import3.createRenderElement(this.renderer,(null as any),'button',new import3.InlineArray2(2,'class','head_top_btnleft'),(null as any));
this._text_1 = this.renderer.createText(this._el_0,'',(null as any));
var disposable_0:Function = import3.subscribeToRenderElement(this,this._el_0,new import3.InlineArray2(2,'click',(null as any)),this.eventHandler(this.handleEvent_0));
this.init(this._el_0,((<any>this.renderer).directRenderer? (null as any): [
this._el_0,
this._text_1
]
),[disposable_0]);
return (null as any);
}
detectChangesInternal(throwOnChange:boolean):void {
// Text interpolation {{leftbtn}} — updated only when the value changed.
const currVal_2:any = import3.inlineInterpolate(1,'',this.parentView.context.leftbtn,'');
if (import3.checkBinding(throwOnChange,this._expr_2,currVal_2)) {
this.renderer.setText(this._text_1,currVal_2);
this._expr_2 = currVal_2;
}
}
visitRootNodesInternal(cb:any,ctx:any):void {
cb(this._el_0,ctx);
}
handleEvent_0(eventName:string,$event:any):boolean {
this.markPathToRootAsCheckOnce();
var result:boolean = true;
if ((eventName == 'click')) {
const pd_sub_0:any = ((<any>this.parentView.context.changeheadleft()) !== false);
result = (pd_sub_0 && result);
}
return result;
}
}
// Compiler-generated embedded view for the right header button's *ngIf
// template; mirrors View_HeadComponent1 with `rightbtn` text and a click
// handler that calls changeheadright().
class View_HeadComponent2 extends import1.AppView<any> {
_el_0:any;
_text_1:any;
/*private*/ _expr_2:any;
constructor(viewUtils:import3.ViewUtils,parentView:import1.AppView<any>,parentIndex:number,parentElement:any,declaredViewContainer:import14.ViewContainer) {
super(View_HeadComponent2,renderType_HeadComponent,import5.ViewType.EMBEDDED,viewUtils,parentView,parentIndex,parentElement,import6.ChangeDetectorStatus.CheckAlways,declaredViewContainer);
this._expr_2 = import16.UNINITIALIZED;
}
createInternal(rootSelector:string):import7.ComponentRef<any> {
this._el_0 = import3.createRenderElement(this.renderer,(null as any),'button',new import3.InlineArray2(2,'class','head_top_btnright'),(null as any));
this._text_1 = this.renderer.createText(this._el_0,'',(null as any));
var disposable_0:Function = import3.subscribeToRenderElement(this,this._el_0,new import3.InlineArray2(2,'click',(null as any)),this.eventHandler(this.handleEvent_0));
this.init(this._el_0,((<any>this.renderer).directRenderer? (null as any): [
this._el_0,
this._text_1
]
),[disposable_0]);
return (null as any);
}
detectChangesInternal(throwOnChange:boolean):void {
// Text interpolation {{rightbtn}} — updated only when the value changed.
const currVal_2:any = import3.inlineInterpolate(1,'',this.parentView.context.rightbtn,'');
if (import3.checkBinding(throwOnChange,this._expr_2,currVal_2)) {
this.renderer.setText(this._text_1,currVal_2);
this._expr_2 = currVal_2;
}
}
visitRootNodesInternal(cb:any,ctx:any):void {
cb(this._el_0,ctx);
}
handleEvent_0(eventName:string,$event:any):boolean {
this.markPathToRootAsCheckOnce();
var result:boolean = true;
if ((eventName == 'click')) {
const pd_sub_0:any = ((<any>this.parentView.context.changeheadright()) !== false);
result = (pd_sub_0 && result);
}
return result;
}
}
#!/bin/bash
# Bootstraps an OKE admin host: installs kubectl/git and the OCI CLI, fetches
# a kubeconfig for the cluster named in instance metadata, deploys Kubeflow
# (kubeflow/manifests via kustomize), exposes the istio ingress gateway via a
# LoadBalancer, and enables TLS on it with a self-signed certificate.
#
# Fixes vs. previous revision:
#  - removed the call to the undefined function `fetch_metadata` (it failed
#    with "command not found" on every kubeconfig retry iteration);
#  - removed unused variables RET_CODE / INDEX_NR / SLEEP_TIME;
#  - quoted the pod-status test so it cannot become a shell syntax error when
#    the kubectl output is empty or multi-word.
set -o pipefail
LOG_FILE="/var/log/OKE-kubeflow-initialize.log"
# log MESSAGE... — timestamped log line tagged with the current EXECNAME phase.
log() {
echo "$(date) [${EXECNAME}]: $*" >> "${LOG_FILE}"
}
# Instance metadata. NOTE(review): region/namespace use the v2 endpoint while
# oke_cluster_id/kubeflow_password use v1 — confirm v1 is still enabled on
# the tenancy, otherwise those two values come back empty.
region=$(curl -s -H "Authorization: Bearer Oracle" -L http://169.254.169.254/opc/v2/instance/regionInfo/regionIdentifier)
namespace=$(curl -s -H "Authorization: Bearer Oracle" -L http://169.254.169.254/opc/v2/instance/metadata/namespace)
oke_cluster_id=$(curl -s -H "Authorization: Bearer Oracle" -L http://169.254.169.254/opc/v1/instance/metadata/oke_cluster_id)
kubeflow_password=$(curl -s -H "Authorization: Bearer Oracle" -L http://169.254.169.254/opc/v1/instance/metadata/kubeflow_password)
# e.g. "us-ashburn-1" -> country="us", city="ashburn"; used in the TLS cert.
country=$(echo "$region" | awk -F'-' '{print $1}')
city=$(echo "$region" | awk -F'-' '{print $2}')
EXECNAME="Kubectl & Git"
log "->Install"
# Get the latest kubectl and not use archaic ones that are in the default repo.
cat <<EOF | sudo tee /etc/yum.repos.d/kubernetes.repo
[kubernetes]
name=Kubernetes
baseurl=https://packages.cloud.google.com/yum/repos/kubernetes-el7-x86_64
enabled=1
gpgcheck=0
repo_gpgcheck=0
gpgkey=https://packages.cloud.google.com/yum/doc/yum-key.gpg https://packages.cloud.google.com/yum/doc/rpm-package-key.gpg
EOF
yum install kubectl git screen -y >> "$LOG_FILE"
log "->Configure"
# Shell conveniences for both root and the opc user.
mkdir -p /home/opc/.kube
echo "source <(kubectl completion bash)" >> ~/.bashrc
echo "alias k='kubectl'" >> ~/.bashrc
echo "source <(kubectl completion bash)" >> /home/opc/.bashrc
echo "alias k='kubectl'" >> /home/opc/.bashrc
source ~/.bashrc
EXECNAME="OCI CLI"
log "->Download"
curl -L -O https://raw.githubusercontent.com/oracle/oci-cli/master/scripts/install/install.sh >> "$LOG_FILE"
chmod a+x install.sh
log "->Install"
./install.sh --accept-all-defaults >> "$LOG_FILE"
# Authenticate the CLI via the instance principal in all future shells.
echo "export OCI_CLI_AUTH=instance_principal" >> ~/.bash_profile
echo "export OCI_CLI_AUTH=instance_principal" >> ~/.bashrc
echo "export OCI_CLI_AUTH=instance_principal" >> /home/opc/.bash_profile
echo "export OCI_CLI_AUTH=instance_principal" >> /home/opc/.bashrc
EXECNAME="Kubeconfig"
log "->Generate"
# Retry until the CLI succeeds in writing a kubeconfig — instance-principal
# permissions can take a while to propagate after provisioning.
while [ ! -f /root/.kube/config ]
do
sleep 5
source ~/.bashrc
log "-->Attempting to generate kubeconfig"
oci ce cluster create-kubeconfig --cluster-id "${oke_cluster_id}" --file /root/.kube/config --region "${region}" --token-version 2.0.0 >> "$LOG_FILE"
log "-->Finished attempt"
done
mkdir -p /home/opc/.kube/
cp /root/.kube/config /home/opc/.kube/config
chown -R opc:opc /home/opc/.kube/
EXECNAME="Kustomize"
log "->Fetch & deploy to /bin/"
# Kubeflow's manifests require this specific kustomize release.
wget https://github.com/kubernetes-sigs/kustomize/releases/download/v3.2.0/kustomize_3.2.0_linux_amd64
mv kustomize_3.2.0_linux_amd64 /bin/kustomize
chmod +x /bin/kustomize
EXECNAME="Kubeflow"
log "->Clone Repo"
mkdir -p /opt/kubeflow
cd /opt/kubeflow
git clone https://github.com/kubeflow/manifests.git >> "$LOG_FILE"
cd manifests
# Replace the default dex password hash with the one from instance metadata.
cp common/dex/base/config-map.yaml common/dex/base/config-map.yaml.DEFAULT
cat common/dex/base/config-map.yaml.DEFAULT |sed "s|hash:.*|hash: $kubeflow_password|" >common/dex/base/config-map.yaml
log "->Install via Kustomize"
source <(kubectl completion bash)
log "-->Build & Deploy Kubeflow"
# CRDs are applied alongside their CRs, so the first passes can fail; keep
# re-applying until everything is accepted.
while ! kustomize build example | kubectl apply --kubeconfig /root/.kube/config -f - | tee -a "$LOG_FILE"; do echo 'Retrying to apply resources'; sleep 60; done
cat <<EOF | sudo tee /tmp/patchservice_lb.yaml
spec:
type: LoadBalancer
metadata:
annotations:
oci.oraclecloud.com/load-balancer-type: "nlb"
EOF
# Wait (up to ~3 minutes) for the ingress gateway pod to be Running before
# patching its service type.
for i in {1..3}; do
if [ "$(kubectl --kubeconfig /root/.kube/config get pods -n istio-system --no-headers=true | grep -Ei ingressgateway | awk '{print $3}')" = "Running" ]; then
echo "Ingress Gateway has been created successfully"
break
fi
sleep 60
done
kubectl --kubeconfig /root/.kube/config patch svc istio-ingressgateway -n istio-system -p "$(cat /tmp/patchservice_lb.yaml)" | tee -a "$LOG_FILE"
# Give the cloud provider time to allocate the load-balancer IP.
sleep 120
LBIP=$(kubectl --kubeconfig /root/.kube/config get svc istio-ingressgateway -n istio-system -o=jsonpath="{.status.loadBalancer.ingress[0].ip}")
echo "Load Balancer IP is ${LBIP}" |tee -a "$LOG_FILE"
# Self-signed certificate whose SAN is the load-balancer IP.
mkdir -p kfsecure
cd kfsecure
cat <<EOF | sudo tee san.cnf
[req]
default_bits = 2048
distinguished_name = req_distinguished_name
req_extensions = req_ext
x509_extensions = v3_req
prompt = no
[req_distinguished_name]
countryName = ${country}
stateOrProvinceName = ${city}
localityName = N/A
organizationName = Self-signed certificate
commonName = ${LBIP}: Self-signed certificate
[req_ext]
subjectAltName = @alt_names
[v3_req]
subjectAltName = @alt_names
[alt_names]
IP.1 = ${LBIP}
EOF
openssl req -x509 -nodes -days 730 -newkey rsa:2048 -keyout key.tls -out cert.tls -config san.cnf
kubectl --kubeconfig /root/.kube/config create -n istio-system secret tls kubeflow-tls-cert --key=key.tls --cert=cert.tls | tee -a "$LOG_FILE"
# Serve HTTPS with the new cert and redirect HTTP -> HTTPS on the gateway.
cat <<EOF | sudo tee sslenableingress.yaml
apiVersion: v1
items:
- apiVersion: networking.istio.io/v1beta1
kind: Gateway
metadata:
annotations:
name: kubeflow-gateway
namespace: kubeflow
spec:
selector:
istio: ingressgateway
servers:
- hosts:
- "*"
port:
name: https
number: 443
protocol: HTTPS
tls:
mode: SIMPLE
credentialName: kubeflow-tls-cert
- hosts:
- "*"
port:
name: http
number: 80
protocol: HTTP
tls:
httpsRedirect: true
kind: List
metadata:
resourceVersion: ""
selfLink: ""
EOF
kubectl --kubeconfig /root/.kube/config apply -f sslenableingress.yaml
echo "Load Balancer IP is ${LBIP}" |tee -a "$LOG_FILE"
echo "Point your browser to https://${LBIP}" |tee -a "$LOG_FILE"
|
#!/usr/bin/env zsh
# Builds and starts the client binary in the background, recording its PID in
# client.pid. A non-empty PID file means a client was already started here.
#
# Fixes vs. previous revision:
#  - `{$PID_FILE}` in the message was a literal-brace typo (now ${PID_FILE});
#  - `cat` no longer prints an error when the PID file does not exist yet;
#  - the "written to" message is printed after the PID is actually written,
#    and the file is overwritten (>) rather than appended to.
PID_FILE=client.pid
PID=$(cat "${PID_FILE}" 2>/dev/null)
if [ -z "${PID}" ]; then
go build ../client/
./client &
echo $! > "${PID_FILE}"
echo "Process id for clients is written to location: ${PID_FILE}"
else
echo "Clients are already started in this folder."
exit 0
fi
|
module.exports = {
up: (queryInterface) => {
return queryInterface.bulkInsert('NodeUptime', [
{// NODE_1
isWorking: true,
nodeId: 1,
createdAt: new Date('2019-06-11T17:59:40').toLocaleString(),
updatedAt: new Date('2019-06-11T17:59:40').toLocaleString()
},
{
isWorking: false,
nodeId: 1,
createdAt: new Date('2019-06-11T18:59:42').toLocaleString(),
updatedAt: new Date('2019-06-11T18:59:42').toLocaleString()
},
{
isWorking: true,
nodeId: 1,
createdAt: new Date('2019-06-11T19:59:40').toLocaleString(),
updatedAt: new Date('2019-06-11T19:59:40').toLocaleString()
},
{// NODE_2
isWorking: false,
nodeId: 2,
createdAt: new Date('2019-03-08T13:58:12').toLocaleString(),
updatedAt: new Date('2019-03-08T13:58:12').toLocaleString()
},
{
isWorking: true,
nodeId: 2,
createdAt: new Date('2018-10-14T12:54:00').toLocaleString(),
updatedAt: new Date('2018-10-14T12:54:00').toLocaleString()
},
{// NODE_3
isWorking: true,
nodeId: 3,
createdAt: new Date('2019-06-12T19:59:40').toLocaleString(),
updatedAt: new Date('2019-06-12T19:59:40').toLocaleString()
},
{// NODE_4
isWorking: false,
nodeId: 4,
createdAt: new Date('2019-06-12T19:59:40').toLocaleString(),
updatedAt: new Date('2019-06-12T19:59:40').toLocaleString()
},
{// NODE_5
isWorking: true,
nodeId: 5,
createdAt: new Date('2019-06-13T19:59:40').toLocaleString(),
updatedAt: new Date('2019-06-13T19:59:40').toLocaleString()
},
{// NODE_6
isWorking: false,
nodeId: 6,
createdAt: new Date('2019-06-13T19:59:40').toLocaleString(),
updatedAt: new Date('2019-06-13T19:59:40').toLocaleString()
},
{// NODE_7
isWorking: true,
nodeId: 7,
createdAt: new Date('2019-06-13T19:59:40').toLocaleString(),
updatedAt: new Date('2019-06-13T19:59:40').toLocaleString()
},
{// NODE_8
isWorking: false,
nodeId: 8,
createdAt: new Date('2019-06-13T19:59:40').toLocaleString(),
updatedAt: new Date('2019-06-13T19:59:40').toLocaleString()
},], {});
},
down: (queryInterface) => {
return queryInterface.bulkDelete('NodeUptime', null, {});
}
};
|
import re

def extractScriptUrls(html):
    """Extract the URLs referenced by ``<script src="...">`` tags.

    Parameters
    ----------
    html : str
        Raw HTML text.

    Returns
    -------
    list of str
        Unique src URLs found in script opening tags (order unspecified).

    Fix: the previous implementation additionally matched bare ``<script>``
    tags (inline scripts, which by definition carry no ``src`` attribute) and
    then grepped their JavaScript *source text* for ``src=``, which pulled
    spurious "URLs" out of string literals inside the script body. That
    branch is removed; only attributes of the opening tag are considered.
    """
    script_urls = set()
    # Match any <script ...> opening tag with a src attribute, regardless of
    # where src appears among the tag's other attributes.
    script_pattern = r'<script\b[^>]*?\bsrc=["\']([^"\']+?)["\']'
    script_urls.update(re.findall(script_pattern, html, flags=re.IGNORECASE))
    return list(script_urls)
#!/bin/bash
# ----------------------------------------------------------------------------
#
# Package : load-bmfont
# Version : v1.4.1
# Source : https://github.com/Jam3/load-bmfont.git
# Tested on : RHEL 7.6
# Node Version : v12.16.1
# Maintainer : Amol Patil <amol.patil2@ibm.com>
#
# Disclaimer: This script has been tested in non-root mode on given
# ========== platform using the mentioned version of the package.
# It may not work as expected with newer versions of the
# package and/or distribution. In such case, please
# contact "Maintainer" of this script.
# ----------------------------------------------------------------------------
set -e
PACKAGE_VERSION=v1.4.1
# Refresh the package metadata and system packages first.
sudo yum clean all
sudo yum -y update
# Install nvm only when it is not already present.
if [ ! -d ~/.nvm ]; then
  sudo yum install -y openssl-devel.ppc64le curl git
  curl https://raw.githubusercontent.com/creationix/nvm/v0.33.0/install.sh | bash
fi
source ~/.nvm/nvm.sh
# Install node v12.16.1 unless nvm already has it, then make it the default.
if [ "$(nvm list | grep -c 'v12.16.1')" -eq 0 ]; then
  nvm install v12.16.1
fi
nvm alias default v12.16.1
# Fetch the package at the pinned version and run its node test suite.
git clone https://github.com/Jam3/load-bmfont.git && cd load-bmfont
git checkout "$PACKAGE_VERSION"
npm install npm@6.14.5
npm install
npm run test-node
|
#!/bin/bash
# Generates TLS material with cfssl and prints (client-side dry run, no
# cluster write) the YAML for a 'test-kafka-operator' Secret in the kafka
# namespace, with one Cert/Key file pair per identity.
(
# Abort if the cfssl directory is missing instead of running gencert.sh in
# the wrong place.
cd cfssl || exit 1
./gencert.sh
FILE_ARGS=()
for i in ca server peer client; do
cp "${i}.pem" "${i}Cert"
cp "${i}-key.pem" "${i}Key"
FILE_ARGS+=(--from-file "${i}Cert" --from-file "${i}Key")
done
# --dry-run=client: the bare --dry-run flag was deprecated in kubectl 1.18
# and later removed; modern kubectl rejects it.
kubectl create secret generic test-kafka-operator -n kafka "${FILE_ARGS[@]}" --dry-run=client -o yaml
)
|
#!/usr/bin/env bash
#
# SPDX-License-Identifier: Apache-2.0
#
set -ex
# Run the tutorial playbooks strictly in order; `set -e` aborts the whole
# sequence on the first failing playbook.
playbooks=(
  01-create-ordering-organization-components.yml
  02-create-endorsing-organization-components.yml
  03-export-organization.yml
  04-import-organization.yml
  05-add-organization-to-consortium.yml
  06-export-ordering-service.yml
  07-import-ordering-service.yml
  08-create-channel.yml
  09-join-peer-to-channel.yml
  10-add-anchor-peer-to-channel.yml
  11-create-endorsing-organization-components.yml
  12-export-organization.yml
  13-import-organization.yml
  14-add-organization-to-channel.yml
  15-import-ordering-service.yml
  16-join-peer-to-channel.yml
  17-add-anchor-peer-to-channel.yml
  18-install-chaincode.yml
  19-install-chaincode.yml
  20-instantiate-chaincode.yml
  21-register-application.yml
  22-register-application.yml
)
for playbook in "${playbooks[@]}"; do
  ansible-playbook "${playbook}"
done
|
// Package dltree implements red-black tree for key value pairs with domain label keys.
package dltree
import "github.com/infobloxopen/go-trees/domain"
// Tree is a red-black tree for key-value pairs where key is domain label.
type Tree struct {
root *node
}
// Pair is a key-value pair representing tree node content.
type Pair struct {
Key string
Value interface{}
}
// NewTree creates empty tree.
func NewTree() *Tree {
return new(Tree)
}
// Insert puts given key-value pair to the tree and returns pointer to new root.
func (t *Tree) Insert(key string, value interface{}) *Tree {
var (
n *node
)
if t != nil {
n = t.root
}
dl, _ := domain.MakeLabel(key)
return &Tree{root: n.insert(dl, value)}
}
// RawInsert puts given key-value pair to the tree and returns pointer to new root. Expects bindary domain label on input.
func (t *Tree) RawInsert(key string, value interface{}) *Tree {
var (
n *node
)
if t != nil {
n = t.root
}
return &Tree{root: n.insert(key, value)}
}
// InplaceInsert inserts or replaces given key-value pair in the tree. The method inserts data directly to current tree so make sure you have exclusive access to it.
func (t *Tree) InplaceInsert(key string, value interface{}) {
dl, _ := domain.MakeLabel(key)
t.root = t.root.inplaceInsert(dl, value)
}
// RawInplaceInsert inserts or replaces given key-value pair in the tree. The method inserts data directly to current tree so make sure you have exclusive access to it. Expects bindary domain label on input.
func (t *Tree) RawInplaceInsert(key string, value interface{}) {
t.root = t.root.inplaceInsert(key, value)
}
// Get returns value by given key.
func (t *Tree) Get(key string) (interface{}, bool) {
if t == nil {
return nil, false
}
dl, _ := domain.MakeLabel(key)
return t.root.get(dl)
}
// RawGet returns value by given key. Expects bindary domain label on input.
func (t *Tree) RawGet(key string) (interface{}, bool) {
if t == nil {
return nil, false
}
return t.root.get(key)
}
// Enumerate returns channel which is populated by key pair values in order of keys.
func (t *Tree) Enumerate() chan Pair {
ch := make(chan Pair)
go func() {
defer close(ch)
if t == nil {
return
}
t.root.enumerate(ch)
}()
return ch
}
// RawEnumerate returns channel which is populated by key pair values in order of keys. Returns binary domain labels.
func (t *Tree) RawEnumerate() chan Pair {
ch := make(chan Pair)
go func() {
defer close(ch)
if t == nil {
return
}
t.root.rawEnumerate(ch)
}()
return ch
}
// Delete removes node by given key. It returns copy of tree and true if node has been indeed deleted otherwise copy of tree and false.
func (t *Tree) Delete(key string) (*Tree, bool) {
if t == nil {
return nil, false
}
dl, _ := domain.MakeLabel(key)
root, ok := t.root.del(dl)
return &Tree{root: root}, ok
}
// RawDelete removes node by given key. It returns copy of tree and true if node has been indeed deleted otherwise copy of tree and false. Expects bindary domain label on input.
func (t *Tree) RawDelete(key string) (*Tree, bool) {
if t == nil {
return nil, false
}
root, ok := t.root.del(key)
return &Tree{root: root}, ok
}
// IsEmpty returns true if given tree has no nodes.
func (t *Tree) IsEmpty() bool {
return t == nil || t.root == nil
}
// Dot dumps tree to Graphviz .dot format.
func (t *Tree) Dot() string {
body := ""
if t != nil {
body = t.root.dot()
}
return "digraph d {\n" + body + "}\n"
}
|
use serde::{Deserialize, Serialize};
/// Payload of a Discord-style GUILD_INTEGRATIONS_UPDATE event; only the
/// guild identifier is modeled here.
#[derive(Debug, Deserialize, Serialize)]
pub struct GuildIntegrationsUpdate {
    /// Guild identifier as received on the wire (kept as a string).
    pub guild_id: String,
    // Add other fields if necessary
}
fn validate_guild_id(json_data: &str) -> Result<(), String> {
let result: Result<GuildIntegrationsUpdate, _> = serde_json::from_str(json_data);
match result {
Ok(update) => {
if update.guild_id.is_empty() || !update.guild_id.chars().all(|c| c.is_alphanumeric() || c == '_' || c == '-') {
Err("Invalid guild_id".to_string())
} else {
Ok(())
}
}
Err(_) => Err("Invalid JSON format".to_string()),
}
} |
'use strict';
const minHostBits = require('min-host-bits');
// Exports n -> 32 - minHostBits(n): the number of prefix (network) bits left
// in a 32-bit IPv4 address after reserving the host bits for n.
// NOTE(review): minHostBits' exact semantics are assumed from its name —
// confirm against the min-host-bits package documentation.
module.exports = n =>
32 - minHostBits(n);
|
// WebRTC broadcast glue. NOTE(review): `pc`, `socket`, `error` and
// `sessionDescription` are not defined in this file — they must come from an
// earlier script; confirm `sessionDescription` wraps RTCSessionDescription.
var answersFrom = {};
// Capture microphone audio and attach it to the peer connection.
navigator.getUserMedia({ audio: true }, function (stream) {
pc.addStream(stream);
}, error);
// Create an SDP offer, set it as the local description, and push it to the
// signaling server.
function createOffer () {
pc.createOffer(function(offer) {
pc.setLocalDescription(new sessionDescription(offer), function () {
socket.emit('make-offer', {
offer: offer
});
}, error);
}, error);
}
// When an answer arrives, apply it and (once per answering socket) re-offer.
// NOTE(review): createOffer takes no parameters, so the data.socket argument
// below is silently ignored — confirm whether a per-socket offer was intended.
socket.on('answer-made', function (data) {
pc.setRemoteDescription(new sessionDescription(data.answer), function () {
if (!answersFrom[data.socket]) {
createOffer(data.socket);
answersFrom[data.socket] = true;
}
}, error);
});
// Toggle the broadcast button; starting a broadcast kicks off an offer.
var btn = document.getElementById('broadcast');
btn.addEventListener('click', function () {
if (btn.getAttribute('class') === 'stop') {
btn.setAttribute('class', 'play');
btn.innerHTML = 'Broadcast';
} else {
btn.setAttribute('class', 'stop');
btn.innerHTML = 'Broadcasting...';
createOffer();
}
});
|
<reponame>celikmustafa89/anomalyDetectTool
package com.anomalydetect.BruteForce;
import static org.junit.Assert.*;
import org.junit.Before;
import org.junit.Test;
// Placeholder test class for AlphabetTreeNode: both the fixture setup and the
// single test are empty stubs awaiting implementation.
public class AlphabetTreeNodeTest {
@Before
public void setUp() throws Exception {
// TODO: construct the AlphabetTreeNode fixture under test.
}
@Test
public void isHaveLabel() {
// TODO: assert AlphabetTreeNode label behavior; currently passes vacuously.
}
}
package hex;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import water.*;
import water.fvec.*;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Arrays;
// test cases:
// skipMissing = TRUE/FALSE
// useAllLevels = TRUE/FALSE
// limit enums
// (dont) standardize predictor columns
// data info tests with interactions
public class DataInfoTest extends TestUtil {
// Boot a single-node H2O cloud once before any test in this class runs.
@BeforeClass static public void setup() { stall_till_cloudsize(1); }
// Smoke test: constructing a DataInfo with pairwise interactions of columns
// 8, 16 and 2 on the airlines data must not throw.
@Test public void testAirlines1() { // just test that it works at all
Frame fr = parse_test_file(Key.make("a.hex"), "smalldata/airlines/allyears2k_headers.zip");
try {
DataInfo dinfo = new DataInfo(
fr.clone(), // train
null, // valid
1, // num responses
true, // use all factor levels
DataInfo.TransformType.STANDARDIZE, // predictor transform
DataInfo.TransformType.NONE, // response transform
true, // skip missing
false, // impute missing
false, // missing bucket
false, // weight
false, // offset
false, // fold
Model.InteractionSpec.allPairwise(new String[]{fr.name(8),fr.name(16),fr.name(2)}) // interactions
);
dinfo.dropInteractions();
dinfo.remove();
} finally {
fr.delete();
}
}
// Verifies that pairwise interactions of columns 8, 16 and 2 (all factor
// levels in use) expand DataInfo.fullN() by exactly the summed expanded
// length of the interaction vecs.
// Fix: equality checks now use assertEquals instead of assertTrue(a==b), so
// a failure reports the expected and actual values.
@Test public void testAirlines2() {
Frame fr = parse_test_file(Key.make("a.hex"), "smalldata/airlines/allyears2k_headers.zip");
try {
Frame interactions = Model.makeInteractions(fr, false, Model.InteractionPair.generatePairwiseInteractionsFromList(8, 16, 2), true, true,true);
int len=0;
for(Vec v: interactions.vecs()) len += ((InteractionWrappedVec)v).expandedLength();
interactions.delete();
Assert.assertEquals(290+132+10, len);
DataInfo dinfo__noInteractions = new DataInfo(
fr.clone(), // train
null, // valid
1, // num responses
true, // use all factor levels
DataInfo.TransformType.STANDARDIZE, // predictor transform
DataInfo.TransformType.NONE, // response transform
true, // skip missing
false, // impute missing
false, // missing bucket
false, // weight
false, // offset
false, // fold
null
);
System.out.println(dinfo__noInteractions.fullN());
System.out.println(dinfo__noInteractions.numNums());
DataInfo dinfo__withInteractions = new DataInfo(
fr.clone(), // train
null, // valid
1, // num responses
true, // use all factor levels
DataInfo.TransformType.STANDARDIZE, // predictor transform
DataInfo.TransformType.NONE, // response transform
true, // skip missing
false, // impute missing
false, // missing bucket
false, // weight
false, // offset
false, // fold
Model.InteractionSpec.allPairwise(new String[]{fr.name(8),fr.name(16),fr.name(2)}) // interactions
);
System.out.println(dinfo__withInteractions.fullN());
Assert.assertEquals(dinfo__noInteractions.fullN() + len, dinfo__withInteractions.fullN());
dinfo__withInteractions.dropInteractions();
dinfo__noInteractions.remove();
dinfo__withInteractions.remove();
} finally {
fr.delete();
}
}
// Same expansion check as testAirlines2, but with useAllFactorLevels=false
// (reference level dropped), which changes the expected expanded length.
// Fix: equality checks now use assertEquals instead of assertTrue(a==b), so
// a failure reports the expected and actual values.
@Test public void testAirlines3() {
Frame fr = parse_test_file(Key.make("a.hex"), "smalldata/airlines/allyears2k_headers.zip");
try {
Frame interactions = Model.makeInteractions(fr, false, Model.InteractionPair.generatePairwiseInteractionsFromList(8, 16, 2), false, true, true);
int len=0;
for(Vec v: interactions.vecs()) len += ((InteractionWrappedVec)v).expandedLength();
interactions.delete();
Assert.assertEquals(426, len);
DataInfo dinfo__noInteractions = new DataInfo(
fr.clone(), // train
null, // valid
1, // num responses
false, // use all factor levels
DataInfo.TransformType.STANDARDIZE, // predictor transform
DataInfo.TransformType.NONE, // response transform
true, // skip missing
false, // impute missing
false, // missing bucket
false, // weight
false, // offset
false, // fold
null
);
System.out.println(dinfo__noInteractions.fullN());
DataInfo dinfo__withInteractions = new DataInfo(
fr.clone(), // train
null, // valid
1, // num responses
false, // use all factor levels
DataInfo.TransformType.STANDARDIZE, // predictor transform
DataInfo.TransformType.NONE, // response transform
true, // skip missing
false, // impute missing
false, // missing bucket
false, // weight
false, // offset
false, // fold
Model.InteractionSpec.allPairwise(new String[]{fr.name(8),fr.name(16),fr.name(2)}) // interactions
);
System.out.println(dinfo__withInteractions.fullN());
Assert.assertEquals(dinfo__noInteractions.fullN() + len, dinfo__withInteractions.fullN());
dinfo__withInteractions.dropInteractions();
dinfo__noInteractions.remove();
dinfo__withInteractions.remove();
} finally {
fr.delete();
}
}
// Verifies that an explicit InteractionSpec (two named pairs, with
// UniqueCarrier consumed by an interaction) yields exactly the expected
// adapted-frame column ordering: interaction columns first among the
// categoricals, numerics after, response last.
@Test public void testAirlinesInteractionSpec() {
try {
Scope.enter();
Frame fr = Scope.track(parse_test_file(Key.make("a.hex"), "smalldata/airlines/allyears2k_headers.zip"));
Model.InteractionSpec interactionSpec = Model.InteractionSpec.create(
null,
new StringPair[]{new StringPair("UniqueCarrier", "Origin"), new StringPair("Origin", "DayofMonth")},
new String[]{"UniqueCarrier"}
);
DataInfo dinfo = new DataInfo(
fr.clone(), // train
null, // valid
1, // num responses
false, // use all factor levels
DataInfo.TransformType.STANDARDIZE, // predictor transform
DataInfo.TransformType.NONE, // response transform
true, // skip missing
false, // impute missing
false, // missing bucket
false, // weight
false, // offset
false, // fold
interactionSpec // interactions
);
Scope.track_generic(dinfo);
Assert.assertArrayEquals(new String[]{
"TailNum", "UniqueCarrier_Origin", "Dest", "Origin", "CancellationCode", "IsArrDelayed", "Origin_DayofMonth",
"Year", "Month", "DayofMonth", "DayOfWeek", "DepTime", "CRSDepTime", "ArrTime", "CRSArrTime", "FlightNum",
"ActualElapsedTime", "CRSElapsedTime", "AirTime", "ArrDelay", "DepDelay", "Distance", "TaxiIn", "TaxiOut",
"Cancelled", "Diverted", "CarrierDelay", "WeatherDelay", "NASDelay", "SecurityDelay", "LateAircraftDelay",
"IsDepDelayed"}, dinfo._adaptedFrame._names);
} finally {
Scope.exit();
}
}
// Sparse vs. dense row extraction must agree on iris with a (0,1)
// interaction and no predictor transform.
// Fix: removed the unused local `ips` — the interaction columns are supplied
// via Model.InteractionSpec.allPairwise below, so the pair array was dead.
@Test public void testIris1() { // test that getting sparseRows and denseRows produce the same results
Frame fr = parse_test_file(Key.make("a.hex"), "smalldata/iris/iris_wheader.csv");
fr.swap(1,4);
DataInfo di=null;
try {
di = new DataInfo(
fr.clone(), // train
null, // valid
1, // num responses
true, // use all factor levels
DataInfo.TransformType.NONE, // predictor transform
DataInfo.TransformType.NONE, // response transform
true, // skip missing
false, // impute missing
false, // missing bucket
false, // weight
false, // offset
false, // fold
Model.InteractionSpec.allPairwise(new String[]{fr.name(0),fr.name(1)}) // interactions
);
checker(di,false);
} finally {
fr.delete();
if( di!=null ) {
di.dropInteractions();
di.remove();
}
}
}
// Same sparse/dense agreement check as testIris1, but with STANDARDIZE as
// the predictor transform.
// Fix: removed the unused local `ips` (dead code, see testIris1).
@Test public void testIris2() { // test that getting sparseRows and denseRows produce the same results
Frame fr = parse_test_file(Key.make("a.hex"), "smalldata/iris/iris_wheader.csv");
fr.swap(1,4);
DataInfo di=null;
try {
di = new DataInfo(
fr.clone(), // train
null, // valid
1, // num responses
true, // use all factor levels
DataInfo.TransformType.STANDARDIZE, // predictor transform
DataInfo.TransformType.NONE, // response transform
true, // skip missing
false, // impute missing
false, // missing bucket
false, // weight
false, // offset
false, // fold
Model.InteractionSpec.allPairwise(new String[]{fr.name(0),fr.name(1)}) // interactions
);
checker(di,true);
} finally {
fr.delete();
if( di!=null ) {
di.dropInteractions();
di.remove();
}
}
}
// Sparse-vs-dense agreement on iris with the categorical swapped to a
// different position and all pairwise interactions among the first four
// columns. (Unused Model.InteractionPair[] local removed.)
@Test public void testIris3() { // test that getting sparseRows and denseRows produce the same results
  Frame fr = parse_test_file(Key.make("a.hex"), "smalldata/iris/iris_wheader.csv");
  fr.swap(2,4);
  DataInfo di=null;
  try {
    di = new DataInfo(
            fr.clone(),                         // train
            null,                               // valid
            1,                                  // num responses
            true,                               // use all factor levels
            DataInfo.TransformType.STANDARDIZE, // predictor transform
            DataInfo.TransformType.NONE,        // response transform
            true,                               // skip missing
            false,                              // impute missing
            false,                              // missing bucket
            false,                              // weight
            false,                              // offset
            false,                              // fold
            Model.InteractionSpec.allPairwise(new String[]{fr.name(0),fr.name(1),fr.name(2),fr.name(3)}) // interactions
    );
    checker(di,true);
  } finally {
    fr.delete();
    if( di!=null ) {
      di.dropInteractions();
      di.remove();
    }
  }
}
// Sparse-vs-dense agreement on the airlines data with interactions among
// columns 8, 16 and 2, standardized predictors, all factor levels used.
// (Unused Model.InteractionPair[] local removed.)
@Test public void testAirlines4() {
  Frame fr = parse_test_file(Key.make("a0.hex"), "smalldata/airlines/allyears2k_headers.zip");
  // fixme need to rebalance to 1 chunk, otherwise the test does not pass!
  Key k = Key.make("a.hex");
  H2O.submitTask(new RebalanceDataSet(fr,k,1)).join();
  fr.delete();               // drop the multi-chunk original; keep the rebalanced copy
  fr = DKV.getGet(k);
  DataInfo di=null;
  try {
    di = new DataInfo(
            fr.clone(),                         // train
            null,                               // valid
            1,                                  // num responses
            true,                               // use all factor levels
            DataInfo.TransformType.STANDARDIZE, // predictor transform
            DataInfo.TransformType.NONE,        // response transform
            true,                               // skip missing
            false,                              // impute missing
            false,                              // missing bucket
            false,                              // weight
            false,                              // offset
            false,                              // fold
            Model.InteractionSpec.allPairwise(new String[]{fr.name(8),fr.name(16),fr.name(2)}) // interactions
    );
    checker(di,true);
  } finally {
    fr.delete();
    if( di!=null ) {
      di.dropInteractions();
      di.remove();
    }
  }
}
// Same as testAirlines4 but with useAllFactorLevels == false, so the first
// factor level of each categorical is dropped from the expansion.
// (Unused Model.InteractionPair[] local removed.)
@Test public void testAirlines5() {
  Frame fr = parse_test_file(Key.make("a0.hex"), "smalldata/airlines/allyears2k_headers.zip");
  // fixme need to rebalance to 1 chunk, otherwise the test does not pass!
  Key k = Key.make("a.hex");
  H2O.submitTask(new RebalanceDataSet(fr,k,1)).join();
  fr.delete();
  fr = DKV.getGet(k);
  DataInfo di=null;
  try {
    di = new DataInfo(
            fr.clone(),                         // train
            null,                               // valid
            1,                                  // num responses
            false,                              // use all factor levels
            DataInfo.TransformType.STANDARDIZE, // predictor transform
            DataInfo.TransformType.NONE,        // response transform
            true,                               // skip missing
            false,                              // impute missing
            false,                              // missing bucket
            false,                              // weight
            false,                              // offset
            false,                              // fold
            Model.InteractionSpec.allPairwise(new String[]{fr.name(8),fr.name(16),fr.name(2)}) // interactions
    );
    checker(di,true);
  } finally {
    fr.delete();
    if( di!=null ) {
      di.dropInteractions();
      di.remove();
    }
  }
}
// Smoke test for coefficient-name handling: names are lazily computed and
// cached on first access, and survive a serialize/deserialize round trip.
@Test public void testCoefNames() throws IOException {
  Frame fr = parse_test_file(Key.make("a.hex"), "smalldata/airlines/allyears2k_headers.zip");
  DataInfo dinfo = null;
  try {
    dinfo = new DataInfo(
            fr.clone(),                         // train
            null,                               // valid
            1,                                  // num responses
            true,                               // use all factor levels
            DataInfo.TransformType.STANDARDIZE, // predictor transform
            DataInfo.TransformType.NONE,        // response transform
            true,                               // skip missing
            false,                              // impute missing
            false,                              // missing bucket
            false,                              // weight
            false,                              // offset
            false,                              // fold
            Model.InteractionSpec.allPairwise(new String[]{fr.name(8),fr.name(16),fr.name(2)}) // interactions
    );
    // Lazily computed: the field is null until coefNames() is first called.
    Assert.assertNull(dinfo._coefNames);
    final String[] names = dinfo.coefNames();
    Assert.assertNotNull(names);
    Assert.assertArrayEquals(names, dinfo._coefNames); // cached after first access
    // Round-trip through the binary serialization and check the cache survives.
    DKV.put(dinfo);
    ByteArrayOutputStream bytesOut = new ByteArrayOutputStream();
    dinfo.writeAll(new AutoBuffer(bytesOut, true)).close();
    bytesOut.close();
    ByteArrayInputStream bytesIn = new ByteArrayInputStream(bytesOut.toByteArray());
    DataInfo restored = (DataInfo) Keyed.readAll(new AutoBuffer(bytesIn));
    Assert.assertNotNull(restored);
    Assert.assertArrayEquals(names, restored._coefNames); // preserved through (de)serialization
  } finally {
    if (dinfo != null) {
      dinfo.dropInteractions();
      dinfo.remove();
    }
    fr.delete();
  }
}
// When a categorical interacts with a numeric column, every factor level is
// kept in the expansion even though useAllFactorLevels is false, and the
// standalone "Distance" column is consumed by the interaction spec.
@Test public void testInteractionsForcedAllFactors() {
  try {
    Scope.enter();
    Frame fr = Scope.track(parse_test_file(Key.make("a.hex"), "smalldata/airlines/allyears2k_headers.zip"));
    Frame sfr = fr.subframe(new String[]{"Origin", "Distance"});
    Model.InteractionSpec interactionSpec = Model.InteractionSpec.create(
            new String[]{"Origin", "Distance"}, null, new String[] {"Distance"});
    DataInfo dinfo = new DataInfo(
            sfr,                                // train
            null,                               // valid
            1,                                  // num responses
            false,                              // DON'T use all factor levels
            DataInfo.TransformType.STANDARDIZE, // predictor transform
            DataInfo.TransformType.NONE,        // response transform
            true,                               // skip missing
            false,                              // impute missing
            false,                              // missing bucket
            false,                              // weight
            false,                              // offset
            false,                              // fold
            interactionSpec                     // interaction spec
    );
    String[] originDomain = sfr.vec("Origin").domain();
    // One coefficient per Origin level -- all levels are forced in.
    Assert.assertEquals(fr.vec("Origin").domain().length, dinfo.coefNames().length);
    String[] expected = new String[dinfo.coefNames().length];
    for (int i = 0; i < expected.length; i++) {
      expected[i] = "Origin_Distance." + originDomain[i];
    }
    Assert.assertArrayEquals(expected, dinfo.coefNames());
    dinfo.dropInteractions();
    dinfo.remove();
  } finally {
    Scope.exit();
  }
}
// When the interaction spec lists "Origin" as the column to restrict, the
// first factor level is dropped from the expansion (useAllFactorLevels is
// false) while the plain "Distance" column survives as the last coefficient.
@Test public void testInteractionsSkip1stFactor() {
  try {
    Scope.enter();
    Frame fr = Scope.track(parse_test_file(Key.make("a.hex"), "smalldata/airlines/allyears2k_headers.zip"));
    Frame sfr = fr.subframe(new String[]{"Origin", "Distance", "IsDepDelayed"});
    Model.InteractionSpec interactionSpec = Model.InteractionSpec.create(
            new String[]{"Origin", "Distance"}, null, new String[]{"Origin"});
    DataInfo dinfo = new DataInfo(
            sfr,                                // train
            null,                               // valid
            1,                                  // num responses
            false,                              // DON'T use all factor levels
            DataInfo.TransformType.STANDARDIZE, // predictor transform
            DataInfo.TransformType.NONE,        // response transform
            true,                               // skip missing
            false,                              // impute missing
            false,                              // missing bucket
            false,                              // weight
            false,                              // offset
            false,                              // fold
            interactionSpec                     // interaction spec
    );
    // Check that we get correct expanded coefficients and "Distance" is not dropped
    Assert.assertEquals(fr.vec("Origin").domain().length, dinfo.coefNames().length);
    String[] expected = new String[dinfo.coefNames().length];
    int last = expected.length - 1;
    expected[last] = "Distance";
    for (int i = 0; i < last; i++) {
      expected[i] = "Origin_Distance." + fr.vec("Origin").domain()[i + 1]; // level 0 skipped
    }
    Assert.assertArrayEquals(expected, dinfo.coefNames());
    // Every remaining level must resolve to a valid categorical id.
    int domainLen = dinfo._adaptedFrame.vec(0).domain().length;
    for (int j = /*don't use all factor levels*/ 1; j < domainLen; j++) {
      if (dinfo.getCategoricalIdFromInteraction(0, j) < 0)
        Assert.fail("Categorical value should be recognized: " + j);
    }
    // An out-of-domain level on validation data falls back to the column mode.
    dinfo._valid = true;
    Assert.assertEquals(fr.vec("Origin").mode(),
            dinfo.getCategoricalIdFromInteraction(0, domainLen));
    dinfo.dropInteractions();
    dinfo.remove();
  } finally {
    Scope.exit();
  }
}
// Focused check of getCategoricalIdFromInteraction: known levels resolve to
// non-negative ids; an unknown level on validation data maps to the mode.
// NOTE(review): overlaps heavily with the tail of testInteractionsSkip1stFactor;
// consider extracting a shared helper.
@Test
public void testGetCategoricalIdFromInteraction() {
  try {
    Scope.enter();
    Frame fr = Scope.track(parse_test_file(Key.make("a.hex"), "smalldata/airlines/allyears2k_headers.zip"));
    Frame sfr = fr.subframe(new String[]{"Origin", "Distance", "IsDepDelayed"});
    Model.InteractionSpec interactionSpec = Model.InteractionSpec.create(
            new String[]{"Origin", "Distance"}, null, new String[]{"Origin"});
    DataInfo dinfo = new DataInfo(
            sfr,                                // train
            null,                               // valid
            1,                                  // num responses
            false,                              // DON'T use all factor levels
            DataInfo.TransformType.STANDARDIZE, // predictor transform
            DataInfo.TransformType.NONE,        // response transform
            true,                               // skip missing
            false,                              // impute missing
            false,                              // missing bucket
            false,                              // weight
            false,                              // offset
            false,                              // fold
            interactionSpec                     // interaction spec
    );
    int domainLen = dinfo._adaptedFrame.vec(0).domain().length;
    // Level 0 is skipped because all factor levels are not in use.
    for (int level = 1; level < domainLen; level++) {
      if (dinfo.getCategoricalIdFromInteraction(0, level) < 0)
        Assert.fail("Categorical value should be recognized: " + level);
    }
    // An unseen level on validation data should resolve to the mode.
    dinfo._valid = true;
    Assert.assertEquals(fr.vec("Origin").mode(),
            dinfo.getCategoricalIdFromInteraction(0, domainLen));
    dinfo.dropInteractions();
    dinfo.remove();
  } finally {
    Scope.exit();
  }
}
// @Test public void personalChecker() {
// final Frame gold = parse_test_file(Key.make("gold"), "/Users/spencer/Desktop/ffff.csv");
// Frame fr = parse_test_file(Key.make("a.hex"), "/Users/spencer/Desktop/iris.csv");
// fr.swap(3,4);
// DataInfo di0=null;
// try {
// di0 = new DataInfo(
// fr.clone(), // train
// null, // valid
// 1, // num responses
// false, // use all factor levels
// DataInfo.TransformType.STANDARDIZE, // predictor transform
// DataInfo.TransformType.NONE, // response transform
// true, // skip missing
// false, // impute missing
// false, // missing bucket
// false, // weight
// false, // offset
// false, // fold
// new String[]{"Species", "Sepal.Length", "Petal.Length"} // interactions
// );
// final DataInfo di=di0;
// new MRTask() {
// @Override public void map(Chunk[] cs) {
// DataInfo.Row[] sparseRows = di.extractSparseRows(cs);
// for(int i=0;i<cs[0]._len;++i) {
//// di.extractDenseRow(cs, i, r);
// DataInfo.Row r = sparseRows[i];
// int idx=1;
// for (int j = di.numStart(); j < di.fullN(); ++j) {
// double goldValue = gold.vec(idx++).at(i+cs[0].start());
// double thisValue = r.get(j) - (di._normSub[j - di.numStart()] * di._normMul[j-di.numStart()]);
// double diff = Math.abs(goldValue - thisValue);
// if( diff > 1e-12 )
// throw new RuntimeException("bonk");
// }
// }
// }
// }.doAll(di0._adaptedFrame);
// } finally {
// fr.delete();
// gold.delete();
// if( di0!=null ) {
// di0.dropInteractions();
// di0.remove();
// }
// }
// }
// Diagnostic dump used by checker() on mismatch: prints, for every column
// whose dense and (fully scaled) sparse values differ by more than 1e-14,
// a "col|dense|sparse|sparseScaled" line wrapped in "<"/">" markers.
private static void printVals(DataInfo di, DataInfo.Row denseRow, DataInfo.Row sparseRow) {
  System.out.println("col|dense|sparse|sparseScaled");
  for (int col = 0; col < di.fullN(); ++col) {
    double scaled = sparseRow.get(col);
    if (col >= di.numStart()) {
      // Sparse extraction leaves numeric columns partially scaled; finish here.
      scaled -= di._normSub[col - di.numStart()] * di._normMul[col - di.numStart()];
    }
    String row = col + "|" + denseRow.get(col) + "|" + sparseRow.get(col) + "|" + scaled;
    if (Math.abs(denseRow.get(col) - scaled) > 1e-14)
      System.out.println(">" + row + "<");
  }
}
// Core assertion shared by the tests above: extracts every row of the
// adapted frame both densely and sparsely and verifies the two agree
// (within 1e-10) once the sparse value's scaling is completed. Rows marked
// bad (skipped-missing) must be bad in both representations.
// (Removed leftover debug code that printed "haha" at chunk start 23889.)
private static void checker(final DataInfo di, final boolean standardize) {
  new MRTask() {
    @Override public void map(Chunk[] cs) {
      DataInfo.Row[] sparseRows = di.extractSparseRows(cs);
      DataInfo.Row r = di.newDenseRow();
      for(int i=0;i<cs[0]._len;++i) {
        di.extractDenseRow(cs, i, r);
        for (int j = 0; j < di.fullN(); ++j) {
          double sparseDoubleScaled = sparseRows[i].get(j); // extracting sparse rows does not do the full scaling!!
          if( j>=di.numStart() ) { // finish scaling the sparse value
            sparseDoubleScaled -= (standardize?(di._normSub[j - di.numStart()] * di._normMul[j-di.numStart()]):0);
          }
          if( r.isBad() || sparseRows[i].isBad() ) {
            if( sparseRows[i].isBad() && r.isBad() ) continue; // both bad OK
            throw new RuntimeException("dense row was "+(r.isBad()?"bad":"not bad") + "; but sparse row was "+(sparseRows[i].isBad()?"bad":"not bad"));
          }
          if( Math.abs(r.get(j)-sparseDoubleScaled) > 1e-10 ) {
            printVals(di,r,sparseRows[i]); // dump mismatching columns before failing
            throw new RuntimeException("Row mismatch on row " + i);
          }
        }
      }
    }
  }.doAll(di._adaptedFrame);
}
}
|
# Shell aliases for common Docker workflows.
# NOTE: bash aliases do not take positional parameters -- arguments typed
# after the alias are simply appended to the expansion. The previous
# `docker $*` form expanded `$*` from the *current shell's* positional
# parameters (usually empty, but wrong whenever the shell has any), so the
# trailing `$*` is removed.
alias d='docker'
alias d-c='docker-compose'
# Kill all running containers.
alias dockerkillall='docker kill $(docker ps -q)'
# Delete all stopped containers.
alias dockercleanc='printf "\n>>> Deleting stopped containers\n\n" && docker rm $(docker ps -a -q)'
# Delete all untagged images.
alias dockercleani='printf "\n>>> Deleting untagged images\n\n" && docker rmi $(docker images -q -f dangling=true)'
# Delete all stopped containers and untagged images.
alias dockerclean='dockercleanc || true && dockercleani'
import React from 'react';
import { Link } from 'react-router-dom';
import * as Icon from 'react-feather';
const OurServices = () => {
return (
<div className="services-area-two pt-80 pb-50 bg-f7fafd">
<div className="container">
<div className="section-title">
<h2>Our Services</h2>
<div className="bar"></div>
<p>
Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do
eiusmod tempor incididunt ut labore et dolore magna aliqua.
</p>
</div>
<div className="row">
<div className="col-lg-4 col-md-6 col-sm-6">
<div className="single-services-box">
<div className="icon">
<Icon.Settings />
</div>
<h3>
<Link to="/service-details">
<a>Incredible Infrastructure</a>
</Link>
</h3>
<p>
Lorem ipsum dolor amet, adipiscing, sed do eiusmod tempor
incididunt ut labore dolore magna aliqua.
</p>
</div>
</div>
<div className="col-lg-4 col-md-6 col-sm-6">
<div className="single-services-box">
<div className="icon">
<Icon.Mail />
</div>
<h3>
<Link to="/service-details">
<a>Email Notifications</a>
</Link>
</h3>
<p>
Lorem ipsum dolor amet, adipiscing, sed do eiusmod tempor
incididunt ut labore dolore magna aliqua.
</p>
</div>
</div>
<div className="col-lg-4 col-md-6 col-sm-6">
<div className="single-services-box">
<div className="icon">
<Icon.Bell />
</div>
<h3>
<Link to="/service-details">
<a>Best Analytics Audits</a>
</Link>
</h3>
<p>
Lorem ipsum dolor amet, adipiscing, sed do eiusmod tempor
incididunt ut labore dolore magna aliqua.
</p>
</div>
</div>
<div className="col-lg-4 col-md-6 col-sm-6">
<div className="single-services-box">
<div className="icon bg-c679e3">
<Icon.Grid />
</div>
<h3>
<Link to="/service-details">
<a>Simple Dashboard</a>
</Link>
</h3>
<p>
Lorem ipsum dolor amet, adipiscing, sed do eiusmod tempor
incididunt ut labore dolore magna aliqua.
</p>
</div>
</div>
<div className="col-lg-4 col-md-6 col-sm-6">
<div className="single-services-box">
<div className="icon bg-c679e3">
<Icon.Info />
</div>
<h3>
<Link to="/service-details">
<a>Information Retrieval</a>
</Link>
</h3>
<p>
Lorem ipsum dolor amet, adipiscing, sed do eiusmod tempor
incididunt ut labore dolore magna aliqua.
</p>
</div>
</div>
<div className="col-lg-4 col-md-6 col-sm-6">
<div className="single-services-box">
<div className="icon bg-c679e3">
<Icon.HardDrive />
</div>
<h3>
<Link to="/service-details">
<a>Deep Technical SEO</a>
</Link>
</h3>
<p>
Lorem ipsum dolor amet, adipiscing, sed do eiusmod tempor
incididunt ut labore dolore magna aliqua.
</p>
</div>
</div>
<div className="col-lg-4 col-md-6 col-sm-6">
<div className="single-services-box">
<div className="icon bg-eb6b3d">
<Icon.MousePointer />
</div>
<h3>
<Link to="/service-details">
<a>Drag & Drop Functionality</a>
</Link>
</h3>
<p>
Lorem ipsum dolor amet, adipiscing, sed do eiusmod tempor
incididunt ut labore dolore magna aliqua.
</p>
</div>
</div>
<div className="col-lg-4 col-md-6 col-sm-6">
<div className="single-services-box">
<div className="icon bg-eb6b3d">
<Icon.Bell />
</div>
<h3>
<Link to="/service-details">
<a>Deadline Reminders</a>
</Link>
</h3>
<p>
Lorem ipsum dolor amet, adipiscing, sed do eiusmod tempor
incididunt ut labore dolore magna aliqua.
</p>
</div>
</div>
<div className="col-lg-4 col-md-6 col-sm-6">
<div className="single-services-box">
<div className="icon bg-eb6b3d">
<Icon.Send />
</div>
<h3>
<Link to="/service-details">
<a>Modern Keyword Analysis</a>
</Link>
</h3>
<p>
Lorem ipsum dolor amet, adipiscing, sed do eiusmod tempor
incididunt ut labore dolore magna aliqua.
</p>
</div>
</div>
</div>
</div>
</div>
);
};
export default OurServices;
|
<gh_stars>0
package ru.skarpushin.swingpm.tools.edt;
import javax.swing.SwingUtilities;
import org.apache.log4j.Logger;
/**
 * {@link EdtInvoker} implementation that runs a task synchronously on the
 * Swing Event Dispatch Thread: directly when already on the EDT, otherwise
 * via {@link SwingUtilities#invokeAndWait(Runnable)}.
 */
public class EdtInvokerSimpleImpl implements EdtInvoker {
	private static Logger log = Logger.getLogger(EdtInvokerSimpleImpl.class);

	@Override
	public void invoke(Runnable task) {
		// Already on the EDT -- invokeAndWait would deadlock, so run inline.
		if (SwingUtilities.isEventDispatchThread()) {
			task.run();
			return;
		}

		try {
			SwingUtilities.invokeAndWait(task);
		} catch (InterruptedException ie) {
			// FIX: re-assert the interrupt flag so callers can still observe the
			// interruption; previously it was swallowed and only trace-logged.
			Thread.currentThread().interrupt();
			log.trace("catch InterruptedException, considered as non-critical case, no exception being propogated", ie);
		} catch (Throwable t) {
			// Wraps InvocationTargetException (task threw) and any other failure.
			throw new RuntimeException("Failed to execute task on EDT", t);
		}
	}
}
|
#!/bin/bash
# Create the "cac_user" account with a home directory and set its default
# umask to 022 via .bashrc.
#
# Fixes over the original:
#  - stop at the first failure (previously .bashrc was appended to even when
#    useradd failed, e.g. when not running as root or the user exists)
#  - don't clobber the standard $USER environment variable
#  - quote all expansions
set -euo pipefail

NEW_USER="cac_user"

# -m creates the home directory alongside the account.
useradd -m "$NEW_USER"

echo "umask 022" >> "/home/$NEW_USER/.bashrc"
|
#!/bin/bash
# Run a sample query against MyTable.
#
# SECURITY FIX: the password is passed via the MYSQL_PWD environment
# variable for this one invocation instead of `-p$MYSQL_PASSWORD` on the
# command line -- command-line arguments are visible to all users on the
# host through `ps`, so the old form leaked the credential.
MYSQL_USERNAME="<your_username>"
MYSQL_PASSWORD="<your_password>"

# Establish a connection with the database and run the query.
MYSQL_PWD="$MYSQL_PASSWORD" mysql -u "$MYSQL_USERNAME" <<-EOF
SELECT * FROM MyTable;
EOF
# Copyright (c) 2013 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.

# Traffic-shaping script: builds an HTB qdisc hierarchy on ${IF} with one
# unlimited class (1:10, the default) and one rate-limited class (1:11),
# then marks outbound protocol traffic on port 16783 so it lands in the
# limited class. Requires root. NOTE(review): order matters -- qdisc and
# classes must exist before the filters that reference them.

#network interface on which to limit traffic
IF="eth0"
#limit of the network interface in question
LINKCEIL="1gbit"
#limit outbound DigitalGoldCoin protocol traffic to this rate
LIMIT="160kbit"
#defines the address space for which you wish to disable rate limiting
LOCALNET="192.168.0.0/16"

#delete existing rules
# (this prints an error if no qdisc is attached yet; that is harmless)
tc qdisc del dev ${IF} root

#add root class
tc qdisc add dev ${IF} root handle 1: htb default 10

#add parent class
tc class add dev ${IF} parent 1: classid 1:1 htb rate ${LINKCEIL} ceil ${LINKCEIL}

#add our two classes. one unlimited, another limited
tc class add dev ${IF} parent 1:1 classid 1:10 htb rate ${LINKCEIL} ceil ${LINKCEIL} prio 0
tc class add dev ${IF} parent 1:1 classid 1:11 htb rate ${LIMIT} ceil ${LIMIT} prio 1

#add handles to our classes so packets marked with <x> go into the class with "... handle <x> fw ..."
tc filter add dev ${IF} parent 1: protocol ip prio 1 handle 1 fw classid 1:10
tc filter add dev ${IF} parent 1: protocol ip prio 2 handle 2 fw classid 1:11

#delete any existing rules
#disable for now
#ret=0
#while [ $ret -eq 0 ]; do
#	iptables -t mangle -D OUTPUT 1
#	ret=$?
#done

#limit outgoing traffic to and from port 16783. but not when dealing with a host on the local network
#	(defined by $LOCALNET)
#	--set-mark marks packages matching these criteria with the number "2"
#	these packages are filtered by the tc filter with "handle 2"
#	this filter sends the packages into the 1:11 class, and this class is limited to ${LIMIT}
iptables -t mangle -A OUTPUT -p tcp -m tcp --dport 16783 ! -d ${LOCALNET} -j MARK --set-mark 0x2
iptables -t mangle -A OUTPUT -p tcp -m tcp --sport 16783 ! -d ${LOCALNET} -j MARK --set-mark 0x2
<gh_stars>0
/*******************************************************************************
* Copyright 2015 InfinitiesSoft Solutions Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*******************************************************************************/
package com.infinities.skyport.async.service.network;
import java.util.Map;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import org.dasein.cloud.AccessControlledService;
import org.dasein.cloud.CloudException;
import org.dasein.cloud.InternalException;
import org.dasein.cloud.OperationNotSupportedException;
import org.dasein.cloud.ResourceStatus;
import org.dasein.cloud.Tag;
import org.dasein.cloud.identity.ServiceAction;
import org.dasein.cloud.network.Direction;
import org.dasein.cloud.network.Firewall;
import org.dasein.cloud.network.FirewallConstraints;
import org.dasein.cloud.network.FirewallCreateOptions;
import org.dasein.cloud.network.FirewallRule;
import org.dasein.cloud.network.FirewallRuleCreateOptions;
import org.dasein.cloud.network.NetworkFirewallCapabilities;
import org.dasein.cloud.network.NetworkFirewallSupport;
import org.dasein.cloud.network.Permission;
import org.dasein.cloud.network.Protocol;
import org.dasein.cloud.network.RuleTarget;
import com.infinities.skyport.async.AsyncResult;
public interface AsyncNetworkFirewallSupport extends AccessControlledService {
static public final ServiceAction ANY = new ServiceAction("NFW:ANY");
static public final ServiceAction ASSOCIATE = new ServiceAction("NFW:ASSOCIATE");
static public final ServiceAction AUTHORIZE = new ServiceAction("NFW:AUTHORIZE");
static public final ServiceAction CREATE_FIREWALL = new ServiceAction("NFW:CREATE_FIREWALL");
static public final ServiceAction GET_FIREWALL = new ServiceAction("NFW:GET_FIREWALL");
static public final ServiceAction LIST_FIREWALL = new ServiceAction("NFW:LIST_FIREWALL");
static public final ServiceAction REMOVE_FIREWALL = new ServiceAction("NFW:REMOVE_FIREWALL");
static public final ServiceAction REVOKE = new ServiceAction("NFW:REVOKE");
/**
* Associates the specified firewall with the specified subnet.
*
* @param firewallId
* the firewall to be associated
* @param withSubnetId
* the subnet with which the firewall is to be associated
* @throws CloudException
* an error occurred with the cloud provider while performing
* the operation
* @throws InternalException
* an error occurred locally independent of any events in the
* cloud
* @throws OperationNotSupportedException
* you cannot associate network firewalls at the subnet level
*/
public AsyncResult<Void> associateWithSubnet(@Nonnull String firewallId, @Nonnull String withSubnetId)
throws CloudException, InternalException;
/**
* Provides positive authorization for the specified firewall rule with the
* specified precedence. Any call to this method should result in an
* override of any previous authorizations. For this method, the source
* endpoint is the source for the traffic and the destination endpoint is
* where the traffic terminates. For INGRESS rules, the destination endpoint
* will thus be resources protected by this firewall and for EGRESS rules
* the destination endpoint is one or more external resources.
*
* @param firewallId
* the unique, cloud-specific ID for the firewall being targeted
* by the new rule
* @param direction
* the direction of the traffic governing the rule
* @param permission
* ALLOW or DENY
* @param sourceEndpoint
* the source endpoint for this rule
* @param protocol
* the protocol (tcp/udp/icmp) supported by this rule
* @param destinationEndpoint
* the destination endpoint to specify for this rule
* @param beginPort
* the beginning of the port range to be allowed, inclusive
* @param endPort
* the end of the port range to be allowed, inclusive
* @param precedence
* the precedence of this rule with respect to others (-1 means
* the default rule)
* @return the provider ID of the new rule
* @throws CloudException
* an error occurred with the cloud provider establishing the
* rule
* @throws InternalException
* an error occurred locally trying to establish the rule
* @throws OperationNotSupportedException
* the specified direction, target, or permission are not
* supported
*/
public @Nonnull AsyncResult<String> authorize(@Nonnull String firewallId, @Nonnull Direction direction,
@Nonnull Permission permission, @Nonnull RuleTarget sourceEndpoint, @Nonnull Protocol protocol,
@Nonnull RuleTarget destinationEndpoint, int beginPort, int endPort, int precedence) throws CloudException,
InternalException;
/**
* Provides positive authorization for the specified firewall rule with the
* specified precedence. Any call to this method should result in an
* override of any previous authorizations. For this method, the source
* endpoint is the source for the traffic and the destination endpoint is
* where the traffic terminates. For INGRESS rules, the destination endpoint
* will thus be resources protected by this firewall and for EGRESS rules
* the destination endpoint is one or more external resources.
*
* @param firewallId
* the unique, cloud-specific ID for the firewall being targeted
* by the new rule
* @param options
* the firewall create options that define how the rule should be
* created
* @return the provider ID of the new rule
* @throws CloudException
* an error occurred with the cloud provider establishing the
* rule
* @throws InternalException
* an error occurred locally trying to establish the rule
* @throws OperationNotSupportedException
* the specified direction, target, or permission are not
* supported
*/
public @Nonnull AsyncResult<String> authorize(@Nonnull String firewallId, @Nonnull FirewallRuleCreateOptions options)
throws CloudException, InternalException;
/**
* Creates a new firewall based on the specified creation options.
*
* @param options
* the options to be used in creating the firewall
* @return the unique provider ID identifying the newly created firewall
* @throws CloudException
* an error occurred with the cloud provider while performing
* the operation
* @throws InternalException
* an error occurred locally independent of any events in the
* cloud
* @throws UnsupportedOperationException
* this cloud doesn't support firewall creation using the
* specified options
*/
public @Nonnull AsyncResult<String> createFirewall(@Nonnull FirewallCreateOptions options) throws InternalException,
CloudException;
/**
* Identifies the constraints and values currently active for the specified
* firewall. The constrained fields should match the fields defined as being
* constrained in {@link #getFirewallConstraintsForCloud()}.
*
* @param firewallId
* the ID for which you are seeking active constraints
* @return a map of constraints to the value on which a given rule value is
* constrained
* @throws InternalException
* an error occurred inside Dasein Cloud processing the request
* @throws CloudException
* an error occurred communicating with the cloud provider in
* assembling the list
*/
public @Nullable AsyncResult<Map<FirewallConstraints.Constraint, Object>> getActiveConstraintsForFirewall(
@Nonnull String firewallId) throws InternalException, CloudException;
/**
* Provides access to meta-data about load balancer capabilities in the
* current region of this cloud.
*
* @return a description of the features supported by this region of this
* cloud
* @throws InternalException
* an error occurred within the Dasein Cloud API implementation
* @throws CloudException
* an error occurred within the cloud provider
*/
public @Nonnull NetworkFirewallCapabilities getCapabilities() throws CloudException, InternalException;
/**
* Provides the full firewall data for the specified firewall.
*
* @param firewallId
* the unique ID of the desired firewall
* @return the firewall state for the specified firewall instance
* @throws InternalException
* an error occurred locally independent of any events in the
* cloud
* @throws CloudException
* an error occurred with the cloud provider while performing
* the operation
*/
public @Nullable AsyncResult<Firewall> getFirewall(@Nonnull String firewallId) throws InternalException, CloudException;
/**
* Identifies whether or not the current account is subscribed to network
* firewall services in the current region.
*
* @return true if the current account is subscribed to network firewall
* services for the current region
* @throws CloudException
* an error occurred with the cloud provider while determining
* subscription status
* @throws InternalException
* an error occurred in the Dasein Cloud implementation while
* determining subscription status
*/
public boolean isSubscribed() throws CloudException, InternalException;
/**
* Lists the status for all network firewalls in the current provider
* context.
*
* @return the status for all network firewalls in the current account
* @throws InternalException
* an error occurred locally independent of any events in the
* cloud
* @throws CloudException
* an error occurred with the cloud provider while performing
* the operation
*/
public @Nonnull AsyncResult<Iterable<ResourceStatus>> listFirewallStatus() throws InternalException, CloudException;
/**
* Lists all network firewalls in the current provider context.
*
* @return a list of all network firewalls in the current provider context
* @throws InternalException
* an error occurred locally independent of any events in the
* cloud
* @throws CloudException
* an error occurred with the cloud provider while performing
* the operation
*/
public @Nonnull AsyncResult<Iterable<Firewall>> listFirewalls() throws InternalException, CloudException;
/**
* Provides the rules supported by the named firewall ordered in order of
* precedence with the most important rule first.
* <em>Implementation note: natural sorting order for {@link FirewallRule} is low to
* high. If this cloud has 0 as a low priority, you should reverse the natural sort!</em>
*
* @param firewallId
* the unique ID of the firewall being queried
* @return all rules supported by the target firewall
* @throws InternalException
* an error occurred locally independent of any events in the
* cloud
* @throws CloudException
* an error occurred with the cloud provider while performing
* the operation
*/
public @Nonnull AsyncResult<Iterable<FirewallRule>> listRules(@Nonnull String firewallId) throws InternalException,
CloudException;
/**
* Removes one or more network firewalls from the system.
*
* @param firewallIds
* the unique IDs of the firewalls to be deleted
* @throws InternalException
* an error occurred locally independent of any events in the
* cloud
* @throws CloudException
* an error occurred with the cloud provider while performing
* the operation
*/
public AsyncResult<Void> removeFirewall(@Nonnull String... firewallIds) throws InternalException, CloudException;
/**
* Removes meta-data from a network firewall. If tag values are set, their
* removal is dependent on underlying cloud provider behavior. They may be
* removed only if the tag value matches or they may be removed regardless
* of the value.
*
* @param firewallId
* the firewall to update
* @param tags
* the meta-data tags to remove
* @throws CloudException
* an error occurred within the cloud provider
* @throws InternalException
* an error occurred within the Dasein Cloud API implementation
*/
public AsyncResult<Void> removeTags(@Nonnull String firewallId, @Nonnull Tag... tags) throws CloudException,
InternalException;
/**
* Removes meta-data from multiple network firewalls. If tag values are set,
* their removal is dependent on underlying cloud provider behavior. They
* may be removed only if the tag value matches or they may be removed
* regardless of the value.
*
* @param firewallIds
* the network firewalls to update
* @param tags
* the meta-data tags to remove
* @throws CloudException
* an error occurred within the cloud provider
* @throws InternalException
* an error occurred within the Dasein Cloud API implementation
*/
public AsyncResult<Void> removeTags(@Nonnull String[] firewallIds, @Nonnull Tag... tags) throws CloudException,
InternalException;
/**
* Revokes the uniquely identified firewall rule.
*
* @param providerFirewallRuleId
* the unique ID of the firewall rule
* @throws InternalException
* an error occurred locally independent of any events in the
* cloud
* @throws CloudException
* an error occurred with the cloud provider while performing
* the operation
*/
public AsyncResult<Void> revoke(@Nonnull String providerFirewallRuleId) throws InternalException, CloudException;
/**
 * Updates meta-data for a network firewall with the new values. It will not
 * overwrite any value that currently exists unless it appears in the tags
 * you submit.
 *
 * @param firewallId
 * the network firewall to update
 * @param tags
 * the meta-data tags to set
 * @return an AsyncResult tracking the asynchronous update
 * @throws CloudException
 * an error occurred within the cloud provider
 * @throws InternalException
 * an error occurred within the Dasein Cloud API implementation
 */
public AsyncResult<Void> updateTags(@Nonnull String firewallId, @Nonnull Tag... tags) throws CloudException,
InternalException;
/**
 * Updates meta-data for multiple network firewalls with the new values. It
 * will not overwrite any value that currently exists unless it appears in
 * the tags you submit.
 *
 * @param firewallIds
 * the network firewalls to update
 * @param tags
 * the meta-data tags to set
 * @return an AsyncResult tracking the asynchronous update
 * @throws CloudException
 * an error occurred within the cloud provider
 * @throws InternalException
 * an error occurred within the Dasein Cloud API implementation
 */
public AsyncResult<Void> updateTags(@Nonnull String[] firewallIds, @Nonnull Tag... tags) throws CloudException,
InternalException;
/**
 * Set meta-data for a network firewall. Remove any tags that were not
 * provided by the incoming tags, and add or overwrite any new or
 * pre-existing tags.
 *
 * @param firewallId
 * the network firewall to set
 * @param tags
 * the meta-data tags to set
 * @return an AsyncResult tracking the asynchronous operation
 * @throws CloudException
 * an error occurred within the cloud provider
 * @throws InternalException
 * an error occurred within the Dasein Cloud API implementation
 */
public AsyncResult<Void> setTags(@Nonnull String firewallId, @Nonnull Tag... tags) throws CloudException,
InternalException;
/**
 * Set meta-data for multiple network firewalls. Remove any tags that were
 * not provided by the incoming tags, and add or overwrite any new or
 * pre-existing tags.
 *
 * @param firewallIds
 * the network firewalls to set
 * @param tags
 * the meta-data tags to set
 * @return an AsyncResult tracking the asynchronous operation
 * @throws CloudException
 * an error occurred within the cloud provider
 * @throws InternalException
 * an error occurred within the Dasein Cloud API implementation
 */
public AsyncResult<Void> setTags(@Nonnull String[] firewallIds, @Nonnull Tag... tags) throws CloudException,
InternalException;
/**
 * @return the underlying {@code NetworkFirewallSupport} implementation
 */
NetworkFirewallSupport getSupport();
}
|
class VectorSimilarity:
    """
    Computes cosine similarity between two equal-length numeric vectors.

    Attributes:
        input_dim : ``int``
            The dimension of the vectors. This is ``y.size()[-1]`` - the
            length of the vector that will go into the similarity computation. We need this so we can
            build weight vectors correctly.
    """

    def __init__(self, input_dim):
        # Expected length of every vector passed to calculate_similarity.
        self.input_dim = input_dim

    def calculate_similarity(self, vector1, vector2):
        """
        Calculate the cosine similarity between two vectors.

        Args:
            vector1 : list
                The first vector for similarity calculation.
            vector2 : list
                The second vector for similarity calculation.

        Returns:
            float
                The cosine similarity score in [-1, 1]; 0.0 if either
                vector has zero magnitude.

        Raises:
            ValueError
                If either vector's length differs from ``input_dim``.
        """
        if len(vector1) != self.input_dim or len(vector2) != self.input_dim:
            raise ValueError("Input vectors must have the same dimension as input_dim")
        dot_product = sum(v1 * v2 for v1, v2 in zip(vector1, vector2))
        magnitude1 = sum(v ** 2 for v in vector1) ** 0.5
        magnitude2 = sum(v ** 2 for v in vector2) ** 0.5
        # BUG FIX: the original divided unconditionally and raised
        # ZeroDivisionError for zero vectors.  A zero vector has no
        # direction, so define its similarity as 0.0.
        if magnitude1 == 0 or magnitude2 == 0:
            return 0.0
        return dot_product / (magnitude1 * magnitude2)
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
def click_download_button_android(driver):
    """Wait for the Android page's download button to become clickable, then click it.

    NOTE(review): the CSS selector below is still a placeholder -- it must be
    replaced with the real selector before this helper can do anything useful.

    Args:
        driver: a selenium WebDriver instance pointed at the Android web page.
    """
    # CSS selector for the download button on the Android web page.
    download_selector = "your_css_selector_here"
    # Block (up to 10 seconds) until the element is clickable.
    wait = WebDriverWait(driver, 10)
    button = wait.until(
        EC.element_to_be_clickable((By.CSS_SELECTOR, download_selector))
    )
    button.click()
#!/usr/bin/env bash
# Deploy the kf_doorman Cloud Function (HTTP-triggered) via gcloud.
set -euo pipefail

FUNC_NAME=kf_doorman
ENTRY_POINT=kf_doorman
ENV_VARS_FILE=environment/dev.env.yml
RUNTIME=python37

# Build the command as an array so each argument survives intact even if a
# value ever contains whitespace, then log it before running.  This replaces
# the previous `eval $COMMAND`, which re-split and re-evaluated the string.
COMMAND=(gcloud functions deploy "${FUNC_NAME}"
         --entry-point "${ENTRY_POINT}"
         --runtime "${RUNTIME}"
         --env-vars-file "${ENV_VARS_FILE}"
         --trigger-http)
echo "${COMMAND[*]}"
"${COMMAND[@]}"
#! /bin/bash
# Regression tests for lnav log-file handling.  Helper functions (run_test,
# check_output, check_error_output, on_error_fail_with, test_err_filename)
# and the variables ${lnav_test}/${srcdir}/${test_dir} are provided by the
# harness that sources this script.
# French syslog timestamps: only runs when an fr_FR locale is installed.
if locale -a | grep fr_FR; then
cp ${srcdir}/logfile_syslog_fr.0 logfile_syslog_fr.0
touch -t 200711030923 logfile_syslog_fr.0
run_test env LC_ALL=fr_FR.UTF-8 ${lnav_test} -n \
-c ";SELECT log_time FROM syslog_log" \
-c ":write-csv-to -" \
logfile_syslog_fr.0
check_output "french locale is not recognized" <<EOF
log_time
2007-08-19 11:08:37.000
EOF
fi
# A file that is explicitly named but unreadable should produce an error...
touch unreadable.log
chmod ugo-r unreadable.log
run_test ${lnav_test} -n unreadable.log
# Strip the absolute path from the error output before comparing.
sed -i "" -e "s|/.*/unreadable.log|unreadable.log|g" `test_err_filename`
check_error_output "able to read an unreadable log file?" <<EOF
error: Permission denied -- 'unreadable.log'
EOF
# ...but an unreadable file matched only by a glob is silently skipped.
run_test ${lnav_test} -n 'unreadable.*'
check_output "unreadable file was not skipped" <<EOF
EOF
# Format auto-detection: drive_logfile -f <fmt> fails unless the named
# format is the one inferred for the file.
run_test ./drive_logfile -f syslog_log ${srcdir}/logfile_syslog.0
on_error_fail_with "Didn't infer syslog log format?"
run_test ./drive_logfile -f tcsh_history ${srcdir}/logfile_tcsh_history.0
on_error_fail_with "Didn't infer tcsh-history log format?"
run_test ./drive_logfile -f access_log ${srcdir}/logfile_access_log.0
on_error_fail_with "Didn't infer access_log log format?"
run_test ./drive_logfile -f strace_log ${srcdir}/logfile_strace_log.0
on_error_fail_with "Didn't infer strace_log log format?"
run_test ./drive_logfile -f zblued_log ${srcdir}/logfile_blued.0
on_error_fail_with "Didn't infer blued_log that collides with syslog?"
run_test ./drive_logfile -f bro_http_log ${srcdir}/logfile_bro_http.log.0
on_error_fail_with "Didn't infer bro_http_log log format?"
run_test ./drive_logfile -f bro_conn_log ${srcdir}/logfile_bro_conn.log.0
on_error_fail_with "Didn't infer bro_conn_log log format?"
run_test ./drive_logfile ${srcdir}/logfile_empty.0
on_error_fail_with "Didn't handle empty log?"
# Syslog lines carry no year; lnav infers it from the file's mtime.
# drive_logfile -t prints the parsed timestamps for verification.
cp ${srcdir}/logfile_syslog.0 logfile_syslog.0
touch -t 200711030923 logfile_syslog.0
run_test ./drive_logfile -t -f syslog_log logfile_syslog.0
check_output "Syslog timestamp interpreted incorrectly?" <<EOF
Nov 03 09:23:38 2007 -- 000
Nov 03 09:23:38 2007 -- 000
Nov 03 09:23:38 2007 -- 000
Nov 03 09:47:02 2007 -- 000
EOF
# Dec->Jan entries must roll the inferred year backwards across year end.
touch -t 200711030923 ${srcdir}/logfile_syslog.1
run_test ./drive_logfile -t -f syslog_log ${srcdir}/logfile_syslog.1
check_output "Syslog timestamp interpreted incorrectly for year end?" <<EOF
Dec 03 09:23:38 2006 -- 000
Dec 03 09:23:38 2006 -- 000
Dec 03 09:23:38 2006 -- 000
Jan 03 09:47:02 2007 -- 000
EOF
# Time-only (generic) logs must roll the inferred date across midnight.
touch -t 200711030000 ${srcdir}/logfile_rollover.0
run_test ./drive_logfile -t -f generic_log ${srcdir}/logfile_rollover.0
check_output "Generic timestamp interpreted incorrectly for day rollover?" <<EOF
Nov 02 00:00:00 2007 -- 000
Nov 02 01:00:00 2007 -- 000
Nov 02 02:00:00 2007 -- 000
Nov 02 03:00:00 2007 -- 000
Nov 03 00:00:00 2007 -- 000
Nov 03 00:01:00 2007 -- 000
EOF
# Compressed logs: gzip is always supported; bzip2 only when compiled in.
gzip -c ${srcdir}/logfile_syslog.1 > logfile_syslog.1.gz
run_test ./drive_logfile -t -f syslog_log logfile_syslog.1.gz
check_output "Syslog timestamp incorrect for gzipped file?" <<EOF
Dec 03 09:23:38 2006 -- 000
Dec 03 09:23:38 2006 -- 000
Dec 03 09:23:38 2006 -- 000
Jan 03 09:47:02 2007 -- 000
EOF
if [ "$BZIP2_SUPPORT" -eq 1 ] && [ x"$BZIP2_CMD" != x"" ] ; then
$BZIP2_CMD -z -c "${srcdir}/logfile_syslog.1" > logfile_syslog.1.bz2
touch -t 200711030923 logfile_syslog.1.bz2
run_test ./drive_logfile -t -f syslog_log logfile_syslog.1.bz2
check_output "bzip2 file not loaded?" <<EOF
Dec 03 09:23:38 2006 -- 000
Dec 03 09:23:38 2006 -- 000
Dec 03 09:23:38 2006 -- 000
Jan 03 09:47:02 2007 -- 000
EOF
fi
# TCF timestamps carry no date; check the hour-wrap heuristic.
touch -t 201404061109 ${srcdir}/logfile_tcf.1
run_test ./drive_logfile -t -f tcf_log ${srcdir}/logfile_tcf.1
check_output "TCF timestamp interpreted incorrectly for hour wrap?" <<EOF
Apr 06 09:59:47 2014 -- 191
Apr 06 10:30:11 2014 -- 474
Apr 06 11:01:11 2014 -- 475
EOF
# The TCSH format converts to local time, so we need to specify a TZ
export TZ="UTC"
run_test ./drive_logfile -t -f tcsh_history ${srcdir}/logfile_tcsh_history.0
check_output "TCSH timestamp interpreted incorrectly?" <<EOF
Nov 02 17:59:26 2006 -- 000
Nov 02 17:59:26 2006 -- 000
Nov 02 17:59:45 2006 -- 000
Nov 02 17:59:45 2006 -- 000
EOF
run_test ./drive_logfile -t -f access_log ${srcdir}/logfile_access_log.0
check_output "access_log timestamp interpreted incorrectly?" <<EOF
Jul 20 22:59:26 2009 -- 000
Jul 20 22:59:29 2009 -- 000
Jul 20 22:59:29 2009 -- 000
EOF
# tai64n timestamps carry sub-second precision; the "-- NNN" field below is
# the millisecond component.
run_test ./drive_logfile -t -f generic_log ${srcdir}/logfile_tai64n.0
check_output "tai64n timestamps interpreted incorrectly?" <<EOF
Sep 22 03:31:05 2005 -- 997
Sep 22 03:31:05 2005 -- 997
Sep 22 03:31:06 2005 -- 210
Sep 22 03:31:06 2005 -- 210
Sep 22 03:31:07 2005 -- 714
Sep 22 03:31:07 2005 -- 714
Sep 22 03:31:07 2005 -- 715
Sep 22 03:31:07 2005 -- 715
Sep 22 03:31:07 2005 -- 954
Sep 22 03:31:07 2005 -- 954
EOF
touch -t 200711030923 ${srcdir}/logfile_strace_log.0
run_test ./drive_logfile -t -f strace_log ${srcdir}/logfile_strace_log.0
check_output "strace_log timestamp interpreted incorrectly?" <<EOF
Nov 03 08:09:33 2007 -- 814
Nov 03 08:09:33 2007 -- 815
Nov 03 08:09:33 2007 -- 815
Nov 03 08:09:33 2007 -- 815
Nov 03 08:09:33 2007 -- 816
Nov 03 08:09:33 2007 -- 816
Nov 03 08:09:33 2007 -- 816
Nov 03 08:09:33 2007 -- 816
Nov 03 08:09:33 2007 -- 816
EOF
# epoch_log: the two sample files exercise different epoch representations.
run_test ./drive_logfile -t -f epoch_log ${srcdir}/logfile_epoch.0
check_output "epoch_log timestamp interpreted incorrectly?" <<EOF
Apr 10 02:58:07 2015 -- 123
Apr 10 02:58:07 2015 -- 456
EOF
run_test ./drive_logfile -t -f epoch_log ${srcdir}/logfile_epoch.1
check_error_output "epoch" <<EOF
EOF
check_output "epoch_log timestamp interpreted incorrectly?" <<EOF
Apr 09 19:58:07 2015 -- 123
Apr 09 19:58:07 2015 -- 456
EOF
# A syslog file mixing timestamp styles should still parse every line.
touch -t 201509130923 ${srcdir}/logfile_syslog_with_mixed_times.0
run_test ./drive_logfile -t -f syslog_log ${srcdir}/logfile_syslog_with_mixed_times.0
check_output "syslog_log with mixed times interpreted incorrectly?" <<EOF
Sep 13 00:58:45 2015 -- 000
Sep 13 00:59:30 2015 -- 000
Sep 13 01:23:54 2015 -- 000
Sep 13 03:12:04 2015 -- 000
Sep 13 03:12:04 2015 -- 000
Sep 13 03:12:04 2015 -- 000
Sep 13 03:12:04 2015 -- 000
Sep 13 03:12:58 2015 -- 000
Sep 13 03:46:03 2015 -- 000
Sep 13 03:46:03 2015 -- 000
Sep 13 03:46:03 2015 -- 000
Sep 13 03:46:03 2015 -- 000
Sep 13 03:46:03 2015 -- 000
EOF
## Log-level detection: drive_logfile -v prints "<level> <flags>" per line.
run_test ./drive_logfile -v -f syslog_log ${srcdir}/logfile_syslog.0
check_output "Syslog level interpreted incorrectly?" <<EOF
error 0x0
info 0x0
error 0x0
info 0x0
EOF
run_test ./drive_logfile -v -f tcsh_history ${srcdir}/logfile_tcsh_history.0
check_output "TCSH level interpreted incorrectly?" <<EOF
info 0x0
info 0x80
info 0x0
info 0x80
EOF
run_test ./drive_logfile -v -f access_log ${srcdir}/logfile_access_log.0
check_output "access_log level interpreted incorrectly?" <<EOF
info 0x0
error 0x0
info 0x0
EOF
run_test ./drive_logfile -v -f strace_log ${srcdir}/logfile_strace_log.0
check_output "strace_log level interpreted incorrectly?" <<EOF
info 0x0
info 0x0
info 0x0
error 0x0
info 0x0
error 0x0
info 0x0
info 0x0
info 0x0
EOF
# generic_log: three sample files covering different level keywords.
run_test ./drive_logfile -t -f generic_log ${srcdir}/logfile_generic.0
check_output "generic_log timestamp interpreted incorrectly?" <<EOF
Jul 02 10:22:40 2012 -- 672
Oct 08 16:56:38 2014 -- 344
EOF
run_test ./drive_logfile -v -f generic_log ${srcdir}/logfile_generic.0
check_output "generic_log level interpreted incorrectly?" <<EOF
debug 0x0
warning 0x0
EOF
run_test ./drive_logfile -v -f generic_log ${srcdir}/logfile_generic.1
check_output "generic_log (1) level interpreted incorrectly?" <<EOF
info 0x0
error 0x0
EOF
run_test ./drive_logfile -v -f generic_log ${srcdir}/logfile_generic.2
check_output "generic_log (2) level interpreted incorrectly?" <<EOF
error 0x0
error 0x0
EOF
# glog (Google logging) format: timestamps then levels.
touch -t 200711030923 ${srcdir}/logfile_glog.0
run_test ./drive_logfile -t -f glog_log ${srcdir}/logfile_glog.0
check_output "glog_log timestamp interpreted incorrectly?" <<EOF
May 17 15:04:22 2007 -- 619
May 17 15:04:22 2007 -- 619
May 17 15:04:22 2007 -- 619
May 17 15:04:22 2007 -- 619
May 17 15:04:22 2007 -- 619
May 17 15:04:22 2007 -- 619
May 17 15:04:22 2007 -- 619
EOF
run_test ./drive_logfile -v -f glog_log ${srcdir}/logfile_glog.0
check_output "glog_log level interpreted incorrectly?" <<EOF
error 0x0
info 0x0
info 0x0
warning 0x0
info 0x0
info 0x0
error 0x0
EOF
# Writing back into the file currently being viewed shrinks it; lnav should
# notice the truncation and show the remaining content.
cp ${srcdir}/logfile_syslog.0 truncfile.0
chmod u+w truncfile.0
run_test ${lnav_test} -n \
-c ";update syslog_log set log_mark = 1 where log_line = 1" \
-c ":write-to truncfile.0" \
-c ":goto 1" \
truncfile.0
check_output "truncated log file not detected" <<EOF
Nov 3 09:23:38 veridian automount[16442]: attempting to mount entry /auto/opt
EOF
# Piping to stdin with -t should timestamp lines and append an EOF marker.
echo "Hi" | run_test ${lnav_test} -d /tmp/lnav.err -nt -w logfile_stdin.log
check_output "piping to stdin is not working?" <<EOF
2013-06-06T19:13:20.123 Hi
2013-06-06T19:13:20.123 ---- END-OF-STDIN ----
EOF
# -C checks files against the detected format and reports mismatched lines;
# sed strips absolute paths from the error output before comparing.
run_test ${lnav_test} -C ${srcdir}/logfile_bad_syslog.0
sed -i "" -e "s|/.*/logfile_bad_syslog.0|logfile_bad_syslog.0|g" `test_err_filename`
check_error_output "bad syslog line not found?" <<EOF
error:logfile_bad_syslog.0:2:line did not match format syslog_log/regex/std
error:logfile_bad_syslog.0:2: line -- Nov 3 09:23:38 veridian lookup for opt failed
error:logfile_bad_syslog.0:2:partial match -- Nov 3 09:23:38 veridian lookup for opt failed
EOF
run_test ${lnav_test} -C ${srcdir}/logfile_bad_access_log.0
sed -i "" -e "s|/.*/logfile_bad_access_log.0|logfile_bad_access_log.0|g" `test_err_filename`
check_error_output "bad access_log line not found?" <<EOF
error:logfile_bad_access_log.0:1:line did not match format access_log/regex/std
error:logfile_bad_access_log.0:1: line -- 192.168.202.254 [20/Jul/2009:22:59:29 +0000] "GET /vmw/vSphere/default/vmkboot.gz HTTP/1.0" 404 46210 "-" "gPXE/0.9.7"
error:logfile_bad_access_log.0:1:partial match -- 192.168.202.254
EOF
# -I adds a config dir; these formats rewrite machine-oriented timestamps.
run_test ${lnav_test} -n -I ${test_dir} ${srcdir}/logfile_epoch.0
check_output "rewriting machine-oriented timestamp didn't work?" <<EOF
2015-04-10 02:58:07.123000 Hello, World!
2015-04-10 02:58:07.456000 Goodbye, World!
EOF
run_test ${lnav_test} -n -I ${test_dir} ${srcdir}/logfile_crlf.0
check_output "CR-LF line-endings not handled?" <<EOF
2012-07-02 10:22:40,672:DEBUG:foo bar baz
2014-10-08 16:56:38,344:WARN:foo bar baz
EOF
# Formats with multiple regex patterns should match all their variants.
run_test ${lnav_test} -n -I ${test_dir} \
-c ';SELECT count(*) FROM haproxy_log' \
${srcdir}/logfile_haproxy.0
check_output "multi-pattern logs don't work?" <<EOF
count(*)
17
EOF
# XXX get this working...
# run_test ${lnav_test} -n -I ${test_dir} <(cat ${srcdir}/logfile_access_log.0)
#
# check_output "opening a FIFO didn't work?" <<EOF
# 192.168.202.254 - - [20/Jul/2009:22:59:26 +0000] "GET /vmw/cgi/tramp HTTP/1.0" 200 134 "-" "gPXE/0.9.7"
# 192.168.202.254 - - [20/Jul/2009:22:59:29 +0000] "GET /vmw/vSphere/default/vmkboot.gz HTTP/1.0" 404 46210 "-" "gPXE/0.9.7"
# 192.168.202.254 - - [20/Jul/2009:22:59:29 +0000] "GET /vmw/vSphere/default/vmkernel.gz HTTP/1.0" 200 78929 "-" "gPXE/0.9.7"
# EOF
# Run apprentice-mode synthesis learning against the QF_NIA "leipzig"
# benchmarks with the flags below (10s SMT timeout, 10 iterations,
# 30 threads); results go to experiments/eval/leipzig/.
python synthesis/learning.py experiments/configs/leipzig/config_apprentice.json --benchmark_dir experiments/data/qf_nia/leipzig --max_timeout 10 --num_iters 10 --iters_inc 10 --pop_size 1 --eval_dir experiments/eval/leipzig/ --smt_batch_size 100 --full_pass 10 --num_threads 30 --experiment_name leipzig_apprentice
|
<gh_stars>0
"use strict";
// Demo fixture for threeSumClosest below: powers of two and a target value.
var arr = [1, 2, 4, 8, 16, 32, 64, 128];
var target = 82;
/**
 * LeetCode 16 (3Sum Closest): return the sum of the three numbers in `nums`
 * whose sum is closest to `target`; returns `target` itself on an exact hit.
 *
 * @param {number[]} nums   array containing at least three numbers
 * @param {number} target   the value to approach
 * @returns {number} the closest achievable three-element sum
 */
function threeSumClosest(nums, target) {
    // BUG FIX: sort() without a comparator sorts lexicographically
    // (e.g. 128 sorts before 16), which breaks the two-pointer sweep.
    // Sort numerically, on a copy so the caller's array is not mutated.
    nums = nums.slice().sort(function (a, b) { return a - b; });
    var ptrLow;
    var ptrHigh;
    var dist = Number.MAX_VALUE;
    var answer;
    // Fix the smallest element, then sweep the remaining range inward.
    for (var i = 0; i < nums.length - 2; i++) {
        ptrLow = i + 1;
        ptrHigh = nums.length - 1;
        while (ptrLow < ptrHigh) {
            var result = nums[i] + nums[ptrLow] + nums[ptrHigh];
            if (result === target)
                return target;
            if (Math.abs(target - result) <= dist) {
                dist = Math.abs(target - result);
                answer = result;
            }
            // Move whichever pointer brings the sum toward the target.
            if (result > target)
                ptrHigh--;
            else
                ptrLow++;
        }
    }
    return answer;
}
// Exercise the function with the demo fixture (2 + 16 + 64 === 82).
console.log(threeSumClosest(arr, target));
|
"use strict";
const ObjectID = require('bson-objectid');
const getDbCollection = require('../utils/get-db-collection');
// Operation codes stamped on ledger records to tag their origin.
const OPERATION_CODE_TARIFF = 'tariff';
const OPERATION_CODE_TOPUP = 'topup';
/**
 * Base class for a per-collection accounting ledger.  Each record carries a
 * userId, a signed `amount`, an operation code and audit metadata; balances
 * are computed by summing amounts.  All database access goes through
 * ctx.mongoTransaction(collection, method, args).
 */
class AbstractAccounting {
    ctx = null;        // request context providing mongoTransaction() and request.ip
    collection = null; // Mongo collection holding this ledger's records

    constructor(ctx, collection) {
        this.ctx = ctx;
        this.collection = collection;
    }

    /**
     * Sum the `amount` field of matching records.
     *
     * @param {string} userId        filter by user id (empty = all users)
     * @param {string} operationCode filter by operation code (empty = all)
     * @returns {Promise<number>} the balance, or 0 when nothing matches
     */
    async GetUserBalance(userId, operationCode = '') {
        const match = {};
        userId = userId || '';
        if (userId) {
            match.userId = ObjectID(userId);
        }
        operationCode = operationCode || '';
        if (operationCode) {
            match.operationCode = operationCode;
        }
        const pipeline = [
            {'$match': match},
            {'$group': {'_id': '$userId', 'balance': {'$sum': '$amount'}}},
        ];
        const options = {};
        // NOTE: the original wrapped this in try { ... } catch (e) { throw e; },
        // which is a no-op; errors now propagate naturally.
        const r = await this.ctx.mongoTransaction(
            this.collection,
            'aggregate',
            [
                pipeline,
                options
            ]
        )
        const result = await r.toArray();
        if (!result || !result.length) {
            return 0
        }
        return result[0].balance;
    }

    /**
     * Fetch a page of ledger records, newest first.
     *
     * @param {string} userId        filter by user id (empty = all users)
     * @param {string} operationCode filter by operation code (empty = all)
     * @param {number} limit         page size
     * @param {number} offset        records to skip
     * @returns {Promise<object>} {limit, offset, count, items, currency}
     */
    async GetUserHistory(userId, operationCode = '', limit = 100, offset = 0) {
        const match = {};
        userId = userId || '';
        if (userId) {
            match.userId = ObjectID(userId);
        }
        operationCode = operationCode || '';
        if (operationCode) {
            match.operationCode = operationCode;
        }
        const options = {
            sort: {_id: -1},
            projection: {}
        };
        // BUG FIX: the original's `catch (err) { throw e; }` referenced the
        // undefined name `e`, turning every database error into a
        // ReferenceError that masked the real failure.  Let errors propagate.
        const r = await this.ctx.mongoTransaction(
            this.collection,
            'find',
            [
                match,
                options
            ]
        )
        const count = await r.count();
        const items = await r.skip(offset).limit(limit).toArray();
        return {
            limit: limit,
            offset: offset,
            count: count,
            items: items,
            currency: 'USD',
        }
    }

    /**
     * Append one ledger record for the user.
     *
     * @param {string} userId        owner of the operation
     * @param {number} amount        signed amount (negative = charge)
     * @param {string} operationCode origin tag (see OPERATION_CODE_*)
     * @param {string} name          human-readable label
     * @param {string} mask          optional payment-instrument mask
     */
    async AddUserOperation(userId, amount, operationCode, name, mask) {
        const operation = {
            userId: ObjectID(userId),
            createDate: (new Date).toISOString(),
            amount: amount,
            name: name || '',
            operationCode: operationCode || '',
            mask: mask || '',
            ip: this.ctx.request.ip || ''
        }
        // The original's catch (err) { throw err } added nothing; errors
        // propagate to the caller unchanged.
        await this.ctx.mongoTransaction(
            this.collection,
            'insertOne',
            [
                operation
            ]
        )
    }
}
/**
 * Ledger of user-facing accounting operations, backed by the
 * accountingUsers collection.
 */
class AccountingUser extends AbstractAccounting {
    constructor(ctx) {
        // The original ended with `return this;`, which is a no-op:
        // constructors return the new instance implicitly.
        super(ctx, getDbCollection.accountingUsers(ctx));
    }
}
/**
 * Ledger of internal accounting operations, backed by the
 * accountingInternal collection.
 */
class AccountingInternal extends AbstractAccounting {
    constructor(ctx) {
        // `return this;` removed -- constructors return the instance implicitly.
        super(ctx, getDbCollection.accountingInternal(ctx));
    }
}
// Public API: the two concrete ledgers plus the operation-code constants.
module.exports = {
AccountingUser,
AccountingInternal,
OPERATION_CODE_TARIFF,
OPERATION_CODE_TOPUP,
}
|
<gh_stars>0
// Doxygen-generated search-index fragment (do not edit by hand): maps each
// search term to its display name and the documentation anchors it links to.
var searchData=
[
['graphptr',['GraphPtr',['../classarmnn_deserializer_1_1_deserializer.xhtml#a38c1f8ba8e51364802669c968cf98ff5',1,'armnnDeserializer::Deserializer::GraphPtr()'],['../classarmnn_onnx_parser_1_1_onnx_parser.xhtml#ac81510b21cb2b5719cce34474ad01675',1,'armnnOnnxParser::OnnxParser::GraphPtr()']]],
['graphuniqueptr',['GraphUniquePtr',['../classarmnn_1_1_i_backend_internal.xhtml#a5d9d4ef183078690e3306320dc74b945',1,'armnn::IBackendInternal']]]
];
|
#!/bin/bash
# Print a single line of a file by line number.
#   usage: print_line.sh [line-number] [file]
# Defaults preserve the original behavior: line 10 of file.txt.
LINE_NO=${1:-10}
FILE=${2:-file.txt}
# `exit` stops awk after the matching line instead of scanning the rest.
awk -v n="$LINE_NO" 'NR == n { print; exit }' "$FILE"
/**
 * Round `value` to `decimals` decimal places using decimal (string-exponent)
 * shifting, which avoids most binary floating-point artifacts
 * (e.g. Math.round(1.005 * 100) gives 100, while round(1.005, 2) === 1.01).
 *
 * @param {number} value    the number to round
 * @param {number} decimals number of decimal places to keep
 * @returns {number} the rounded value (NaN for non-finite input)
 */
const round = (value, decimals) => {
    if (!Number.isFinite(value)) return NaN;
    // BUG FIX: the original built `value + 'e' + decimals`, which produces
    // garbage like "1.5e-8e9" (NaN) when `value` already stringifies in
    // scientific notation.  Split off any existing exponent and add the
    // shift to it instead.
    const [mantissa, exp = '0'] = String(value).toLowerCase().split('e');
    const shifted = Math.round(Number(`${mantissa}e${Number(exp) + decimals}`));
    return Number(`${shifted}e-${decimals}`);
}
# Load chruby and its auto-switching hook from the system-wide install.
source /usr/local/share/chruby/chruby.sh
source /usr/local/share/chruby/auto.sh
# Activate the newest installed Ruby: list versions, sort, take the last,
# and strip the " *" marker chruby prints next to the active version.
# NOTE(review): plain `sort` is lexical, so e.g. "2.10" sorts before "2.9" --
# confirm this is acceptable for the versions installed here.
chruby "$(chruby | sort | tail -1 | tr -d " *")"
def days_in_month(year, month):
    """Return the number of days in ``month`` of ``year``.

    Args:
        year: calendar year, used only for the February leap-year check.
        month: month number, 1-12.

    Returns:
        28, 29, 30, or 31.
    """
    # BUG FIX: the original listed 2 (February) among the 30-day months,
    # which made the leap-year branch below unreachable.  The 30-day
    # months are April, June, September and November.
    if month in [4, 6, 9, 11]:
        return 30
    if month == 2:
        # Gregorian rule: leap if divisible by 4, except centuries,
        # unless divisible by 400.
        if (year % 4 == 0 and year % 100 != 0) or year % 400 == 0:
            return 29
        return 28
    return 31
// BOJ 1373: convert a binary number (given as a string) to octal.
// 2019.09.21
// Topic: number-base conversion.
#include<iostream>
#include<string>
using namespace std;
int main()
{
    // Unhook C++ streams from C stdio for faster I/O.
    ios::sync_with_stdio(0);
    cin.tie(0);
    cout.tie(0);

    // Read the binary number as a string: it can be far longer than any
    // built-in integer type allows.
    string bits;
    cin >> bits;

    // Left-pad with zeroes so the length is a multiple of 3; each group of
    // three bits maps to exactly one octal digit.
    size_t rem = bits.size() % 3;
    if (rem != 0)
    {
        bits.insert(0, 3 - rem, '0');
    }

    // Emit one octal digit per 3-bit group: value = 4*b0 + 2*b1 + 1*b2.
    for (size_t i = 0; i < bits.size(); i += 3)
    {
        int digit = (bits[i] - '0') * 4
                  + (bits[i + 1] - '0') * 2
                  + (bits[i + 2] - '0');
        cout << digit;
    }
    return 0;
}
|
import fetch from "node-fetch";
import { parseCookies } from "nookies";
import Head from "next/head";
import { dashboard } from "../../../../config.json";
import { openModal } from "../../../dashboard/components/modal";
import AddStoreItem from "../../../dashboard/components/modal/add-store-item";
import { useEffect, useState } from "react";
import { useRouter } from "next/router";
import AlertMessage from "../../../dashboard/components/AlertMessage";
import Logger from "../../../modules/Logger";
const Store = ({ guild, isAuth }) => {
const [message, setMessage] = useState(null);
const router = useRouter();
useEffect(() => {
if (!isAuth) {
return router.push("/login");
}
}, [router, isAuth]);
useEffect(() => {
setMessage(router.query?.message);
}, [router]);
async function deleteItem(name) {
try {
const data = await (
await fetch(
`${dashboard.dashboardUrl}/api/guilds/${guild.id}/store?name=${encodeURIComponent(name)}`,
{
method: "DELETE",
}
)
).json();
if (data.status === "success") {
router.push(`/dashboard/${guild.id}/store?message=${data.message}`);
}
setMessage(data?.error);
} catch (e) {
Logger.error("delete_store_item", e);
}
}
function addStoreItem() {
openModal("addStoreItem");
}
return (
<>
{message ? <AlertMessage type="success" message={message} /> : null}
<AddStoreItem guild={guild} />
<Head>
<title>{guild?.name} - Store / {dashboard.botName} Dashboard</title>
</Head>
<div className="page-title">
<h4>{guild?.name} - Store</h4>
<div>
<a className="btn btn-primary" href={`/dashboard/${guild.id}`}>
Return
</a>
<button className="btn btn-primary ml-5" onClick={addStoreItem}>
Add store item
</button>
</div>
</div>
{guild?.store?.length > 0 ? (
<table>
<thead>
<tr>
<th>Name</th>
<th>Price</th>
<th>Actions</th>
</tr>
</thead>
<tbody>
{guild?.store?.map((item, idx) => {
return (
<tr key={idx}>
<td className="cmd-response">{item.name}</td>
<td>{item.price}</td>
<td className="table-actions">
<button onClick={() => deleteItem(item.name)} className="btn btn-sm btn-red">
Delete
</button>
</td>
</tr>
);
})}
</tbody>
</table>
) : (
<p>This guid does not have any items in the store yet</p>
)}
</>
);
};
/**
 * Server-side props: fetch the guild (authenticated via the `token` cookie)
 * and report whether the token was accepted.
 */
export async function getServerSideProps(ctx) {
  const cookies = parseCookies(ctx);
  const url = `${dashboard.dashboardUrl}/api/guilds/${ctx.query.id}`;
  const response = await fetch(url, {
    headers: {
      auth: cookies?.token,
    },
  });
  const data = await response.json();
  return {
    props: {
      isAuth: data.error !== "invalid_token",
      guild: data?.guild || {},
    },
  };
}
export default Store;
|
var express = require('express');
var passport = require('passport')
var router = express.Router();
// GET / -- return the logged-in user's info, or `false` when no session.
// Handles Ajax request for user information if user is authenticated
router.get('/', function(req, res) {
console.log('get /user route');
// check if logged in
if(req.isAuthenticated()) {
// send back user object from database
var userInfo = {
username : req.user.username
};
res.send(userInfo);
} else {
// failure best handled on the server. do redirect here.
console.log('not logged in');
// should probably be res.sendStatus(403) and handled client-side, esp if this is an AJAX request (which is likely with AngularJS)
res.send(false);
}
});
// GET /facebook -- kick off the Facebook OAuth flow via passport.
router.get('/facebook',
passport.authenticate('facebook'));
// GET /facebook/callback -- OAuth return URL; redirects to /user on
// success, /home on failure.
router.get('/facebook/callback',
passport.authenticate('facebook', { failureRedirect: '/home' }),
function(req, res) {
// Successful authentication, redirect home.
res.redirect('/user');
});
// GET /logout -- clear all server session information about this user.
router.get('/logout', function(req, res) {
// Use passport's built-in method to log out the user
console.log('Logged out');
req.logOut();
res.sendStatus(200);
});
module.exports = router;
|
#!/usr/bin/env bats
load $BATS_TEST_DIRNAME/helper/common.bash
# Fixture: table `a` with an inverted (y,x) primary key and table `b`
# referencing a(y) with ON UPDATE/DELETE CASCADE.
setup() {
setup_common
dolt sql <<SQL
create table a (x int, y int, primary key (y,x));
create table b (x int, y int, primary key (y,x), foreign key (y) references a(y) on update cascade on delete cascade);
insert into a values (4,0), (3,1), (2,2);
insert into b values (2,1), (4,2), (3,0);
SQL
}
teardown() {
assert_feature_version
teardown_common
}
# Committing the valid fixture data should pass the FK commit check.
@test "foreign-keys-invert-pk: test commit check pass" {
dolt commit -am "cm"
}
# A row inserted with FK checks off on a branch should surface as a
# constraint violation after merging into main.
@test "foreign-keys-invert-pk: check referential integrity on merge" {
dolt commit -am "main"
dolt checkout -b feat
dolt sql <<SQL
set FOREIGN_KEY_CHECKS = 0;
insert into b values (1,3);
SQL
dolt commit -am "feat"
dolt checkout main
run dolt merge feat
run dolt constraints verify --all
[ "$status" -eq "1" ]
run dolt sql -q "SELECT * FROM dolt_constraint_violations" -r=csv
[ "$status" -eq "0" ]
[[ "$output" =~ "table,num_violations" ]] || false
[[ "$output" =~ "b,1" ]] || false
}
# Inserting a child row with no matching parent must fail.
@test "foreign-keys-invert-pk: test bad insert" {
run dolt sql -q "insert into b values (1,3)"
[ "$status" -eq 1 ]
}
# ON UPDATE CASCADE: changing a parent key updates the child rows.
@test "foreign-keys-invert-pk: test update" {
dolt sql -q "update a set y = -1 where y = 0"
run dolt sql -q "select * from b" -r csv
[ "$status" -eq "0" ]
[[ "$output" =~ "x,y" ]] || false
[[ "$output" =~ "3,-1" ]] || false
[[ "$output" =~ "2,1" ]] || false
[[ "$output" =~ "4,2" ]] || false
}
# ON DELETE CASCADE: deleting a parent row removes the child rows.
@test "foreign-keys-invert-pk: test delete" {
dolt sql -q "delete from a where y = 0"
run dolt sql -q "select * from b" -r csv
[ "$status" -eq "0" ]
[[ "$output" =~ "x,y" ]] || false
[[ "$output" =~ "2,1" ]] || false
[[ "$output" =~ "4,2" ]] || false
}
|
package it.drbourbon.mywatchfaces;
import es.malvarez.mywatchfaces.AbstractWatchFaceSlpt;
import it.drbourbon.mywatchfaces.widget.FuzzyTextClock;
import it.drbourbon.mywatchfaces.widget.HugeDigitClock;
import it.drbourbon.mywatchfaces.widget.ThreeLines;
import it.drbourbon.mywatchfaces.widget.ThreeLinesStepsWidget;
/**
 * Watch face combining the {@code ThreeLines} layout widget with a
 * {@code ThreeLinesStepsWidget}; the widgets are handed to the base class
 * in the constructor and no further configuration is required.
 *
 * NOTE(review): the class name says "Splt" while the base class is spelled
 * "Slpt" -- probable typo, but renaming would break existing references.
 *
 * Created by fabio on 21/05/17.
 */
public class ThreeLinesWatchFaceSplt extends AbstractWatchFaceSlpt {
public ThreeLinesWatchFaceSplt() {
super(
new ThreeLines(0),
new ThreeLinesStepsWidget()
);
}
@Override
protected void initWatchFaceConfig() {
// No additional configuration needed for this watch face.
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.