text stringlengths 2 1.04M | meta dict |
|---|---|
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("05.BooleanVariable")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("05.BooleanVariable")]
[assembly: AssemblyCopyright("Copyright © 2015")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("dd18b350-33d6-48e1-83ec-2ce9edd3b5dc")]
// Version information for an assembly consists of the following four values:
//
// Major Version
// Minor Version
// Build Number
// Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
// AssemblyVersion is part of the assembly's identity and participates in binding.
[assembly: AssemblyVersion("1.0.0.0")]
// AssemblyFileVersion only sets the Win32 file-version resource; it may differ
// from AssemblyVersion without affecting assembly binding.
[assembly: AssemblyFileVersion("1.0.0.0")]
| {
"content_hash": "ae0dc40be1c312f5d8cdcd2c10348d05",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 84,
"avg_line_length": 39.138888888888886,
"alnum_prop": 0.7459190915542938,
"repo_name": "Andro0/TelerikAcademy",
"id": "67d31584a0b5dc7bded8c3ebcef7bcd4fcc8ccac",
"size": "1412",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "C#1/Homework_Primitive Data Types and Variables/05.BooleanVariable/Properties/AssemblyInfo.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "683413"
},
{
"name": "CSS",
"bytes": "9105"
},
{
"name": "HTML",
"bytes": "41273"
}
],
"symlink_target": ""
} |
<?php
/**
* @author Orhan POLAT
*/
namespace Admin;
use System\Settings as SysSettings;
use Webim\View\Manager as View;
class ContentSettings {
/**
 * Manager instance shared by the static route callbacks below,
 * captured once in register().
 *
 * @var Manager
 */
protected static $manager;
/**
 * Register current class and routes.
 *
 * Wires both GET and POST handlers for the content settings pages and
 * adds the corresponding admin menu entries.
 *
 * @param Manager $manager
 */
public static function register(Manager $manager) {
  // Both paths resolve to the same settings screen.
  $paths = array(
    $manager->prefix . '/content',
    $manager->prefix . '/content/settings'
  );

  $manager->addRoute($paths, __CLASS__ . '::getIndex');
  $manager->addRoute($paths, __CLASS__ . '::postIndex', 'POST');

  // Parent "İçerik" entry, then its "Ayarlar" child.
  $parent = $manager->addMenu(lang('admin.menu.system', 'Sistem'), $manager->prefix . '/content', lang('admin.menu.content', 'İçerik'), null, 'fa fa-edit');
  $manager->addMenu(lang('admin.menu.system', 'Sistem'), $manager->prefix . '/content/settings', lang('admin.menu.settings', 'Ayarlar'), $parent, 'fa fa-cog');

  static::$manager = $manager;
}
/**
 * Get
 *
 * Renders the content settings page. This method is registered as a route
 * callback via the string callable __CLASS__ . '::getIndex' and is therefore
 * invoked statically; it is declared static here because calling a
 * non-static method statically is a fatal error on PHP 8+.
 *
 * @param array $params Route parameters (unused)
 *
 * @return string Rendered view
 */
public static function getIndex($params = array()) {
  $manager = static::$manager;

  // Page caption and breadcrumb trail.
  $manager->set('caption', lang('admin.menu.settings', 'Ayarlar'));
  $manager->breadcrumb($manager->prefix . '/content', lang('admin.menu.content', 'İçerik'));
  $manager->breadcrumb($manager->prefix . '/content/settings', lang('admin.menu.settings', 'Ayarlar'));

  return View::create('content.settings')->data($manager::data())->render();
}
/**
 * Post
 *
 * Saves the content settings submitted from the admin form and responds
 * as JSON. Registered as a static route callable (see register()), so the
 * method is declared static — a non-static method invoked statically is a
 * fatal error on PHP 8+.
 *
 * @param array $params Route parameters (unused)
 *
 * @return string
 */
public static function postIndex($params = array()) {
  $manager = static::$manager;
  $manager->app->response->setContentType('json');

  //Make settings part by part
  $contacts = array();
  $maps = array();

  $socials = array(
    'facebook' => input('social_facebook'),
    'twitter' => input('social_twitter'),
    'google_plus' => input('social_google_plus'),
    'linkedin' => input('social_linkedin'),
    'instagram' => input('social_instagram'),
    'youtube' => input('social_youtube'),
    'flickr' => input('social_flickr'),
    'pinterest' => input('social_pinterest'),
    'skype' => input('social_skype'),
    'vimeo' => input('social_vimeo'),
    'github' => input('social_github'),
    'whatsapp' => input('social_whatsapp')
  );

  // raw_input is used where markup/HTML snippets are expected.
  $google = array(
    'map_key' => raw_input('google_map_key'),
    'search_console' => raw_input('google_search_console'),
    'analytics' => raw_input('google_analytics'),
    'recaptcha_site_key' => input('google_recaptcha_site_key'),
    'recaptcha_site_secret' => input('google_recaptcha_site_secret'),
    'adsense' => raw_input('google_adsense')
  );

  $oauth = array(
    'facebook' => array(
      'app_id' => input('facebook_app_id'),
      'app_secret' => input('facebook_app_secret')
    ),
    'twitter' => array(
      'api_key' => input('twitter_api_key'),
      'api_secret' => input('twitter_api_secret'),
      'access_token' => input('twitter_access_token'),
      'access_token_secret' => input('twitter_access_token_secret')
    )
  );

  // Cast to array so an absent/empty form section does not raise a
  // warning (or TypeError on PHP 8) when iterated.
  foreach ((array) input('contact_title') as $key => $contact) {
    $contacts[] = array(
      'title' => $contact,
      'name' => array_get(input('contact_name'), $key),
      'address' => array_get(input('contact_address'), $key),
      'phone' => array_get(input('contact_phone'), $key),
      'gsm' => array_get(input('contact_gsm'), $key),
      'fax' => array_get(input('contact_fax'), $key),
      'email' => array_get(input('contact_email'), $key),
      'web' => array_get(input('contact_web'), $key)
    );
  }

  foreach ((array) input('map_title') as $key => $map) {
    $maps[] = array(
      'title' => $map,
      'name' => array_get(input('map_name'), $key),
      'lat' => array_get(input('map_lat'), $key),
      'lon' => array_get(input('map_lon'), $key),
      'zoom' => array_get(input('map_zoom'), $key),
      'marker_lat' => array_get(input('map_marker_lat'), $key),
      'marker_lon' => array_get(input('map_marker_lon'), $key),
      'marker_content' => array_get(raw_input('map_marker_content'), $key)
    );
  }

  //Settings container
  $settings = array(
    'contact' => $contacts,
    'map' => $maps,
    'social' => $socials,
    'google' => $google,
    'oauth' => $oauth,
    'html_editor' => input('html_editor', 'default')
  );

  //Remove first
  SysSettings::init()->remove('system', 'contact');
  SysSettings::init()->remove('system', 'map');

  return SysSettings::init()->saveAll('system', $settings)->forData();
}
} | {
"content_hash": "87a85b5ef01be186e900b5bcfcb7c4b7",
"timestamp": "",
"source": "github",
"line_count": 151,
"max_line_length": 161,
"avg_line_length": 31.370860927152318,
"alnum_prop": 0.5691365843360777,
"repo_name": "morsaken/webim-cms",
"id": "62020b4bf129eb50d47e24bb19a726fc8c822ac9",
"size": "4741",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "modules/Admin/ContentSettings.php",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1913705"
},
{
"name": "HTML",
"bytes": "2410087"
},
{
"name": "JavaScript",
"bytes": "7545289"
},
{
"name": "Makefile",
"bytes": "285"
},
{
"name": "PHP",
"bytes": "366310"
},
{
"name": "TSQL",
"bytes": "18403"
}
],
"symlink_target": ""
} |
/***************************************************************************/
/* */
/* pfrsbit.h */
/* */
/* FreeType PFR bitmap loader (specification). */
/* */
/* Copyright 2002-2015 by */
/* David Turner, Robert Wilhelm, and Werner Lemberg. */
/* */
/* This file is part of the FreeType project, and may only be used, */
/* modified, and distributed under the terms of the FreeType project */
/* license, LICENSE.TXT. By continuing to use, modify, or distribute */
/* this file you indicate that you have read the license and */
/* understand and accept it fully. */
/* */
/***************************************************************************/
#ifndef __PFRSBIT_H__
#define __PFRSBIT_H__
#include "pfrobjs.h"
FT_BEGIN_HEADER
/* Load the embedded bitmap for `glyph_index' into `glyph' at the  */
/* given `size' (implementation in pfrsbit.c).                     */
FT_LOCAL( FT_Error )
pfr_slot_load_bitmap( PFR_Slot glyph,
PFR_Size size,
FT_UInt glyph_index );
FT_END_HEADER
#endif /* __PFRSBIT_H__ */
/* END */
| {
"content_hash": "7cc13695bc3ef6c304f8680b0ac86181",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 77,
"avg_line_length": 43.861111111111114,
"alnum_prop": 0.3077897403419886,
"repo_name": "yapingxin/saturn-gui-lib-workshop",
"id": "133eb2cf0d5e99ca3586d12f0dcbfdd7f424d091",
"size": "1579",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "FreeType/freetype-2.6.2/src/pfr/pfrsbit.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Awk",
"bytes": "8286"
},
{
"name": "C",
"bytes": "18974626"
},
{
"name": "C++",
"bytes": "2666689"
},
{
"name": "CMake",
"bytes": "104576"
},
{
"name": "CSS",
"bytes": "3042"
},
{
"name": "DIGITAL Command Language",
"bytes": "74190"
},
{
"name": "Gnuplot",
"bytes": "630"
},
{
"name": "Groff",
"bytes": "4882"
},
{
"name": "HTML",
"bytes": "2698820"
},
{
"name": "M4",
"bytes": "114055"
},
{
"name": "Makefile",
"bytes": "582662"
},
{
"name": "Objective-C",
"bytes": "25406"
},
{
"name": "Perl",
"bytes": "64219"
},
{
"name": "Python",
"bytes": "382829"
},
{
"name": "Shell",
"bytes": "1041344"
}
],
"symlink_target": ""
} |
DELIMITER $$
DROP procedure IF EXISTS rdebug_watch_variables $$
-- List the local variables (name, type, current value) visible in the
-- innermost stack frame of the debugged worker session identified by
-- @_rdebug_recipient_id.
CREATE procedure rdebug_watch_variables()
DETERMINISTIC
READS SQL DATA
SQL SECURITY INVOKER
main_body: BEGIN
select
routine_schema, routine_name, variable_name, variable_type, variable_value
from
-- Deepest stack frame for this worker (highest stack_level).
(select * from _rdebug_stack_state where worker_id = @_rdebug_recipient_id
order by stack_level desc limit 1) select_current_stack_state
join _rdebug_routine_variables using (routine_schema, routine_name)
join _rdebug_routine_variables_state using (worker_id, stack_level, routine_schema, routine_name, variable_name)
where
-- Keep only variables whose declaration scope covers the current statement.
statement_id between variable_scope_id_start and variable_scope_id_end
order by
variable_name
;
END $$
DELIMITER ;
| {
"content_hash": "7b20310e24dd774f8254ea1b843d35c9",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 118,
"avg_line_length": 30.72,
"alnum_prop": 0.7265625,
"repo_name": "shlomi-noach/common_schema",
"id": "5ff4e51f71511ccd662a4ad871f3c4eceb62a241",
"size": "1010",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "routines/debug/rdebug_watch_variables.sql",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PLSQL",
"bytes": "15308"
},
{
"name": "PLpgSQL",
"bytes": "421983"
},
{
"name": "SQLPL",
"bytes": "9737"
},
{
"name": "Shell",
"bytes": "4650"
}
],
"symlink_target": ""
} |
# Gatling launcher: resolves GATLING_HOME, compiles user simulations with the
# Zinc compiler, then starts the Gatling engine.

# NOTE: "$@" is used directly at the call sites below so that arguments
# containing spaces survive intact; a scalar USER_ARGS="$@" collapses them
# and then re-splits on whitespace. The variable is kept for compatibility
# with anything that may source this script.
USER_ARGS="$@"

OLDDIR=`pwd`
BIN_DIR=`dirname $0`
# Resolve the absolute distribution root (parent of bin/) without readlink.
cd "${BIN_DIR}/.." && DEFAULT_GATLING_HOME=`pwd` && cd "${OLDDIR}"

# Environment may override both locations.
GATLING_HOME="${GATLING_HOME:=${DEFAULT_GATLING_HOME}}"
GATLING_CONF="${GATLING_CONF:=$GATLING_HOME/conf}"
export GATLING_HOME GATLING_CONF
echo "GATLING_HOME is set to ${GATLING_HOME}"

JAVA_OPTS="-server -XX:+UseThreadPriorities -XX:ThreadPriorityPolicy=42 -Xms512M -Xmx512M -Xmn100M -XX:+HeapDumpOnOutOfMemoryError -XX:+AggressiveOpts -XX:+OptimizeStringConcat -XX:+UseFastAccessorMethods -XX:+UseParNewGC -XX:+UseConcMarkSweepGC -XX:+CMSParallelRemarkEnabled -Djava.net.preferIPv4Stack=true -Djava.net.preferIPv6Addresses=false ${JAVA_OPTS}"
# The Scala compiler needs a larger thread stack.
COMPILER_OPTS="$JAVA_OPTS -Xss10M"

# Setup classpaths
COMMON_CLASSPATH="$GATLING_CONF:${JAVA_CLASSPATH}"
COMPILER_CLASSPATH="$GATLING_HOME/lib/zinc/*:$COMMON_CLASSPATH"
GATLING_CLASSPATH="$GATLING_HOME/lib/*:$GATLING_HOME/user-files:$COMMON_CLASSPATH"

# Build compilation classpath (every top-level jar in lib/, ":"-joined)
COMPILATION_CLASSPATH=`find "$GATLING_HOME/lib" -maxdepth 1 -name "*.jar" -type f -exec printf :{} ';'`

# Run the compiler
# NOTE(review): stderr is discarded here, which hides compile diagnostics;
# drop "2> /dev/null" when debugging compilation failures.
java $COMPILER_OPTS -cp "$COMPILER_CLASSPATH" io.gatling.compiler.ZincCompiler -ccp "$COMPILATION_CLASSPATH" "$@" 2> /dev/null

# Run Gatling
java $JAVA_OPTS -cp "$GATLING_CLASSPATH" io.gatling.app.Gatling "$@"
| {
"content_hash": "942b10385826395576c1c301f1e99565",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 358,
"avg_line_length": 46.32142857142857,
"alnum_prop": 0.7517347725520431,
"repo_name": "viniciushcruz/gatling",
"id": "f126d2a2511e1e3003d2d2bb0226ce507a8dfd89",
"size": "1946",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "bin/gatling.sh",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "4890"
},
{
"name": "Scala",
"bytes": "1707"
},
{
"name": "Shell",
"bytes": "3080"
}
],
"symlink_target": ""
} |
# Parse command-line options: -h prints the supported systems and exits.
while getopts "h" opt
do
    case $opt in
        "h")
            echo -e "The script is designed to work on the following systems:\n(1) centos\n(2) bclinux"
            exit
            ;;
        *)
            echo "Wrong Options"
            ;;
    esac
done

# all errors are fatal
set -e
# enable tracing
set -x

SCRIPT_PATH=$(dirname "$(readlink -f "$0")")
# Shared helpers (compile, *_setup, deps_dir, prefix_dir, os_distribution, ...)
# and pinned component versions.
source "${SCRIPT_PATH}/installation-setup.sh"
source "${SCRIPT_PATH}/../versions.txt"

# List of packages to install to satisfy build dependencies
pkgs=""
mnl_dev_pkg="libmnl-devel"

# general
pkgs+=" zlib-devel"
pkgs+=" gettext-devel"
pkgs+=" libtool-ltdl-devel"
pkgs+=" libtool-ltdl"
pkgs+=" glib2-devel"
pkgs+=" bzip2"
pkgs+=" m4"

# for yum-config-manager
pkgs+=" yum-utils"

# runtime dependencies
pkgs+=" libuuid-devel"
pkgs+=" libmnl"
pkgs+=" ${mnl_dev_pkg}"
pkgs+=" libffi-devel"
pkgs+=" pcre-devel"

# qemu lite dependencies
pkgs+=" libattr-devel"
pkgs+=" libcap-devel"
pkgs+=" libcap-ng-devel"
pkgs+=" pixman-devel"
pkgs+=" gcc-c++"

source /etc/os-release
major_version=$(echo "${VERSION_ID}"|cut -d\. -f1)

# Portable distribution check: "-o" inside [ ] is obsolescent per POSIX, so
# use two separate tests joined with ||.
if [ "${os_distribution}" = centos ] || [ "${os_distribution}" = bclinux ]
then
    distro="CentOS"
else
    echo >&2 "ERROR: Unrecognised distribution: ${os_distribution}"
    echo >&2 "ERROR: This script is designed to work on CentOS system and also applies to other variants of CentOS system."
    exit 1
fi

site="http://download.opensuse.org"
dir="repositories/home:/clearcontainers:/clear-containers-3/${distro}_${major_version}"
repo_file="home:clearcontainers:clear-containers-3.repo"
cc_repo_url="${site}/${dir}/${repo_file}"

sudo yum -y update
# ${pkgs} is intentionally unquoted: it is a space-separated package list that
# must undergo word splitting. The previous "eval" indirection was unnecessary
# and risked re-interpreting package names as shell syntax.
sudo yum -y install ${pkgs}
sudo yum groupinstall -y 'Development Tools'

pushd "${deps_dir}"

# Install pre-requisites for gcc
gmp_file="gmp-${gmp_version}.tar.bz2"
curl -L -O "https://gcc.gnu.org/pub/gcc/infrastructure/${gmp_file}"
compile gmp "${gmp_file}" "gmp-${gmp_version}"

mpfr_file="mpfr-${mpfr_version}.tar.bz2"
curl -L -O "https://gcc.gnu.org/pub/gcc/infrastructure/${mpfr_file}"
compile mpfr "${mpfr_file}" "mpfr-${mpfr_version}"

mpc_file="mpc-${mpc_version}.tar.gz"
curl -L -O "https://gcc.gnu.org/pub/gcc/infrastructure/${mpc_file}"
compile mpc "${mpc_file}" "mpc-${mpc_version}"

# Install glib
glib_setup
# Install json-glib
json_glib_setup
# Install gcc
gcc_setup
# Install qemu-lite
qemu_lite_setup

popd

# Install docker
sudo yum-config-manager --add-repo https://download.docker.com/linux/centos/docker-ce.repo
sudo yum -y install docker-ce

# Install Clear Containers components and their dependencies
sudo yum-config-manager --add-repo "${cc_repo_url}"
sudo yum -y install cc-runtime cc-proxy cc-shim linux-container clear-containers-image

# Override runtime configuration to use hypervisor from prefix_dir
# rather than the OBS default values.
sudo -E prefix_dir="${prefix_dir}" sed -i -e \
"s,^path = \"/usr/bin/qemu-system-x86_64\",path = \"${prefix_dir}/bin/qemu-system-x86_64\",g" \
/usr/share/defaults/clear-containers/configuration.toml

# Configure CC by default
service_dir="/etc/systemd/system/docker.service.d"
sudo mkdir -p "${service_dir}"
cat <<EOF|sudo tee "${service_dir}/clear-containers.conf"
[Service]
ExecStart=
ExecStart=/usr/bin/dockerd -D --add-runtime cc-runtime=/usr/bin/cc-runtime --default-runtime=cc-runtime
EOF

sudo systemctl daemon-reload
sudo systemctl restart docker
| {
"content_hash": "7a3a56c43940c7106892f04ac8ba7ce1",
"timestamp": "",
"source": "github",
"line_count": 132,
"max_line_length": 123,
"avg_line_length": 25.227272727272727,
"alnum_prop": 0.7033033033033033,
"repo_name": "clearcontainers/runtime",
"id": "dc1039cb6ae6ce044e06e2c219ef8164eba18b68",
"size": "4314",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "installation/centos-setup.sh",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Go",
"bytes": "365914"
},
{
"name": "Makefile",
"bytes": "21316"
},
{
"name": "Shell",
"bytes": "49786"
}
],
"symlink_target": ""
} |
(function() {
  'use strict';

  // Modules the top-level app controllers module depends on.
  var dependencies = [
    'dice.pick.controller',
    'dice.modal.controller',
    'dice.nav.controller',
    'dice.factory.app',
    'service.app',
    'ui.bootstrap.modal'
  ];

  var app = angular.module('dice.controllers.app', dependencies);
}).call(this);
| {
"content_hash": "08545ff93c1a6d1b764e0bf4f1905233",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 178,
"avg_line_length": 45,
"alnum_prop": 0.6933333333333334,
"repo_name": "jstacoder/angular-dice",
"id": "e7a40f357cb2ac7d1163ed69a7efe7b7a40233fe",
"size": "225",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "static/js/controllers/ctrls.js",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "21987"
},
{
"name": "JavaScript",
"bytes": "189435"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "PHP",
"bytes": "22610"
},
{
"name": "Python",
"bytes": "10895"
}
],
"symlink_target": ""
} |
.. _nucleo_l432kc_board:
ST Nucleo L432KC
################
Overview
********
The Nucleo L432KC board features an ARM Cortex-M4 based STM32L432KC MCU
with a wide range of connectivity support and configurations. Here are
some highlights of the Nucleo L432KC board:
- STM32 microcontroller in UFQFPN32 package
- Arduino Uno V3 connectivity
- On-board ST-LINK/V2-1 debugger/programmer with SWD connector
- Flexible board power supply:
- USB VBUS or external source(3.3V, 5V, 7 - 12V)
- Power management access point
- Three LEDs: USB communication (LD1), power LED (LD2), user LED (LD3)
- One push-button: RESET
.. image:: img/nucleo_l432kc.jpg
:width: 250px
:align: center
:height: 188px
:alt: Nucleo L432KC
More information about the board can be found at the `Nucleo L432KC website`_.
Hardware
********
The STM32L432KC SoC provides the following hardware IPs:
- Ultra-low-power with FlexPowerControl (down to 28 nA Standby mode and 84
|micro| A/MHz run mode)
- Core: ARM |reg| 32-bit Cortex |reg| -M4 CPU with FPU, frequency up to 80 MHz,
100DMIPS/1.25DMIPS/MHz (Dhrystone 2.1)
- Clock Sources:
- 32 kHz crystal oscillator for RTC (LSE)
- Internal 16 MHz factory-trimmed RC ( |plusminus| 1%)
- Internal low-power 32 kHz RC ( |plusminus| 5%)
- Internal multispeed 100 kHz to 48 MHz oscillator, auto-trimmed by
LSE (better than |plusminus| 0.25 % accuracy)
- 2 PLLs for system clock, USB, audio, ADC
- RTC with HW calendar, alarms and calibration
- Up to 3 capacitive sensing channels: support touchkey, linear and rotary touch sensors
- 11x timers:
- 1x 16-bit advanced motor-control
- 1x 32-bit and 2x 16-bit general purpose
- 2x 16-bit basic
- 2x low-power 16-bit timers (available in Stop mode)
- 2x watchdogs
- SysTick timer
- Up to 26 fast I/Os, most 5 V-tolerant
- Memories
- Up to 256 KB single bank Flash, proprietary code readout protection
- Up to 64 KB of SRAM including 16 KB with hardware parity check
- Quad SPI memory interface
- Rich analog peripherals (independent supply)
- 1x 12-bit ADC 5 MSPS, up to 16-bit with hardware oversampling, 200
|micro| A/MSPS
- 2x 12-bit DAC, low-power sample and hold
- 1x operational amplifiers with built-in PGA
- 2x ultra-low-power comparators
- 13x communication interfaces
- USB OTG 2.0 full-speed crystal less solution with LPM and BCD
- 1x SAIs (serial audio interface)
- 2x I2C FM+(1 Mbit/s), SMBus/PMBus
- 3x USARTs (ISO 7816, LIN, IrDA, modem)
- 2x SPIs (3x SPIs with the Quad SPI)
- CAN (2.0B Active)
- SWPMI single wire protocol master I/F
- IRTIM (Infrared interface)
- 14-channel DMA controller
- True random number generator
- CRC calculation unit, 96-bit unique ID
- Development support: serial wire debug (SWD), JTAG, Embedded Trace Macrocell*
More information about STM32L432KC can be found here:
- `STM32L432KC on www.st.com`_
- `STM32L432 reference manual`_
Supported Features
==================
The Zephyr nucleo_l432kc board configuration supports the following hardware features:
+-----------+------------+-------------------------------------+
| Interface | Controller | Driver/Component |
+===========+============+=====================================+
| NVIC | on-chip | nested vector interrupt controller |
+-----------+------------+-------------------------------------+
| UART | on-chip | serial port-polling; |
| | | serial port-interrupt |
+-----------+------------+-------------------------------------+
| PINMUX | on-chip | pinmux |
+-----------+------------+-------------------------------------+
| GPIO | on-chip | gpio |
+-----------+------------+-------------------------------------+
| I2C | on-chip | i2c |
+-----------+------------+-------------------------------------+
| PWM | on-chip | pwm |
+-----------+------------+-------------------------------------+
| CAN | on-chip | can |
+-----------+------------+-------------------------------------+
.. note:: CAN feature requires CAN transceiver
Other hardware features are not yet supported on this Zephyr port.
The default configuration can be found in the defconfig file:
``boards/arm/nucleo_l432kc/nucleo_l432kc_defconfig``
Connections and IOs
===================
Nucleo L432KC Board has 6 GPIO controllers. These controllers are responsible for pin muxing,
input/output, pull-up, etc.
Available pins:
---------------
.. image:: img/nucleo_l432kc_arduino_nano.png
:width: 960px
:align: center
:height: 720px
:alt: Nucleo L432KC Arduino connectors
For more details please refer to `STM32 Nucleo-32 board User Manual`_.
Default Zephyr Peripheral Mapping:
----------------------------------
- UART_1_TX : PA9
- UART_1_RX : PA10
- UART_2_TX : PA2
- UART_2_RX : PA3
- I2C_1_SCL : PB6
- I2C_1_SDA : PB7
- PWM_2_CH1 : PA0
- LD3 : PB3
System Clock
------------
Nucleo L432KC System Clock could be driven by internal or external oscillator,
as well as main PLL clock. By default System clock is driven by PLL clock at 80MHz,
driven by 16MHz high speed internal oscillator.
Serial Port
-----------
Nucleo L432KC board has 3 U(S)ARTs. The Zephyr console output is assigned to UART2.
Default settings are 115200 8N1.
Programming and Debugging
*************************
Applications for the ``nucleo_l432kc`` board configuration can be built and
flashed in the usual way (see :ref:`build_an_application` and
:ref:`application_run` for more details).
Flashing
========
Nucleo L432KC board includes an ST-LINK/V2-1 embedded debug tool
interface. This interface is supported by the openocd version
included in the Zephyr SDK since v0.9.2.
Flashing an application to Nucleo L432KC
----------------------------------------
Connect the Nucleo L432KC to your host computer using the USB port,
then run a serial host program to connect with your Nucleo board.
.. code-block:: console
$ minicom -D /dev/ttyACM0
Now build and flash an application. Here is an example for
:ref:`hello_world`.
.. zephyr-app-commands::
:zephyr-app: samples/hello_world
:board: nucleo_l432kc
:goals: build flash
You should see the following message on the console:
.. code-block:: console
$ Hello World! arm
Debugging
=========
You can debug an application in the usual way. Here is an example for the
:ref:`hello_world` application.
.. zephyr-app-commands::
:zephyr-app: samples/hello_world
:board: nucleo_l432kc
:maybe-skip-config:
:goals: debug
.. _Nucleo L432KC website:
http://www.st.com/en/evaluation-tools/nucleo-l432kc.html
.. _STM32 Nucleo-32 board User Manual:
http://www.st.com/resource/en/user_manual/dm00231744.pdf
.. _STM32L432KC on www.st.com:
http://www.st.com/en/microcontrollers/stm32l432kc.html
.. _STM32L432 reference manual:
http://www.st.com/resource/en/reference_manual/dm00151940.pdf
| {
"content_hash": "0752a47b4c93bbb0c51d48bd98b125ff",
"timestamp": "",
"source": "github",
"line_count": 233,
"max_line_length": 93,
"avg_line_length": 30.394849785407725,
"alnum_prop": 0.6203049985879695,
"repo_name": "nashif/zephyr",
"id": "7fadf13c268ed16aedc52cc8e9f942bea74a21c4",
"size": "7082",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "boards/arm/nucleo_l432kc/doc/index.rst",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "411112"
},
{
"name": "BASIC",
"bytes": "592"
},
{
"name": "Batchfile",
"bytes": "110"
},
{
"name": "C",
"bytes": "29131455"
},
{
"name": "C++",
"bytes": "222578"
},
{
"name": "CMake",
"bytes": "828435"
},
{
"name": "EmberScript",
"bytes": "959"
},
{
"name": "Gherkin",
"bytes": "2014"
},
{
"name": "Haskell",
"bytes": "722"
},
{
"name": "Objective-C",
"bytes": "3377"
},
{
"name": "PLSQL",
"bytes": "303"
},
{
"name": "Perl",
"bytes": "214752"
},
{
"name": "Python",
"bytes": "1831845"
},
{
"name": "Shell",
"bytes": "92436"
},
{
"name": "SmPL",
"bytes": "36625"
},
{
"name": "Smalltalk",
"bytes": "1885"
},
{
"name": "Tcl",
"bytes": "5840"
},
{
"name": "Verilog",
"bytes": "6394"
}
],
"symlink_target": ""
} |
package com.microsoft.azure.eventprocessorhost;
import java.util.ArrayList;
import java.util.function.Consumer;
public class PrefabGeneralErrorHandler implements Consumer<ExceptionReceivedEventArgs> {
    // Error descriptions, recorded in arrival order.
    private final ArrayList<String> errors = new ArrayList<>();

    /** Returns every error description recorded so far. */
    ArrayList<String> getErrors() {
        return this.errors;
    }

    /** Returns the number of errors recorded so far. */
    int getErrorCount() {
        return this.errors.size();
    }

    /**
     * Records a one-line description of the received general (host-level)
     * exception event.
     * NOTE(review): the backing list is not synchronized — assumes callbacks
     * are delivered serially; confirm against the event pump's threading.
     */
    @Override
    public void accept(ExceptionReceivedEventArgs e) {
        final String entry = "GENERAL: " + e.getHostname() + " " + e.getAction()
                + " " + e.getException().toString() + " " + e.getException().getMessage();
        this.errors.add(entry);
    }
}
| {
"content_hash": "00a6af8aea347dec44da12a4067ed3b2",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 151,
"avg_line_length": 30.285714285714285,
"alnum_prop": 0.6823899371069182,
"repo_name": "navalev/azure-sdk-for-java",
"id": "801d8722959e913b7ad9dd76e70827a56d7bc036",
"size": "733",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "sdk/eventhubs/microsoft-azure-eventhubs-eph/src/test/java/com/microsoft/azure/eventprocessorhost/PrefabGeneralErrorHandler.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "7230"
},
{
"name": "CSS",
"bytes": "5411"
},
{
"name": "Groovy",
"bytes": "1570436"
},
{
"name": "HTML",
"bytes": "29221"
},
{
"name": "Java",
"bytes": "250218562"
},
{
"name": "JavaScript",
"bytes": "15605"
},
{
"name": "PowerShell",
"bytes": "30924"
},
{
"name": "Python",
"bytes": "42119"
},
{
"name": "Shell",
"bytes": "1408"
}
],
"symlink_target": ""
} |
<HTML><HEAD>
<TITLE>Review for Danger Island (1992) (TV)</TITLE>
<LINK REL="STYLESHEET" TYPE="text/css" HREF="/ramr.css">
</HEAD>
<BODY BGCOLOR="#FFFFFF" TEXT="#000000">
<H1 ALIGN="CENTER" CLASS="title"><A HREF="/Title?0104049">Danger Island (1992) (TV)</A></H1><H3 ALIGN=CENTER>reviewed by<BR><A HREF="/ReviewsBy?Brian+L.+Johnson">Brian L. Johnson</A></H3><HR WIDTH="40%" SIZE="4">
<PRE> DANGER ISLAND
A film review by Ken Johnson
Copyright 1992 Ken Johnson</PRE>
<P>97 min., not rated, Suspense/Action, 1992, Made-for-television
Director: Tommy Lee Wallace
Cast: Lisa Banes, Richard Beymer, Maria Celedonio, Gary Graham,
Kathy Ireland, Joe Lara, Christopher Pettiet, Beth Toussaint,
Eddie Valez, Nikki Cox, June Lockhart, Gina Malina Aurio</P>
<P> Several people are stranded on a tropical island after the private
plane they were in crash-landed in the ocean. The island, which seems
uninhabited, turns out not to be a vacation in paradise. Something in
the ocean attacks two of the people, which one of disappears soon after,
and the other one gets physical deformities. The rest of the people
travel through the island trying to find help, unaware of the strange
things that are going on around them.</P>
<P> DANGER ISLAND, which was made for the NBC television network, is
exceptionally good, especially for a television film. I hope that it
gets released on video cassette, because it would allow people who
missed it when it was aired to see it. On a scale of zero to five, I
give DANGER ISLAND a five.</P>
<P> DANGER ISLAND is very well written and seems to transcend its
television roots. The story is interesting as well as suspenseful. The
characters are well developed and are ones that found myself wanting to
know more about. The special effects are very well done. The locations
picked for the filming look great. Interestingly enough, the film
leaves itself open for a sequel. Whether this was intended or not I
don't know. If handled correctly, a good sequel could be made.</P>
<P> I think that this is one of the best television films that I have
ever seen. The budget for the film seemed to be larger than most
television films. The major problem with most television films is that
they are either one of the many "disease of the week" films, which I, at
least, am getting sick of, or the film has a good idea, but it seems
that a talented writer couldn't be found that would fit in the budget.
This isn't a "disease of the week" film (thank God!) and the writer did
a great job (meaning he probably won't be working for television for
very much longer).</P>
<PRE>--------
Ken Johnson
<A HREF="mailto:blj@mithrandir.cs.unh.edu">blj@mithrandir.cs.unh.edu</A></PRE>
<PRE>.
</PRE>
<HR><P CLASS=flush><SMALL>The review above was posted to the
<A HREF="news:rec.arts.movies.reviews">rec.arts.movies.reviews</A> newsgroup (<A HREF="news:de.rec.film.kritiken">de.rec.film.kritiken</A> for German reviews).<BR>
The Internet Movie Database accepts no responsibility for the contents of the
review and has no editorial control. Unless stated otherwise, the copyright
belongs to the author.<BR>
Please direct comments/criticisms of the review to relevant newsgroups.<BR>
Broken URLs in the reviews are the responsibility of the author.<BR>
The formatting of the review is likely to differ from the original due
to ASCII to HTML conversion.
</SMALL></P>
<P ALIGN=CENTER>Related links: <A HREF="/Reviews/">index of all rec.arts.movies.reviews reviews</A></P>
</P></BODY></HTML>
| {
"content_hash": "31c092c75bd0da83fe2d8809795cef09",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 212,
"avg_line_length": 59.20967741935484,
"alnum_prop": 0.723508580768183,
"repo_name": "xianjunzhengbackup/code",
"id": "65b786533144eac55e9600710ae94c56472b8d0a",
"size": "3671",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "data science/machine_learning_for_the_web/chapter_4/movie/1566.html",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "BitBake",
"bytes": "113"
},
{
"name": "BlitzBasic",
"bytes": "256"
},
{
"name": "CSS",
"bytes": "49827"
},
{
"name": "HTML",
"bytes": "157006325"
},
{
"name": "JavaScript",
"bytes": "14029"
},
{
"name": "Jupyter Notebook",
"bytes": "4875399"
},
{
"name": "Mako",
"bytes": "2060"
},
{
"name": "Perl",
"bytes": "716"
},
{
"name": "Python",
"bytes": "874414"
},
{
"name": "R",
"bytes": "454"
},
{
"name": "Shell",
"bytes": "3984"
}
],
"symlink_target": ""
} |
Astronomy = {};
Astro = Astronomy;
Astro.modules = {};
Astro.classes = {};
| {
"content_hash": "e918e7b7088c0b15aceb170db3eed48b",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 19,
"avg_line_length": 15.2,
"alnum_prop": 0.618421052631579,
"repo_name": "henry-hz/meteor-astronomy",
"id": "f865e68b3671d5117291bea47748ec83461414e1",
"size": "76",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "lib/core/global.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "69140"
}
],
"symlink_target": ""
} |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Immutable;
using System.Diagnostics;
using Microsoft.CodeAnalysis.Options;
using Roslyn.Utilities;
using static Microsoft.CodeAnalysis.CodeStyle.CodeStyleHelpers;
namespace Microsoft.CodeAnalysis.CodeStyle
{
    /// <summary>
    /// Defines the language-agnostic code style options (e.g. 'this.'/'Me.' qualification,
    /// initializer preferences, parentheses preferences) together with their editorconfig
    /// and roaming-profile storage locations.
    /// </summary>
    internal static class CodeStyleOptions2
    {
        // Accumulates every option created by the field initializers below.  It is
        // frozen into AllOptions by the static constructor, which runs only after
        // all static field initializers have executed.
        private static readonly ImmutableArray<IOption2>.Builder s_allOptionsBuilder = ImmutableArray.CreateBuilder<IOption2>();
        /// <summary>
        /// All options declared in this class.  Populated by the static constructor.
        /// </summary>
        internal static ImmutableArray<IOption2> AllOptions { get; }
        /// <summary>
        /// Creates a per-language option in the "CodeStyleOptions" feature and registers
        /// it in <see cref="s_allOptionsBuilder"/>.
        /// </summary>
        private static PerLanguageOption2<T> CreateOption<T>(OptionGroup group, string name, T defaultValue, params OptionStorageLocation2[] storageLocations)
        {
            var option = new PerLanguageOption2<T>("CodeStyleOptions", group, name, defaultValue, storageLocations);
            s_allOptionsBuilder.Add(option);
            return option;
        }
        /// <summary>
        /// Creates a language-independent option in the "CodeStyleOptions" feature and
        /// registers it in <see cref="s_allOptionsBuilder"/>.
        /// </summary>
        private static Option2<T> CreateCommonOption<T>(OptionGroup group, string name, T defaultValue, params OptionStorageLocation2[] storageLocations)
        {
            var option = new Option2<T>("CodeStyleOptions", group, name, defaultValue, storageLocations);
            s_allOptionsBuilder.Add(option);
            return option;
        }
        /// <remarks>
        /// When user preferences are not yet set for a style, we fall back to the default value.
        /// One such default(s), is that the feature is turned on, so that codegen consumes it,
        /// but with silent enforcement, so that the user is not prompted about their usage.
        /// </remarks>
        internal static readonly CodeStyleOption2<bool> TrueWithSilentEnforcement = new(value: true, notification: NotificationOption2.Silent);
        internal static readonly CodeStyleOption2<bool> FalseWithSilentEnforcement = new(value: false, notification: NotificationOption2.Silent);
        internal static readonly CodeStyleOption2<bool> TrueWithSuggestionEnforcement = new(value: true, notification: NotificationOption2.Suggestion);
        internal static readonly CodeStyleOption2<bool> FalseWithSuggestionEnforcement = new(value: false, notification: NotificationOption2.Suggestion);
        /// <summary>
        /// This option says if we should simplify away the <see langword="this"/>. or <see langword="Me"/>. in field access expressions.
        /// </summary>
        public static readonly PerLanguageOption2<CodeStyleOption2<bool>> QualifyFieldAccess = CreateOption(
            CodeStyleOptionGroups.ThisOrMe, nameof(QualifyFieldAccess),
            defaultValue: CodeStyleOption2<bool>.Default,
            storageLocations: new OptionStorageLocation2[]{
                EditorConfigStorageLocation.ForBoolCodeStyleOption("dotnet_style_qualification_for_field"),
                new RoamingProfileStorageLocation("TextEditor.%LANGUAGE%.Specific.QualifyFieldAccess")});
        /// <summary>
        /// This option says if we should simplify away the <see langword="this"/>. or <see langword="Me"/>. in property access expressions.
        /// </summary>
        public static readonly PerLanguageOption2<CodeStyleOption2<bool>> QualifyPropertyAccess = CreateOption(
            CodeStyleOptionGroups.ThisOrMe, nameof(QualifyPropertyAccess),
            defaultValue: CodeStyleOption2<bool>.Default,
            storageLocations: new OptionStorageLocation2[]{
                EditorConfigStorageLocation.ForBoolCodeStyleOption("dotnet_style_qualification_for_property"),
                new RoamingProfileStorageLocation("TextEditor.%LANGUAGE%.Specific.QualifyPropertyAccess")});
        /// <summary>
        /// This option says if we should simplify away the <see langword="this"/>. or <see langword="Me"/>. in method access expressions.
        /// </summary>
        public static readonly PerLanguageOption2<CodeStyleOption2<bool>> QualifyMethodAccess = CreateOption(
            CodeStyleOptionGroups.ThisOrMe, nameof(QualifyMethodAccess),
            defaultValue: CodeStyleOption2<bool>.Default,
            storageLocations: new OptionStorageLocation2[]{
                EditorConfigStorageLocation.ForBoolCodeStyleOption("dotnet_style_qualification_for_method"),
                new RoamingProfileStorageLocation("TextEditor.%LANGUAGE%.Specific.QualifyMethodAccess")});
        /// <summary>
        /// This option says if we should simplify away the <see langword="this"/>. or <see langword="Me"/>. in event access expressions.
        /// </summary>
        public static readonly PerLanguageOption2<CodeStyleOption2<bool>> QualifyEventAccess = CreateOption(
            CodeStyleOptionGroups.ThisOrMe, nameof(QualifyEventAccess),
            defaultValue: CodeStyleOption2<bool>.Default,
            storageLocations: new OptionStorageLocation2[]{
                EditorConfigStorageLocation.ForBoolCodeStyleOption("dotnet_style_qualification_for_event"),
                new RoamingProfileStorageLocation("TextEditor.%LANGUAGE%.Specific.QualifyEventAccess")});
        /// <summary>
        /// This option says if we should prefer keyword for Intrinsic Predefined Types in Declarations
        /// </summary>
        public static readonly PerLanguageOption2<CodeStyleOption2<bool>> PreferIntrinsicPredefinedTypeKeywordInDeclaration = CreateOption(
            CodeStyleOptionGroups.PredefinedTypeNameUsage, nameof(PreferIntrinsicPredefinedTypeKeywordInDeclaration),
            defaultValue: TrueWithSilentEnforcement,
            storageLocations: new OptionStorageLocation2[]{
                EditorConfigStorageLocation.ForBoolCodeStyleOption("dotnet_style_predefined_type_for_locals_parameters_members"),
                new RoamingProfileStorageLocation("TextEditor.%LANGUAGE%.Specific.PreferIntrinsicPredefinedTypeKeywordInDeclaration.CodeStyle")});
        /// <summary>
        /// This option says if we should prefer keyword for Intrinsic Predefined Types in Member Access Expression
        /// </summary>
        public static readonly PerLanguageOption2<CodeStyleOption2<bool>> PreferIntrinsicPredefinedTypeKeywordInMemberAccess = CreateOption(
            CodeStyleOptionGroups.PredefinedTypeNameUsage, nameof(PreferIntrinsicPredefinedTypeKeywordInMemberAccess),
            defaultValue: TrueWithSilentEnforcement,
            storageLocations: new OptionStorageLocation2[]{
                EditorConfigStorageLocation.ForBoolCodeStyleOption("dotnet_style_predefined_type_for_member_access"),
                new RoamingProfileStorageLocation("TextEditor.%LANGUAGE%.Specific.PreferIntrinsicPredefinedTypeKeywordInMemberAccess.CodeStyle")});
        internal static readonly PerLanguageOption2<CodeStyleOption2<bool>> PreferObjectInitializer = CreateOption(
            CodeStyleOptionGroups.ExpressionLevelPreferences, nameof(PreferObjectInitializer),
            defaultValue: TrueWithSuggestionEnforcement,
            storageLocations: new OptionStorageLocation2[]{
                EditorConfigStorageLocation.ForBoolCodeStyleOption("dotnet_style_object_initializer"),
                new RoamingProfileStorageLocation("TextEditor.%LANGUAGE%.Specific.PreferObjectInitializer")});
        internal static readonly PerLanguageOption2<CodeStyleOption2<bool>> PreferCollectionInitializer = CreateOption(
            CodeStyleOptionGroups.ExpressionLevelPreferences, nameof(PreferCollectionInitializer),
            defaultValue: TrueWithSuggestionEnforcement,
            storageLocations: new OptionStorageLocation2[]{
                EditorConfigStorageLocation.ForBoolCodeStyleOption("dotnet_style_collection_initializer"),
                new RoamingProfileStorageLocation("TextEditor.%LANGUAGE%.Specific.PreferCollectionInitializer")});
        // TODO: Should both the below "_FadeOutCode" options be added to AllOptions?
        // NOTE(review): these two are created with 'new' directly rather than via
        // CreateOption, so they are currently NOT registered in s_allOptionsBuilder.
        internal static readonly PerLanguageOption2<bool> PreferObjectInitializer_FadeOutCode = new(
            "CodeStyleOptions", nameof(PreferObjectInitializer_FadeOutCode),
            defaultValue: false,
            storageLocations: new RoamingProfileStorageLocation("TextEditor.%LANGUAGE%.Specific.PreferObjectInitializer_FadeOutCode"));
        internal static readonly PerLanguageOption2<bool> PreferCollectionInitializer_FadeOutCode = new(
            "CodeStyleOptions", nameof(PreferCollectionInitializer_FadeOutCode),
            defaultValue: false,
            storageLocations: new RoamingProfileStorageLocation("TextEditor.%LANGUAGE%.Specific.PreferCollectionInitializer_FadeOutCode"));
        internal static readonly PerLanguageOption2<CodeStyleOption2<bool>> PreferSimplifiedBooleanExpressions = CreateOption(
            CodeStyleOptionGroups.ExpressionLevelPreferences, nameof(PreferSimplifiedBooleanExpressions),
            defaultValue: TrueWithSuggestionEnforcement,
            storageLocations: new OptionStorageLocation2[]{
                EditorConfigStorageLocation.ForBoolCodeStyleOption("dotnet_style_prefer_simplified_boolean_expressions"),
                new RoamingProfileStorageLocation("TextEditor.%LANGUAGE%.Specific.PreferSimplifiedBooleanExpressions")});
        internal static readonly PerLanguageOption2<OperatorPlacementWhenWrappingPreference> OperatorPlacementWhenWrapping =
            CreateOption(
                CodeStyleOptionGroups.ExpressionLevelPreferences,
                nameof(OperatorPlacementWhenWrapping),
                defaultValue: OperatorPlacementWhenWrappingPreference.BeginningOfLine,
                storageLocations:
                new EditorConfigStorageLocation<OperatorPlacementWhenWrappingPreference>(
                    "dotnet_style_operator_placement_when_wrapping",
                    OperatorPlacementUtilities.Parse,
                    OperatorPlacementUtilities.GetEditorConfigString));
        internal static readonly PerLanguageOption2<CodeStyleOption2<bool>> PreferCoalesceExpression = CreateOption(
            CodeStyleOptionGroups.ExpressionLevelPreferences, nameof(PreferCoalesceExpression),
            defaultValue: TrueWithSuggestionEnforcement,
            storageLocations: new OptionStorageLocation2[]{
                EditorConfigStorageLocation.ForBoolCodeStyleOption("dotnet_style_coalesce_expression"),
                new RoamingProfileStorageLocation("TextEditor.%LANGUAGE%.Specific.PreferCoalesceExpression") });
        internal static readonly PerLanguageOption2<CodeStyleOption2<bool>> PreferNullPropagation = CreateOption(
            CodeStyleOptionGroups.ExpressionLevelPreferences, nameof(PreferNullPropagation),
            defaultValue: TrueWithSuggestionEnforcement,
            storageLocations: new OptionStorageLocation2[]{
                EditorConfigStorageLocation.ForBoolCodeStyleOption("dotnet_style_null_propagation"),
                new RoamingProfileStorageLocation("TextEditor.%LANGUAGE%.Specific.PreferNullPropagation") });
        internal static readonly PerLanguageOption2<CodeStyleOption2<bool>> PreferExplicitTupleNames = CreateOption(
            CodeStyleOptionGroups.ExpressionLevelPreferences, nameof(PreferExplicitTupleNames),
            defaultValue: TrueWithSuggestionEnforcement,
            storageLocations: new OptionStorageLocation2[] {
                EditorConfigStorageLocation.ForBoolCodeStyleOption("dotnet_style_explicit_tuple_names"),
                new RoamingProfileStorageLocation("TextEditor.%LANGUAGE%.Specific.PreferExplicitTupleNames") });
        internal static readonly PerLanguageOption2<CodeStyleOption2<bool>> PreferAutoProperties = CreateOption(
            CodeStyleOptionGroups.ExpressionLevelPreferences, nameof(PreferAutoProperties),
            defaultValue: TrueWithSilentEnforcement,
            storageLocations: new OptionStorageLocation2[] {
                EditorConfigStorageLocation.ForBoolCodeStyleOption("dotnet_style_prefer_auto_properties"),
                new RoamingProfileStorageLocation("TextEditor.%LANGUAGE%.Specific.PreferAutoProperties") });
        internal static readonly PerLanguageOption2<CodeStyleOption2<bool>> PreferInferredTupleNames = CreateOption(
            CodeStyleOptionGroups.ExpressionLevelPreferences, nameof(PreferInferredTupleNames),
            defaultValue: TrueWithSuggestionEnforcement,
            storageLocations: new OptionStorageLocation2[] {
                EditorConfigStorageLocation.ForBoolCodeStyleOption("dotnet_style_prefer_inferred_tuple_names"),
                new RoamingProfileStorageLocation($"TextEditor.%LANGUAGE%.Specific.{nameof(PreferInferredTupleNames)}") });
        internal static readonly PerLanguageOption2<CodeStyleOption2<bool>> PreferInferredAnonymousTypeMemberNames = CreateOption(
            CodeStyleOptionGroups.ExpressionLevelPreferences, nameof(PreferInferredAnonymousTypeMemberNames),
            defaultValue: TrueWithSuggestionEnforcement,
            storageLocations: new OptionStorageLocation2[] {
                EditorConfigStorageLocation.ForBoolCodeStyleOption("dotnet_style_prefer_inferred_anonymous_type_member_names"),
                new RoamingProfileStorageLocation($"TextEditor.%LANGUAGE%.Specific.{nameof(PreferInferredAnonymousTypeMemberNames)}") });
        internal static readonly PerLanguageOption2<CodeStyleOption2<bool>> PreferIsNullCheckOverReferenceEqualityMethod = CreateOption(
            CodeStyleOptionGroups.ExpressionLevelPreferences, nameof(PreferIsNullCheckOverReferenceEqualityMethod),
            defaultValue: TrueWithSuggestionEnforcement,
            storageLocations: new OptionStorageLocation2[]{
                EditorConfigStorageLocation.ForBoolCodeStyleOption("dotnet_style_prefer_is_null_check_over_reference_equality_method"),
                new RoamingProfileStorageLocation($"TextEditor.%LANGUAGE%.Specific.{nameof(PreferIsNullCheckOverReferenceEqualityMethod)}") });
        internal static readonly PerLanguageOption2<CodeStyleOption2<bool>> PreferConditionalExpressionOverAssignment = CreateOption(
            CodeStyleOptionGroups.ExpressionLevelPreferences, nameof(PreferConditionalExpressionOverAssignment),
            defaultValue: TrueWithSilentEnforcement,
            storageLocations: new OptionStorageLocation2[]{
                EditorConfigStorageLocation.ForBoolCodeStyleOption("dotnet_style_prefer_conditional_expression_over_assignment"),
                new RoamingProfileStorageLocation("TextEditor.%LANGUAGE%.Specific.PreferConditionalExpressionOverAssignment")});
        internal static readonly PerLanguageOption2<CodeStyleOption2<bool>> PreferConditionalExpressionOverReturn = CreateOption(
            CodeStyleOptionGroups.ExpressionLevelPreferences, nameof(PreferConditionalExpressionOverReturn),
            defaultValue: TrueWithSilentEnforcement,
            storageLocations: new OptionStorageLocation2[]{
                EditorConfigStorageLocation.ForBoolCodeStyleOption("dotnet_style_prefer_conditional_expression_over_return"),
                new RoamingProfileStorageLocation("TextEditor.%LANGUAGE%.Specific.PreferConditionalExpressionOverReturn")});
        internal static readonly PerLanguageOption2<CodeStyleOption2<bool>> PreferCompoundAssignment = CreateOption(
            CodeStyleOptionGroups.ExpressionLevelPreferences,
            nameof(PreferCompoundAssignment),
            defaultValue: TrueWithSuggestionEnforcement,
            storageLocations: new OptionStorageLocation2[]{
                EditorConfigStorageLocation.ForBoolCodeStyleOption("dotnet_style_prefer_compound_assignment"),
                new RoamingProfileStorageLocation("TextEditor.%LANGUAGE%.Specific.PreferCompoundAssignment") });
        internal static readonly PerLanguageOption2<CodeStyleOption2<bool>> PreferSimplifiedInterpolation = CreateOption(
            CodeStyleOptionGroups.ExpressionLevelPreferences, nameof(PreferSimplifiedInterpolation),
            defaultValue: TrueWithSuggestionEnforcement,
            storageLocations: new OptionStorageLocation2[]{
                EditorConfigStorageLocation.ForBoolCodeStyleOption("dotnet_style_prefer_simplified_interpolation"),
                new RoamingProfileStorageLocation($"TextEditor.%LANGUAGE%.Specific.{nameof(PreferSimplifiedInterpolation)}") });
        // Fallback defaults for the unused-parameters analysis below.
        private static readonly CodeStyleOption2<UnusedParametersPreference> s_preferNoneUnusedParametersPreference =
            new(default, NotificationOption2.None);
        private static readonly CodeStyleOption2<UnusedParametersPreference> s_preferAllMethodsUnusedParametersPreference =
            new(UnusedParametersPreference.AllMethods, NotificationOption2.Suggestion);
        // TODO: https://github.com/dotnet/roslyn/issues/31225 tracks adding CodeQualityOption<T> and CodeQualityOptions
        // and moving this option to CodeQualityOptions.
        internal static readonly PerLanguageOption2<CodeStyleOption2<UnusedParametersPreference>> UnusedParameters = CreateOption(
            CodeStyleOptionGroups.Parameter,
            nameof(UnusedParameters),
            defaultValue: s_preferAllMethodsUnusedParametersPreference,
            storageLocations: new OptionStorageLocation2[]{
                new EditorConfigStorageLocation<CodeStyleOption2<UnusedParametersPreference>>(
                    "dotnet_code_quality_unused_parameters",
                    ParseUnusedParametersPreference,
                    o => GetUnusedParametersPreferenceEditorConfigString(o, s_preferAllMethodsUnusedParametersPreference.Value)),
                new RoamingProfileStorageLocation($"TextEditor.%LANGUAGE%.Specific.{nameof(UnusedParameters)}Preference") });
        private static readonly CodeStyleOption2<AccessibilityModifiersRequired> s_requireAccessibilityModifiersDefault =
            new(AccessibilityModifiersRequired.ForNonInterfaceMembers, NotificationOption2.Silent);
        internal static readonly PerLanguageOption2<CodeStyleOption2<AccessibilityModifiersRequired>> RequireAccessibilityModifiers =
            CreateOption(
                CodeStyleOptionGroups.Modifier, nameof(RequireAccessibilityModifiers),
                defaultValue: s_requireAccessibilityModifiersDefault,
                storageLocations: new OptionStorageLocation2[]{
                    new EditorConfigStorageLocation<CodeStyleOption2<AccessibilityModifiersRequired>>(
                        "dotnet_style_require_accessibility_modifiers",
                        s => ParseAccessibilityModifiersRequired(s),
                        GetAccessibilityModifiersRequiredEditorConfigString),
                    new RoamingProfileStorageLocation("TextEditor.%LANGUAGE%.Specific.RequireAccessibilityModifiers")});
        internal static readonly PerLanguageOption2<CodeStyleOption2<bool>> PreferReadonly = CreateOption(
            CodeStyleOptionGroups.Field, nameof(PreferReadonly),
            defaultValue: TrueWithSuggestionEnforcement,
            storageLocations: new OptionStorageLocation2[]{
                EditorConfigStorageLocation.ForBoolCodeStyleOption("dotnet_style_readonly_field"),
                new RoamingProfileStorageLocation("TextEditor.%LANGUAGE%.Specific.PreferReadonly") });
        internal static readonly Option2<string> FileHeaderTemplate = CreateCommonOption(
            CodeStyleOptionGroups.Usings, nameof(FileHeaderTemplate),
            defaultValue: "",
            EditorConfigStorageLocation.ForStringOption("file_header_template", emptyStringRepresentation: "unset"));
        internal static readonly Option2<string> RemoveUnnecessarySuppressionExclusions = CreateCommonOption(
            CodeStyleOptionGroups.Suppressions,
            nameof(RemoveUnnecessarySuppressionExclusions),
            defaultValue: "",
            storageLocations: new OptionStorageLocation2[]{
                EditorConfigStorageLocation.ForStringOption("dotnet_remove_unnecessary_suppression_exclusions", emptyStringRepresentation: "none"),
                new RoamingProfileStorageLocation("TextEditor.%LANGUAGE%.Specific.RemoveUnnecessarySuppressionExclusions") });
        // Maps between the editorconfig string values and the AccessibilityModifiersRequired enum values.
        private static readonly BidirectionalMap<string, AccessibilityModifiersRequired> s_accessibilityModifiersRequiredMap =
            new(new[]
            {
                KeyValuePairUtil.Create("never", AccessibilityModifiersRequired.Never),
                KeyValuePairUtil.Create("always", AccessibilityModifiersRequired.Always),
                KeyValuePairUtil.Create("for_non_interface_members", AccessibilityModifiersRequired.ForNonInterfaceMembers),
                KeyValuePairUtil.Create("omit_if_default", AccessibilityModifiersRequired.OmitIfDefault),
            });
        /// <summary>
        /// Parses the 'dotnet_style_require_accessibility_modifiers' editorconfig string
        /// into a code style option, falling back to <see cref="s_requireAccessibilityModifiersDefault"/>
        /// when the string cannot be parsed or carries no notification.
        /// </summary>
        private static CodeStyleOption2<AccessibilityModifiersRequired> ParseAccessibilityModifiersRequired(string optionString)
        {
            if (TryGetCodeStyleValueAndOptionalNotification(optionString,
                out var value, out var notificationOpt))
            {
                if (value == "never")
                {
                    // If they provide 'never', they don't need a notification level.
                    notificationOpt ??= NotificationOption2.Silent;
                }
                if (notificationOpt is object)
                {
                    Debug.Assert(s_accessibilityModifiersRequiredMap.ContainsKey(value));
                    return new CodeStyleOption2<AccessibilityModifiersRequired>(s_accessibilityModifiersRequiredMap.GetValueOrDefault(value), notificationOpt);
                }
            }
            return s_requireAccessibilityModifiersDefault;
        }
        /// <summary>
        /// Serializes an accessibility-modifiers option back to its editorconfig string,
        /// appending the ':severity' suffix only when a notification is present.
        /// </summary>
        private static string GetAccessibilityModifiersRequiredEditorConfigString(CodeStyleOption2<AccessibilityModifiersRequired> option)
        {
            // If they provide 'never', they don't need a notification level.
            if (option.Notification == null)
            {
                Debug.Assert(s_accessibilityModifiersRequiredMap.ContainsValue(AccessibilityModifiersRequired.Never));
                return s_accessibilityModifiersRequiredMap.GetKeyOrDefault(AccessibilityModifiersRequired.Never);
            }
            Debug.Assert(s_accessibilityModifiersRequiredMap.ContainsValue(option.Value));
            return $"{s_accessibilityModifiersRequiredMap.GetKeyOrDefault(option.Value)}:{option.Notification.ToEditorConfigString()}";
        }
        // Shared defaults for the parentheses options declared below.
        private static readonly CodeStyleOption2<ParenthesesPreference> s_alwaysForClarityPreference =
            new(ParenthesesPreference.AlwaysForClarity, NotificationOption2.Silent);
        private static readonly CodeStyleOption2<ParenthesesPreference> s_neverIfUnnecessaryPreference =
            new(ParenthesesPreference.NeverIfUnnecessary, NotificationOption2.Silent);
        /// <summary>
        /// Creates a parentheses-preference option stored under the given editorconfig
        /// style name plus a per-field roaming-profile location.
        /// </summary>
        private static PerLanguageOption2<CodeStyleOption2<ParenthesesPreference>> CreateParenthesesOption(
            string fieldName, CodeStyleOption2<ParenthesesPreference> defaultValue,
            string styleName)
        {
            return CreateOption(
                CodeStyleOptionGroups.Parentheses, fieldName, defaultValue,
                storageLocations: new OptionStorageLocation2[]{
                    new EditorConfigStorageLocation<CodeStyleOption2<ParenthesesPreference>>(
                        styleName,
                        s => ParseParenthesesPreference(s, defaultValue),
                        v => GetParenthesesPreferenceEditorConfigString(v)),
                    new RoamingProfileStorageLocation($"TextEditor.%LANGUAGE%.Specific.{fieldName}Preference")});
        }
        internal static readonly PerLanguageOption2<CodeStyleOption2<ParenthesesPreference>> ArithmeticBinaryParentheses =
            CreateParenthesesOption(
                nameof(ArithmeticBinaryParentheses),
                s_alwaysForClarityPreference,
                "dotnet_style_parentheses_in_arithmetic_binary_operators");
        internal static readonly PerLanguageOption2<CodeStyleOption2<ParenthesesPreference>> OtherBinaryParentheses =
            CreateParenthesesOption(
                nameof(OtherBinaryParentheses),
                s_alwaysForClarityPreference,
                "dotnet_style_parentheses_in_other_binary_operators");
        internal static readonly PerLanguageOption2<CodeStyleOption2<ParenthesesPreference>> RelationalBinaryParentheses =
            CreateParenthesesOption(
                nameof(RelationalBinaryParentheses),
                s_alwaysForClarityPreference,
                "dotnet_style_parentheses_in_relational_binary_operators");
        internal static readonly PerLanguageOption2<CodeStyleOption2<ParenthesesPreference>> OtherParentheses =
            CreateParenthesesOption(
                nameof(OtherParentheses),
                s_neverIfUnnecessaryPreference,
                "dotnet_style_parentheses_in_other_operators");
        // Maps between the editorconfig string values and the ParenthesesPreference enum values.
        private static readonly BidirectionalMap<string, ParenthesesPreference> s_parenthesesPreferenceMap =
            new(new[]
            {
                KeyValuePairUtil.Create("always_for_clarity", ParenthesesPreference.AlwaysForClarity),
                KeyValuePairUtil.Create("never_if_unnecessary", ParenthesesPreference.NeverIfUnnecessary),
            });
        // Maps between the editorconfig string values and the UnusedParametersPreference enum values.
        private static readonly BidirectionalMap<string, UnusedParametersPreference> s_unusedParametersPreferenceMap =
            new(new[]
            {
                KeyValuePairUtil.Create("non_public", UnusedParametersPreference.NonPublicMethods),
                KeyValuePairUtil.Create("all", UnusedParametersPreference.AllMethods),
            });
        internal static readonly PerLanguageOption2<CodeStyleOption2<bool>> PreferSystemHashCode = CreateOption(
            CodeStyleOptionGroups.ExpressionLevelPreferences,
            nameof(PreferSystemHashCode),
            defaultValue: TrueWithSuggestionEnforcement,
            storageLocations: new OptionStorageLocation2[]{
                new RoamingProfileStorageLocation("TextEditor.%LANGUAGE%.Specific.PreferSystemHashCode") });
        static CodeStyleOptions2()
        {
            // Note that the static constructor executes after all the static field initializers for the options have executed,
            // and each field initializer adds the created option to s_allOptionsBuilder.
            AllOptions = s_allOptionsBuilder.ToImmutable();
        }
        /// <summary>
        /// Parses a parentheses-preference editorconfig string, falling back to the
        /// supplied <paramref name="defaultValue"/> when parsing fails.
        /// </summary>
        private static Optional<CodeStyleOption2<ParenthesesPreference>> ParseParenthesesPreference(
            string optionString, Optional<CodeStyleOption2<ParenthesesPreference>> defaultValue)
        {
            if (TryGetCodeStyleValueAndOptionalNotification(optionString,
                out var value, out var notificationOpt))
            {
                Debug.Assert(s_parenthesesPreferenceMap.ContainsKey(value));
                return new CodeStyleOption2<ParenthesesPreference>(s_parenthesesPreferenceMap.GetValueOrDefault(value),
                    notificationOpt ?? NotificationOption2.Silent);
            }
            return defaultValue;
        }
        /// <summary>
        /// Serializes a parentheses-preference option back to its editorconfig string,
        /// appending the ':severity' suffix only when a notification is present.
        /// </summary>
        private static string GetParenthesesPreferenceEditorConfigString(CodeStyleOption2<ParenthesesPreference> option)
        {
            Debug.Assert(s_parenthesesPreferenceMap.ContainsValue(option.Value));
            var value = s_parenthesesPreferenceMap.GetKeyOrDefault(option.Value) ?? s_parenthesesPreferenceMap.GetKeyOrDefault(ParenthesesPreference.AlwaysForClarity);
            return option.Notification == null ? value : $"{value}:{option.Notification.ToEditorConfigString()}";
        }
        /// <summary>
        /// Parses the 'dotnet_code_quality_unused_parameters' editorconfig string, falling
        /// back to <see cref="s_preferNoneUnusedParametersPreference"/> when parsing fails.
        /// </summary>
        private static Optional<CodeStyleOption2<UnusedParametersPreference>> ParseUnusedParametersPreference(string optionString)
        {
            if (TryGetCodeStyleValueAndOptionalNotification(optionString,
                out var value, out var notificationOpt))
            {
                return new CodeStyleOption2<UnusedParametersPreference>(
                    s_unusedParametersPreferenceMap.GetValueOrDefault(value), notificationOpt ?? NotificationOption2.Suggestion);
            }
            return s_preferNoneUnusedParametersPreference;
        }
        /// <summary>
        /// Serializes an unused-parameters option back to its editorconfig string,
        /// appending the ':severity' suffix only when a notification is present.
        /// </summary>
        private static string GetUnusedParametersPreferenceEditorConfigString(CodeStyleOption2<UnusedParametersPreference> option, UnusedParametersPreference defaultPreference)
        {
            Debug.Assert(s_unusedParametersPreferenceMap.ContainsValue(option.Value));
            var value = s_unusedParametersPreferenceMap.GetKeyOrDefault(option.Value) ?? s_unusedParametersPreferenceMap.GetKeyOrDefault(defaultPreference);
            return option.Notification == null ? value : $"{value}:{option.Notification.ToEditorConfigString()}";
        }
    }
internal static class CodeStyleOptionGroups
{
public static readonly OptionGroup Usings = new(CompilerExtensionsResources.Organize_usings, priority: 1);
public static readonly OptionGroup ThisOrMe = new(CompilerExtensionsResources.this_dot_and_Me_dot_preferences, priority: 2);
public static readonly OptionGroup PredefinedTypeNameUsage = new(CompilerExtensionsResources.Language_keywords_vs_BCL_types_preferences, priority: 3);
public static readonly OptionGroup Parentheses = new(CompilerExtensionsResources.Parentheses_preferences, priority: 4);
public static readonly OptionGroup Modifier = new(CompilerExtensionsResources.Modifier_preferences, priority: 5);
public static readonly OptionGroup ExpressionLevelPreferences = new(CompilerExtensionsResources.Expression_level_preferences, priority: 6);
public static readonly OptionGroup Field = new(CompilerExtensionsResources.Field_preferences, priority: 7);
public static readonly OptionGroup Parameter = new(CompilerExtensionsResources.Parameter_preferences, priority: 8);
public static readonly OptionGroup Suppressions = new(CompilerExtensionsResources.Suppression_preferences, priority: 9);
}
}
| {
"content_hash": "aed31ea1a82710c033594b934509af2f",
"timestamp": "",
"source": "github",
"line_count": 444,
"max_line_length": 176,
"avg_line_length": 67.66216216216216,
"alnum_prop": 0.7262832035150789,
"repo_name": "aelij/roslyn",
"id": "7538bffc15f77dccfd4f77270d49a33adaf615b3",
"size": "30044",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/Workspaces/SharedUtilitiesAndExtensions/Compiler/Core/CodeStyle/CodeStyleOptions2.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "1C Enterprise",
"bytes": "257760"
},
{
"name": "Batchfile",
"bytes": "8025"
},
{
"name": "C#",
"bytes": "140151209"
},
{
"name": "C++",
"bytes": "5602"
},
{
"name": "CMake",
"bytes": "9153"
},
{
"name": "Dockerfile",
"bytes": "2450"
},
{
"name": "F#",
"bytes": "549"
},
{
"name": "PowerShell",
"bytes": "242316"
},
{
"name": "Shell",
"bytes": "94467"
},
{
"name": "Visual Basic .NET",
"bytes": "71784276"
}
],
"symlink_target": ""
} |
<html>
<!--
  Reference page for boost::units::temperature_base_dimension.
  Generated by DocBook XSL Stylesheets (see the "generator" meta tag below),
  so prefer editing the upstream documentation source over this file.
-->
<head>
<meta http-equiv="Content-Type" content="text/html; charset=US-ASCII">
<title>Struct temperature_base_dimension</title>
<link rel="stylesheet" href="../../../../doc/src/boostbook.css" type="text/css">
<meta name="generator" content="DocBook XSL Stylesheets V1.76.1">
<link rel="home" href="../../index.html" title="The Boost C++ Libraries BoostBook Documentation Subset">
<link rel="up" href="../../boost_units/Reference.html#header.boost.units.physical_dimensions.temperature_hpp" title="Header &lt;boost/units/physical_dimensions/temperature.hpp&gt;">
<link rel="prev" href="solid_angle_base_dimension.html" title="Struct solid_angle_base_dimension">
<link rel="next" href="time_base_dimension.html" title="Struct time_base_dimension">
</head>
<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
<table cellpadding="2" width="100%"><tr>
<td valign="top"><img alt="Boost C++ Libraries" width="277" height="86" src="../../../../boost.png"></td>
<td align="center"><a href="../../../../index.html">Home</a></td>
<td align="center"><a href="../../../../libs/libraries.htm">Libraries</a></td>
<td align="center"><a href="http://www.boost.org/users/people.html">People</a></td>
<td align="center"><a href="http://www.boost.org/users/faq.html">FAQ</a></td>
<td align="center"><a href="../../../../more/index.htm">More</a></td>
</tr></table>
<hr>
<!-- Prev/Up/Home/Next navigation between adjacent reference pages. -->
<div class="spirit-nav">
<a accesskey="p" href="solid_angle_base_dimension.html"><img src="../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../../boost_units/Reference.html#header.boost.units.physical_dimensions.temperature_hpp"><img src="../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../index.html"><img src="../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="time_base_dimension.html"><img src="../../../../doc/src/images/next.png" alt="Next"></a>
</div>
<div class="refentry">
<a name="boost.units.temperature_base_dimension"></a><div class="titlepage"></div>
<div class="refnamediv">
<h2><span class="refentrytitle">Struct temperature_base_dimension</span></h2>
<p>boost::units::temperature_base_dimension &#8212; base dimension of temperature </p>
</div>
<h2 xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" class="refsynopsisdiv-title">Synopsis</h2>
<div xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" class="refsynopsisdiv"><pre class="synopsis"><span class="comment">// In header: &lt;<a class="link" href="../../boost_units/Reference.html#header.boost.units.physical_dimensions.temperature_hpp" title="Header &lt;boost/units/physical_dimensions/temperature.hpp&gt;">boost/units/physical_dimensions/temperature.hpp</a>&gt;
</span>
<span class="keyword">struct</span> <a class="link" href="temperature_base_dimension.html" title="Struct temperature_base_dimension">temperature_base_dimension</a> <span class="special">{</span>
<span class="special">}</span><span class="special">;</span></pre></div>
</div>
<table xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" width="100%"><tr>
<td align="left"></td>
<td align="right"><div class="copyright-footer">Copyright &#169; 2003-2008 Matthias Christian Schabel<br>Copyright &#169; 2007-2010 Steven
      Watanabe<p>
        Distributed under the Boost Software License, Version 1.0. (See accompanying
        file LICENSE_1_0.txt or copy at <a href="http://www.boost.org/LICENSE_1_0.txt" target="_top">http://www.boost.org/LICENSE_1_0.txt</a>)
      </p>
</div></td>
</tr></table>
<hr>
<div class="spirit-nav">
<a accesskey="p" href="solid_angle_base_dimension.html"><img src="../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../../boost_units/Reference.html#header.boost.units.physical_dimensions.temperature_hpp"><img src="../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../index.html"><img src="../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="time_base_dimension.html"><img src="../../../../doc/src/images/next.png" alt="Next"></a>
</div>
</body>
</html>
| {
"content_hash": "cbcfc193c18dbaa362ab1ab95dbe9d8f",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 502,
"avg_line_length": 79.03846153846153,
"alnum_prop": 0.6802919708029197,
"repo_name": "hand-iemura/lightpng",
"id": "e57e3678de9de8e15e8de4cdedcfd55eb73e04ae",
"size": "4110",
"binary": false,
"copies": "11",
"ref": "refs/heads/master",
"path": "boost_1_53_0/doc/html/boost/units/temperature_base_dimension.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "139512"
},
{
"name": "Batchfile",
"bytes": "43970"
},
{
"name": "C",
"bytes": "2306793"
},
{
"name": "C#",
"bytes": "40804"
},
{
"name": "C++",
"bytes": "139009726"
},
{
"name": "CMake",
"bytes": "1741"
},
{
"name": "CSS",
"bytes": "309758"
},
{
"name": "Cuda",
"bytes": "26749"
},
{
"name": "FORTRAN",
"bytes": "1387"
},
{
"name": "Groff",
"bytes": "8039"
},
{
"name": "HTML",
"bytes": "139153356"
},
{
"name": "IDL",
"bytes": "14"
},
{
"name": "JavaScript",
"bytes": "132031"
},
{
"name": "Lex",
"bytes": "1255"
},
{
"name": "M4",
"bytes": "29689"
},
{
"name": "Makefile",
"bytes": "1074346"
},
{
"name": "Max",
"bytes": "36857"
},
{
"name": "Objective-C",
"bytes": "3745"
},
{
"name": "PHP",
"bytes": "59030"
},
{
"name": "Perl",
"bytes": "29502"
},
{
"name": "Perl6",
"bytes": "2053"
},
{
"name": "Python",
"bytes": "1710815"
},
{
"name": "QML",
"bytes": "593"
},
{
"name": "Rebol",
"bytes": "354"
},
{
"name": "Shell",
"bytes": "376263"
},
{
"name": "Tcl",
"bytes": "1172"
},
{
"name": "TeX",
"bytes": "13404"
},
{
"name": "XSLT",
"bytes": "761090"
},
{
"name": "Yacc",
"bytes": "18910"
}
],
"symlink_target": ""
} |
package com.redthirddivision.firestorm.rendering;
import java.awt.Graphics2D;
/**
* <strong>Project:</strong> Firestorm <br>
* <strong>File:</strong> ParallaxEngine.java
*
* @author <a href = "http://bossletsplays.com"> Matthew Rogers</a>
*/
public class ParallaxEngine {

    /** The layers this engine drives; operations are applied in array order. */
    private ParallaxLayer[] layers;

    /**
     * Creates an engine driving the supplied layers.
     *
     * @param layers the parallax layers to manage
     */
    public ParallaxEngine(ParallaxLayer... layers) {
        this.layers = layers;
    }

    /** Invokes {@link ParallaxLayer#setRight()} on every layer. */
    public void setRight() {
        for (ParallaxLayer layer : layers)
            layer.setRight();
    }

    /** Invokes {@link ParallaxLayer#setLeft()} on every layer. */
    public void setLeft() {
        for (ParallaxLayer layer : layers)
            layer.setLeft();
    }

    /** Invokes {@link ParallaxLayer#stop()} on every layer. */
    public void stop() {
        for (ParallaxLayer layer : layers)
            layer.stop();
    }

    /** Invokes {@link ParallaxLayer#move()} on every layer. */
    public void move() {
        for (ParallaxLayer layer : layers)
            layer.move();
    }

    /**
     * Renders every layer with the given graphics context.
     *
     * @param g the graphics context to draw into
     */
    public void render(Graphics2D g) {
        for (ParallaxLayer layer : layers)
            layer.render(g);
    }
}
| {
"content_hash": "daeb99ec1386fdfbdd7da22d00c4f929",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 67,
"avg_line_length": 22.022222222222222,
"alnum_prop": 0.5519677093844602,
"repo_name": "BossLetsPlays/Firestorm",
"id": "b9f54cd92d5c02765af202ebb4c7431320de5608",
"size": "1606",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "source/com/redthirddivision/firestorm/rendering/ParallaxEngine.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "62954"
}
],
"symlink_target": ""
} |
#include "chronotext/utils/glUtils.h"
#include "chronotext/utils/Utils.h"
#include "chronotext/utils/MathUtils.h"
using namespace std;
using namespace ci;
/*
 * Builds a standard perspective projection matrix (gluPerspective-style):
 * the vertical field-of-view (in degrees) is converted to near-plane
 * extents, which are then handed to getFrustumMatrix().
 */
const Matrix44f getPerspectiveMatrix(float fovy, float aspect, float zNear, float zFar)
{
    // Half-height of the near plane; fovy is in degrees, hence the / 360.
    float halfH = zNear * math<float>::tan(fovy * PI / 360);
    float halfW = halfH * aspect;

    return getFrustumMatrix(-halfW, halfW, -halfH, halfH, zNear, zFar);
}
/*
 * SPECIAL VERSION TAKING PAN AND ZOOM INTO ACCOUNT
 *
 * Like the plain overload, but the near-plane window is shrunk by the zoom
 * factor and shifted so the pixel pan (panX, panY) offsets the view.
 */
const Matrix44f getPerspectiveMatrix(float fovy, float zNear, float zFar, float width, float height, float panX, float panY, float zoom)
{
    // Near-plane half extents, reduced as the zoom factor grows.
    float hh = zNear * math<float>::tan(fovy * PI / 360) / zoom;
    float hw = hh * width / height;

    // Convert the pixel pan into near-plane units (sign flipped so that a
    // positive pan moves the content, not the frustum).
    float shiftX = -panX * (hw * 2 / width);
    float shiftY = -panY * (hh * 2 / height);

    float left = -hw + shiftX;
    float right = hw + shiftX;
    float bottom = -hh + shiftY;
    float top = hh + shiftY;

    return getFrustumMatrix(left, right, bottom, top, zNear, zFar);
}
/*
 * BASED ON CODE FROM http://www.mesa3d.org
 *
 * Builds an OpenGL-style frustum (perspective) projection matrix from the
 * near-plane window [left, right] x [bottom, top] and the clip distances.
 */
const Matrix44f getFrustumMatrix(float left, float right, float bottom, float top, float znear, float zfar)
{
    float sx = (2 * znear) / (right - left);         // x scale
    float sy = (2 * znear) / (top - bottom);         // y scale
    float tx = (right + left) / (right - left);      // x off-center term
    float ty = (top + bottom) / (top - bottom);      // y off-center term
    float zs = -(zfar + znear) / ( zfar - znear);    // depth scale
    float zt = -(2 * zfar * znear) / (zfar - znear); // depth translation

    // Element order matches glFrustum's column-major layout.
    float cells[] =
    {
        sx, 0,  0,  0,
        0,  sy, 0,  0,
        tx, ty, zs, -1,
        0,  0,  zt, 0
    };

    return Matrix44f(cells);
}
// Convenience overload: draws a grid with square cells of the given size.
void drawGrid(const Rectf &bounds, float size, const Vec2f &offset)
{
    drawGrid(bounds, size, size, offset);
}
/*
 * Draws a line grid covering "bounds" with cell size (sx, sy), phase-shifted
 * by "offset". All lines are accumulated into one vertex array and issued
 * as a single GL_LINES draw call.
 */
void drawGrid(const Rectf &bounds, float sx, float sy, const Vec2f &offset)
{
    // Snap the first grid line onto the lattice defined by "offset".
    // NOTE(review): boundf comes from MathUtils.h — presumably a positive
    // modulo; confirm its behavior for negative inputs.
    float x1 = bounds.x1 - boundf(bounds.x1 - offset.x, sx);
    float y1 = bounds.y1 - boundf(bounds.y1 - offset.y, sy);

    // Line counts in each direction (+1 so the far edge is covered).
    int nx = (int)math<float>::ceil(bounds.getWidth() / sx) + 1;
    int ny = (int)math<float>::ceil(bounds.getHeight() / sy) + 1;

    vector<Vec2f> vertices;
    vertices.reserve((nx + ny) * 4); // generous: only 2 vertices per line are used

    // Horizontal lines: two endpoints each, spanning the full width.
    for (int iy = 0; iy < ny; iy++)
    {
        float y = y1 + iy * sy;
        vertices.push_back(Vec2f(bounds.x1, y));
        vertices.push_back(Vec2f(bounds.x2, y));
    }

    // Vertical lines: two endpoints each, spanning the full height.
    for (int ix = 0; ix < nx; ix++)
    {
        float x = x1 + ix * sx;
        vertices.push_back(Vec2f(x, bounds.y1));
        vertices.push_back(Vec2f(x, bounds.y2));
    }

    // Fixed-function (legacy GL / GLES1) vertex-array submission.
    glEnableClientState(GL_VERTEX_ARRAY);
    glVertexPointer(2, GL_FLOAT, 0, vertices.data());
    glDrawArrays(GL_LINES, 0, vertices.size());
    glDisableClientState(GL_VERTEX_ARRAY);
}
/*
 * Draws a quad covering the whole viewport by temporarily resetting both
 * the model-view and projection matrices to identity, so the [-1, +1]
 * square in normalized device coordinates maps exactly to the screen.
 * Both matrix stacks are restored before returning.
 */
void drawFullScreenQuad()
{
    // Save and reset the model-view matrix...
    glMatrixMode(GL_MODELVIEW);
    glPushMatrix();
    glLoadIdentity();

    // ...and the projection matrix.
    glMatrixMode(GL_PROJECTION);
    glPushMatrix();
    glLoadIdentity();

    // Quad corners in NDC.
    const float vertices[] =
    {
        -1, -1,
        +1, -1,
        +1, +1,
        -1, +1
    };

    glEnableClientState(GL_VERTEX_ARRAY);
    glVertexPointer(2, GL_FLOAT, 0, vertices);
    glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
    glDisableClientState(GL_VERTEX_ARRAY);

    // Restore projection first (it is still the active mode), then model-view.
    glPopMatrix();
    glMatrixMode(GL_MODELVIEW);
    glPopMatrix();
}
/*
 * Logs the state of "cam" as ready-to-paste C++ setter calls on a camera
 * variable named "name" — useful for capturing an interactively tuned
 * camera and hard-coding it back into source.
 */
void dumpCamera(const Camera &cam, const string &name)
{
    Vec3f worldUp = cam.getWorldUp();
    Vec3f eyepoint = cam.getEyePoint();
    Vec3f centerOfInterestPoint = cam.getCenterOfInterestPoint();

    // Orientation is emitted in axis/angle form to match Quatf's constructor.
    Quatf orientation = cam.getOrientation();
    Vec3f axis = orientation.getAxis();

    LOGI << name << ".setWorldUp(Vec3f(" << worldUp.x << ", " << worldUp.y << ", " << worldUp.z << "));" << endl;
    LOGI << name << ".setEyePoint(Vec3f(" << eyepoint.x << ", " << eyepoint.y << ", " << eyepoint.z << "));" << endl;
    LOGI << name << ".setCenterOfInterestPoint(Vec3f(" << centerOfInterestPoint.x << ", " << centerOfInterestPoint.y << ", " << centerOfInterestPoint.z << "));" << endl;
    LOGI << name << ".setOrientation(Quatf(Vec3f(" << axis.x << ", " << axis.y << ", " << axis.z << "), " << orientation.getAngle() << "));" << endl;
    LOGI << name << ".setPerspective(" << cam.getFov() << ", getWindowAspectRatio(), " << cam.getNearClip() << ", " << cam.getFarClip() << ");" << endl;
}
| {
"content_hash": "5cc3ccbb3649e43871b1e8717cb4668c",
"timestamp": "",
"source": "github",
"line_count": 132,
"max_line_length": 169,
"avg_line_length": 32.28030303030303,
"alnum_prop": 0.595869514198545,
"repo_name": "michaelboccara/new-chronotext-toolkit",
"id": "5f22884296ebf43ed297b8d99828c10948a9e429",
"size": "4261",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/chronotext/utils/glUtils.cpp",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "44087"
},
{
"name": "C++",
"bytes": "285820"
},
{
"name": "Java",
"bytes": "13525"
},
{
"name": "Objective-C",
"bytes": "24855"
}
],
"symlink_target": ""
} |
package uk.co.compendiumdev.javafortesters.domain.http;
import java.util.HashMap;
import java.util.Map;
/**
 * Minimal value holder for an HTTP response: a status code, a body string
 * and a header map. {@link #raw()} renders the response in a wire-like
 * textual format.
 */
public class HttpResponse {

    /** HTTP status code, e.g. 200. */
    public int statusCode;
    /** Response body text; may be null when the response has no body. */
    public String body;

    // Header name -> value; an entry with a null key holds the status line.
    private Map<String, String> headers = new HashMap<>();

    public Map<String, String> getHeaders() {
        return headers;
    }

    public void setHeaders(Map<String, String> headers) {
        this.headers.putAll(headers);
    }

    /**
     * Renders the response as text: null-key entries first (the status
     * line), then "Name: value" lines with Transfer-Encoding suppressed,
     * then a blank line followed by the body when one is present.
     */
    public String raw() {
        StringBuilder out = new StringBuilder();

        // Status-line entries are stored under a null key and go first.
        for (Map.Entry<String, String> entry : headers.entrySet()) {
            if (entry.getKey() == null) {
                out.append(entry.getValue()).append("\n");
            }
        }

        // Named headers follow; chunked transfer framing is not reproduced.
        for (Map.Entry<String, String> entry : headers.entrySet()) {
            String name = entry.getKey();
            if (name == null || name.equalsIgnoreCase("Transfer-Encoding")) {
                continue;
            }
            out.append(name).append(": ").append(entry.getValue()).append("\n");
        }

        if (body != null) {
            out.append("\n").append(body);
        }
        return out.toString();
    }
}
| {
"content_hash": "796477eeafe1ca3e06f7a170005ca068",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 76,
"avg_line_length": 26.326923076923077,
"alnum_prop": 0.5303140978816655,
"repo_name": "eviltester/testtoolhub",
"id": "9275ad95ca7cddfc7b176e7b561fa4de2a49227d",
"size": "1369",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/uk/co/compendiumdev/javafortesters/domain/http/HttpResponse.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "174214"
}
],
"symlink_target": ""
} |
package ping.pong.net.connection.messaging;
/**
 * Straightforward {@link Envelope} implementation pairing a payload message
 * with a reliability flag. Delivery is reliable by default.
 *
 * @param <Message> payload type carried by this envelope
 * @author mfullen
 */
public class DefaultEnvelope<Message> implements Envelope<Message>
{
    // Defaults to reliable delivery.
    private boolean reliable = true;
    // Payload carried by this envelope; null until assigned.
    private Message message;

    @Override
    public boolean isReliable()
    {
        return this.reliable;
    }

    /**
     * @param reliable whether the message should be delivered reliably
     */
    public void setReliable(boolean reliable)
    {
        this.reliable = reliable;
    }

    @Override
    public Message getMessage()
    {
        return this.message;
    }

    /**
     * @param message payload to carry
     */
    public void setMessage(Message message)
    {
        this.message = message;
    }

    @Override
    public String toString()
    {
        StringBuilder text = new StringBuilder("DefaultEnvelope{");
        text.append("reliable=").append(reliable);
        text.append(", message=").append(message);
        text.append('}');
        return text.toString();
    }
}
| {
"content_hash": "78f78361571e6dccb8b905f118d2a329",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 90,
"avg_line_length": 17.0625,
"alnum_prop": 0.5714285714285714,
"repo_name": "mfullen/ping-pong-net",
"id": "8ab46b2cb18386be37abe53acf90d7ea7923fea2",
"size": "819",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pingpongnet/src/main/java/ping/pong/net/connection/messaging/DefaultEnvelope.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "251239"
}
],
"symlink_target": ""
} |
.. _vSphere API 5.0: ../../vim/version.rst#vimversionversion7
.. _vim.event.AlarmEvent: ../../vim/event/AlarmEvent.rst
.. _vim.event.ManagedEntityEventArgument: ../../vim/event/ManagedEntityEventArgument.rst
vim.event.AlarmAcknowledgedEvent
================================
This event records the acknowledgement of an Alarm
:extends: vim.event.AlarmEvent_
:since: `vSphere API 5.0`_
Attributes:
source (`vim.event.ManagedEntityEventArgument`_):
The entity that triggered the alarm.
entity (`vim.event.ManagedEntityEventArgument`_):
The entity with which the alarm is registered.
| {
"content_hash": "100043a8041d6faf6cbf9ca5ee389a2b",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 88,
"avg_line_length": 30.55,
"alnum_prop": 0.6972176759410802,
"repo_name": "rhadman/pyvmomi",
"id": "3f135ea36ba2069452f72537fe679c8a1704c60f",
"size": "611",
"binary": false,
"copies": "15",
"ref": "refs/heads/master",
"path": "docs/vim/event/AlarmAcknowledgedEvent.rst",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1157026"
}
],
"symlink_target": ""
} |
(function (window, document) {
'use strict';
/**
* Get requestAnimationFrame function if available.
* @type {function|boolean}
*/
var RAF = window.requestAnimationFrame || window.webkitRequestAnimationFrame || window.mozRequestAnimationFrame || window.oRequestAnimationFrame || window.msRequestAnimationFrame || false;
/**
* Check if script is already present.
* @type {object|boolean}
*/
var old_APP = typeof window.RAFstats !== 'undefined' ? window.RAFstats : false;
/**
* Reference for the APP in use - old or this one.
* (set at the end of the script)
*/
var active_APP;
/**
* Short-hand for microtime.
* @returns {number} Current date and time in milliseconds
*/
var NOW = function () {
return (new Date()).getTime();
};
/**
* Main application object.
*
* Contains main interactive behaviour.
* API interface set in here.
*/
var APP = {
/**
* Is application initialised - flag.
* @type {boolean}
*/
initialised: false,
/**
* Check if touch is available.
* @type {boolean}
*/
touch_support: ('ontouchstart' in window || 'onmsgesturechange' in window),
/**
* Main dimensions settings.
* @type {object}
*/
settings: {
/**
* Monitor width (in px).
* @type {number}
*/
width: 150,
/**
* Monitor height (in px).
* @type {number}
*/
height: 50
},
/**
* Holds instance of Stats class.
* @type {null|object}
*/
stats: null,
/**
* Holds instance of Dispaly class.
* @type {null|object}
*/
display: null,
/**
* Active view id.
* @type {number}
*/
view: 0,
/**
* List of available views in Display class.
* @type {array}
*/
layouts: ['viewFps', 'viewMs', 'viewAll', 'viewSimple'],
/**
* Request animation loop function.
*/
getFrame: function () {
// Add raf callback
RAF(function () {
// Run only if application is running
if (APP.initialised) {
APP.stats.nextFrame();
APP.drawLayout();
APP.getFrame();
}
});
},
/**
* Draw selected view.
*/
drawLayout: function () {
// View is executed as a function in Display class
APP.display[APP.layouts[APP.view]]();
},
/**
* Interactive behaviour of the application.
* @type {object}
*/
behaviour: {
/**
* Minimum distance (in px) needed to do swipe gesture.
* @type {number}
*/
swipe_diff: 25,
/**
* Is swipe gesture - flag.
* @type {boolean}
*/
swipe: false,
/**
* Action started - flag.
* @type {boolean}
*/
active: false,
/**
* Mouse/touch X position at the beginning of the action.
* @type {number}
*/
pos_x: 0,
/**
* Mouse/touch Y position at the beginning of the action.
* @type {number}
*/
pos_y: 0,
/**
* Execute action based on the gesture.
* @param diff_x {number}
* @param diff_y {number}
* @param swipe {boolean}
*/
action: function (diff_x, diff_y, swipe) {
var swipe_diff = APP.behaviour.swipe_diff;
// If swipe add min swipe value - triggers swipe
if (swipe) {
diff_x += swipe_diff;
diff_y += swipe_diff;
}
// Check if gesture position difference is swipe
if (diff_x > swipe_diff || diff_y > swipe_diff) {
if (diff_x > diff_y) {
// Horizontal swipe - toggle monitor position (left-right)
APP.api.toggleSides();
} else {
// Vertical swipe - reset stats
APP.api.reset();
}
} else {
// If not swipe then trigger click event - display next view
APP.api.nextView();
}
},
/**
* Special event for handling all event types.
* @param ev {object}
*/
handleEvent: function (ev) {
var self = APP.behaviour,
diff_x,
diff_y,
pos_x,
pos_y,
touch;
// Check if event is type of touch or mouse and set coordinate variables
if (ev.type === 'touchstart' || ev.type === 'touchleave' || ev.type === 'touchcancel' || ev.type === 'touchend') {
// Get first touch event from touch list
touch = ev.touches[0] || ev.changedTouches[0];
// Touch event - set coordinate variables
pos_x = touch.pageX;
pos_y = touch.pageY;
} else {
// Mouse event - set coordinate variables
pos_x = ev.clientX;
pos_y = ev.clientY;
}
// Start of the event (all other events are considered as the end in this case)
if (ev.type === 'mousedown' || ev.type === 'touchstart') {
// Save start coordinates
self.pos_x = pos_x;
self.pos_y = pos_y;
self.active = true;
} else {
// At event end check if gesture left the monitor element - considered as swipe
if (ev.type === 'mouseout' || ev.type === 'touchleave' || ev.type === 'touchcancel') {
self.swipe = true;
}
// Only if gesture is still in progress - calc distance and pass it to process
if (self.active) {
diff_x = Math.abs(self.pos_x - pos_x);
diff_y = Math.abs(self.pos_y - pos_y);
self.action(diff_x, diff_y, self.swipe);
self.active = false;
}
self.swipe = false;
}
ev.stopPropagation();
ev.preventDefault();
}
},
/**
* Application API interface.
* @type {object}
*/
api: {
/**
* Initialise this aplication.
* @returns {boolean}
*/
init: function () {
/**
* Event listener short-hand.
* Bind event by name to monitor (canvas) element.
* Event types are handled by special object APP.behaviour.
* @param name
*/
var addEvent = function (name) {
APP.display.canvas.addEventListener(name, APP.behaviour, false);
};
if (!APP.initialised) {
// Add new Display class (creates canvas etc.)
APP.display = new Display(APP.settings.width, APP.settings.height);
if (APP.display.supported) {
addEvent('mousedown');
addEvent('mouseup');
addEvent('mouseout');
APP.display.canvas.style.cursor = 'pointer';
if (APP.touch_support) {
addEvent('touchstart');
addEvent('touchleave');
addEvent('touchcancel');
addEvent('touchend');
}
// Add new Stats object (calculates stats and keep history)
APP.stats = new Stats(APP.settings.width);
// Set as initialised and start request animation frame loop
APP.initialised = true;
APP.getFrame();
return true;
}
}
return false;
},
/**
* Loops/toggles through different views.
* @returns {number}
*/
nextView: function () {
return APP.view = APP.view + 1 >= APP.layouts.length ? 0 : APP.view + 1;
},
/**
* Toggle position of the monitor/canvas between top-left and top-right.
* @returns {boolean}
*/
toggleSides: function () {
if (APP.initialised) {
APP.display.canvas.style.right = APP.display.canvas.style.right === 'auto' ? 0 : 'auto';
APP.display.canvas.style.left = APP.display.canvas.style.left === 'auto' ? 0 : 'auto';
return true;
}
return false;
},
/**
* Resets application stats.
* (if application on initialised then do it)
* @returns {boolean}
*/
reset: function () {
if (APP.initialised) {
APP.stats = new Stats(APP.settings.width);
Display._fps_refresh = null;
return true;
} else {
return APP.api.init();
}
},
/**
* Stop application, reset all stats and removed monitor/canvas.
* @returns {boolean}
*/
destroy: function () {
if (APP.initialised) {
APP.stats = null;
APP.display.destroy();
APP.display = null;
APP.initialised = false;
return true;
}
return false;
}
}
};
/**
 * Stats class - calculates fps and ms statistics.
 * Tracks current, min, max and average values of both the frame rate
 * (fps, sampled once per second) and the frame delta (ms, sampled every
 * frame), keeping a bounded history of each for graphing.
 * @param history {number} History array span.
 * @constructor
 */
var Stats = function (history) {
    // Maximum number of samples kept in each history buffer.
    this.history_max = history;

    // Frame-rate stats (a new sample lands once per second in nextFrame).
    this.fps = 0;
    this.fps_min = Infinity;
    this.fps_max = 0;
    this.fps_avg = 0;
    this.fps_total = 0;
    this.fps_count = 0;
    // Accumulators used to bucket frames into one-second windows.
    this.fps_calc_seconds = 0;
    this.fps_calc_frames = 0;
    this.fps_history = [];

    // Frame-delta stats (updated every frame).
    this.ms = 0;
    // NOTE(review): ms_min/ms_max start swapped relative to the fps fields
    // above; calcMsStats updates them with the same swap and the ms views
    // read them accordingly — confirm intent before normalizing.
    this.ms_min = 0;
    this.ms_max = Infinity;
    this.ms_avg = 0;
    this.ms_total = 0;
    this.ms_count = 0;
    this.ms_history = [];

    // Timestamp of the most recent frame, used to compute deltas.
    this.time_now = NOW();
};
/**
 * Calculate and store this frame stats.
 * Computes the delta since the previous call, folds it into the ms stats
 * and — once a full second has accumulated — emits a new fps sample.
 */
Stats.prototype.nextFrame = function () {
    var time_now = NOW(),
        time_diff = time_now - this.time_now;

    // Calculate ms
    // Every frame contributes one ms sample.
    this.ms = time_diff;
    this.ms_total += time_diff;
    this.ms_count += 1;
    this.storeMs(time_diff);
    this.calcMsStats(time_diff);

    // Calculate fps
    // Frames are bucketed into one-second windows; the frame count of a
    // completed window becomes the fps sample.
    this.fps_calc_seconds += time_diff;
    this.fps_calc_frames += 1;
    if (this.fps_calc_seconds >= 1000) {
        this.fps = this.fps_calc_frames;
        this.fps_total += this.fps_calc_frames;
        this.fps_count += 1;
        this.storeFps(this.fps_calc_frames);
        this.calcFpsStats(this.fps_calc_frames);

        // Reset values for next second
        // NOTE(review): resetting to 0 drops any overshoot past 1000 ms,
        // slightly stretching the sampling window.
        this.fps_calc_seconds = 0;
        this.fps_calc_frames = 0;
    }

    // Update time of this frame
    this.time_now = time_now;
};
/**
 * Calculate min, max and average ms.
 * @param num {number}
 *
 * NOTE(review): the comparisons are inverted relative to calcFpsStats —
 * ms_min is raised towards the largest sample and ms_max lowered towards
 * the smallest (their initial values in the constructor are swapped to
 * match). The ms views consume the fields with the same swap, so this may
 * be deliberate; confirm intent before changing either side.
 */
Stats.prototype.calcMsStats = function (num) {
    if (num > this.ms_min) {
        this.ms_min = num;
    }
    if (num < this.ms_max) {
        this.ms_max = num;
    }
    this.ms_avg = Math.round(this.ms_total / this.ms_count);
};
/**
 * Fold a new per-second fps sample into the min / max / average stats.
 * @param num {number}
 */
Stats.prototype.calcFpsStats = function (num) {
    this.fps_max = num > this.fps_max ? num : this.fps_max;
    this.fps_min = num < this.fps_min ? num : this.fps_min;
    this.fps_avg = Math.round(this.fps_total / this.fps_count);
};
/**
 * Append this frame's ms sample to the rolling history buffer,
 * evicting the oldest entry when the buffer is over capacity.
 * @param num {number}
 */
Stats.prototype.storeMs = function (num) {
    var log = this.ms_history;
    log.push(num);
    while (log.length > this.history_max) {
        log.shift();
    }
};
/**
 * Append this second's fps sample to the rolling history buffer,
 * evicting the oldest entry when the buffer is over capacity.
 * @param num {number}
 */
Stats.prototype.storeFps = function (num) {
    var log = this.fps_history;
    log.push(num);
    while (log.length > this.history_max) {
        log.shift();
    }
};
/**
* Graph plotter class for stats in history array.
* @param width {number} Width of graph area.
* @param height {number} Height of graph area.
* @param left {number} Left position of graph area.
* @param bottom {number} Bottom position of graph area.
* @param top_val {number} Top value for the graph values.
* @param limit_good {number} Limit value for graph color.
* @param limit_warning {number} Limit value for graph color.
* @param limit_poor {number} Limit value for graph color.
* @param logic_invert {boolean} If true reversed logic is applied - higher values are worse and lower values are better.
* @constructor
*/
var Graph = function (width, height, left, bottom, top_val, limit_good, limit_warning, limit_poor, logic_invert) {
this.width = width;
this.height = height;
this.bottom = bottom;
this.left = left;
this.limit_good = limit_good;
this.limit_warning = limit_warning;
this.limit_poor = limit_poor;
// Calculate scale factor for values based on the top value and graph height
this.scale = height / top_val;
// Set invert login flag - attribute is optional
this.logic_invert = (typeof logic_invert !== 'undefined' && logic_invert === true);
};
/**
* Colors for different limit types.
* @type {{excellent: string, good: string, warning: string, poor: string}}
*/
Graph.colors = {
// Lime
excellent: '#0BE400',
// Green
good: '#0DAC05',
// Orange
warning: '#d88c08',
// Red
poor: '#d80808'
};
/**
 * Map a sample value onto a severity color using the configured limits.
 * Handles both scoring directions (see logic_invert in the constructor).
 * @param value {number}
 * @returns {string} Hexdec color
 */
Graph.prototype.getColor = function (value) {
    var palette = this.constructor.colors;

    if (this.logic_invert) {
        // Lower is better (e.g. frame time in ms).
        if (value > this.limit_poor) {
            return palette.poor;
        }
        if (value <= this.limit_good) {
            return palette.excellent;
        }
        if (value <= this.limit_warning) {
            return palette.good;
        }
        if (value <= this.limit_poor) {
            return palette.warning;
        }
        // Falls through (undefined) only for non-comparable input,
        // mirroring the original behavior.
    } else {
        // Higher is better (e.g. fps).
        if (value < this.limit_poor) {
            return palette.poor;
        }
        if (value < this.limit_warning) {
            return palette.warning;
        }
        if (value < this.limit_good) {
            return palette.good;
        }
        return palette.excellent;
    }
};
/**
* Draw the graph.
* @param ctx {object} Canvas context object.
* @param dataset {array} History of values.
*/
Graph.prototype.render = function (ctx, dataset) {
var len = dataset.length,
i_end = len - this.width < 0 ? 0 : len - this.width,
r_w = 1,
r_x,
r_y,
r_h,
value,
i,
x;
// Loops through the dataset - limit loop by graph width
for (i = len - 1, x = this.width; i >= i_end; i -= 1, x -= 1) {
value = dataset[i];
// Calculate dimensions fot the value bar
r_x = this.left + x - 1;
r_h = Math.round(value * this.scale);
if (r_h <= 0) r_h = 1;
if (r_h > this.height) r_h = this.height;
r_y = APP.settings.height - this.bottom - r_h;
// Draw bar for this value
ctx.beginPath();
ctx.rect(r_x, r_y, r_w, r_h);
ctx.fillStyle = this.getColor(value);
ctx.fill();
}
};
/**
* Display class.
* Creates canvas element and place it in the page.
* Contains views for the monitor.
* @param width {number}
* @param height {number}
* @constructor
*/
var Display = function (width, height) {
var body = document.querySelector('body'),
canvas = document.createElement('canvas');
if (!canvas.getContext) {
this.supported = false;
alert('You browser doesn\'t support canvas!');
} else {
canvas.width = width;
canvas.height = height;
canvas.display = 'block';
canvas.style.position = 'fixed';
canvas.style.top = 0;
canvas.style.right = 0;
canvas.style.left = 'auto';
canvas.style.zIndex = 999999999;
body.appendChild(canvas);
this.canvas = canvas;
this.ctx = canvas.getContext('2d');
this.supported = true;
}
};
/**
 * Display general settings
 * @type {{color: {bg: string, value: string, nfo: string}, font: string, char: {max: string, min: string, avg: string}}}
 */
Display.settings = {
    color : {
        bg: 'rgba(0,0,0,0.85)',
        value: 'white',
        nfo: '#999999'
    },
    font: 'sans-serif',
    char: {
        // Arrow up char (U+25B2)
        max: '\u25B2',
        // Arrow down char (U+25BC)
        min: '\u25BC',
        // Star char (U+2605)
        avg: '\u2605'
    }
};
/**
 * Clear the canvas and set the background
 */
Display.prototype.clear = function () {
    var context = this.ctx,
        w = this.canvas.width,
        h = this.canvas.height;

    // Wipe everything, then lay down the translucent background panel.
    context.clearRect(0, 0, w, h);
    context.beginPath();
    context.rect(0, 0, w, h);
    context.fillStyle = this.constructor.settings.color.bg;
    context.fill();
};
/**
* Internally use to keep track of current fps and update only when new fps value is added (not at every frame).
* @type {null|number}
*/
Display._fps_refresh = null;
/**
* Set Graph instance for the FPS view.
* @type {Graph}
*/
Display.FpsGraph = new Graph(APP.settings.width, APP.settings.height - 20, 0, 0, 70, 60, 30, 25);
/**
 * Display view - FPS.
 * Full width graph, current fps, max fps, min fps and average fps.
 */
Display.prototype.viewFps = function () {
    var ctx = this.ctx,
        self = this.constructor,
        color = self.settings.color,
        char = self.settings.char,
        font = self.settings.font;

    // Re-render canvas only on new fps value. The sample counter must be
    // written back onto the class so the guard works across frames — the
    // previous code only updated a local copy of _fps_refresh, which made
    // the comparison always fail and forced a redraw every frame.
    if (self._fps_refresh !== APP.stats.fps_count) {
        self._fps_refresh = APP.stats.fps_count;

        this.clear();

        ctx.font = 'bold 9px ' + font;
        ctx.textAlign = 'left';
        ctx.fillStyle = color.value;
        ctx.fillText(APP.stats.fps + ' fps', 5, 12);

        ctx.font = '9px ' + font;
        ctx.textAlign = 'right';
        ctx.fillStyle = color.nfo;
        ctx.fillText(char.max + APP.stats.fps_max + ' ' + char.avg + APP.stats.fps_avg + ' ' + char.min + (APP.stats.fps_min === Infinity ? 0 : APP.stats.fps_min), this.canvas.width - 5, 12);

        self.FpsGraph.render(ctx, APP.stats.fps_history);
    }
};
/**
* Set Graph instance for the MS view.
* @type {Graph}
*/
Display.MsGraph = new Graph(APP.settings.width, APP.settings.height - 20, 0, 0, 100, 18, 34, 40, true);
/**
* Display view - MS (ms delay between frames).
* Full width graph, current ms, max ms, min ms and average ms.
*/
Display.prototype.viewMs = function () {
var ctx = this.ctx,
self = this.constructor,
color = self.settings.color,
char = self.settings.char,
font = self.settings.font;
this.clear();
ctx.font = 'bold 9px ' + font;
ctx.textAlign = 'left';
ctx.fillStyle = color.value;
ctx.fillText(APP.stats.ms + ' ms', 5, 12);
ctx.font = '9px ' + font;
ctx.textAlign = 'right';
ctx.fillStyle = color.nfo;
ctx.fillText(char.max + (APP.stats.ms_max === Infinity ? 0 : APP.stats.ms_max) + ' ' + char.avg + APP.stats.ms_avg + ' ' + char.min + APP.stats.ms_min, this.canvas.width - 5, 12);
self.MsGraph.render(ctx, APP.stats.ms_history);
};
/**
* Display half layout - gutter value
* @type {number}
*/
Display._half_gutter = 2;
/**
* Display half layout - with of the half
* @type {number}
*/
Display._half_width = Math.round((APP.settings.width - Display._half_gutter) / 2);
/**
* Display half layout - X position of the 2nd half
* @type {number}
*/
Display._half_2nd_col_x = Display._half_width + Display._half_gutter;
/**
* Set Graph instance for the ALL view - FPS half graph.
* @type {Graph}
*/
Display.AllFpsGraph = new Graph(Display._half_width, APP.settings.height - 20, 0, 0, 70, 60, 30, 25);
/**
* Set Graph instance for the ALL view - MS half graph.
* @type {Graph}
*/
Display.AllMsGraph = new Graph(Display._half_width, APP.settings.height - 20, Display._half_2nd_col_x, 0, 100, 18, 34, 40, true);
/**
* Display view - ALL - split view for FPS and MS.
* Half width fps graph, current fps, average fps, half width ms graph, current ms and average ms.
*/
Display.prototype.viewAll = function () {
var ctx = this.ctx,
self = this.constructor,
color = self.settings.color,
char = self.settings.char,
font = self.settings.font;
this.clear();
ctx.clearRect(self._half_width, 0, self._half_gutter, this.canvas.height);
ctx.font = 'bold 9px ' + font;
ctx.textAlign = 'left';
ctx.fillStyle = color.value;
ctx.fillText(APP.stats.fps + ' fps', 5, 12);
ctx.fillText(APP.stats.ms + ' ms', self._half_2nd_col_x + 5, 12);
ctx.font = '9px ' + font;
ctx.textAlign = 'right';
ctx.fillStyle = color.nfo;
ctx.fillText(char.avg + APP.stats.fps_avg, self._half_width - 5, 12);
ctx.fillText(char.avg + APP.stats.ms_avg, this.canvas.width - 5, 12);
self.AllFpsGraph.render(ctx, APP.stats.fps_history);
self.AllMsGraph.render(ctx, APP.stats.ms_history);
};
/**
* Display view - SIMPLE - split view for FPS and MS values in colored block reflecting the performance.
* Current fps, average fps, current ms and average ms.
*/
Display.prototype.viewSimple = function () {
var ctx = this.ctx,
self = this.constructor,
color = self.settings.color,
char = self.settings.char,
font = self.settings.font,
half_center = Math.round(self._half_width / 2);
this.clear();
ctx.clearRect(self._half_width, 0, self._half_gutter, this.canvas.height);
ctx.beginPath();
ctx.rect(0, 0, self._half_width, APP.settings.height);
ctx.fillStyle = self.AllFpsGraph.getColor(APP.stats.fps);
ctx.fill();
ctx.beginPath();
ctx.rect(self._half_2nd_col_x, 0, self._half_width, APP.settings.height);
ctx.fillStyle = self.AllMsGraph.getColor(APP.stats.ms);
ctx.fill();
ctx.font = 'bold 28px ' + font;
ctx.textAlign = 'center';
ctx.fillStyle = color.value;
ctx.fillText(APP.stats.fps, half_center, 32);
ctx.fillText(APP.stats.ms, self._half_2nd_col_x + half_center, 32);
ctx.font = '9px ' + font;
ctx.fillText(char.avg + APP.stats.fps_avg + ' fps', half_center, 44);
ctx.fillText(char.avg + APP.stats.ms_avg + ' ms', self._half_2nd_col_x + half_center, 44);
};
/**
* Remove canvas from the DOM.
*/
Display.prototype.destroy = function () {
this.canvas.remove();
};
// Check if request animation frame feature is available
if (RAF === false) {
alert('You browser doesn\'t support requestAnimationFrame!');
} else {
// Use existing or this APP as an active object and expose public API interface
if (old_APP === false) {
window.RAFstats = active_APP = APP.api;
} else {
active_APP = old_APP;
}
// If not running start app
if (!active_APP.initialised) {
active_APP.init();
}
}
}(window, document));
| {
"content_hash": "f352cdd6dc480173e9b7ccbf4b87e1fb",
"timestamp": "",
"source": "github",
"line_count": 939,
"max_line_length": 197,
"avg_line_length": 29.49094781682641,
"alnum_prop": 0.45825509172324136,
"repo_name": "msrch/raf-stats",
"id": "1d8f7a4bd8e144a8a3601bfcb00dc75035c73ec9",
"size": "27692",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/raf-stats.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "19796"
}
],
"symlink_target": ""
} |
// Copyright 2017 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.runtime.commands;
import com.google.common.collect.ImmutableSet;
import com.google.devtools.build.lib.analysis.NoBuildEvent;
import com.google.devtools.build.lib.runtime.BlazeCommand;
import com.google.devtools.build.lib.runtime.BlazeCommandResult;
import com.google.devtools.build.lib.runtime.Command;
import com.google.devtools.build.lib.runtime.CommandEnvironment;
import com.google.devtools.build.lib.util.ExitCode;
import com.google.devtools.build.lib.util.ResourceFileLoader;
import com.google.devtools.build.lib.util.io.OutErr;
import com.google.devtools.common.options.OptionsParsingResult;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
/** A command that prints an embedded license text. */
@Command(
  name = "license",
  allowResidue = true,
  mustRunInWorkspace = false,
  shortDescription = "Prints the license of this software.",
  help = "Prints the license of this software.\n\n%{options}"
)
public class LicenseCommand implements BlazeCommand {

  /** File names that hold license text inside a bundled JDK or JRE. */
  private static final ImmutableSet<String> JAVA_LICENSE_FILES =
      ImmutableSet.of("ASSEMBLY_EXCEPTION", "DISCLAIMER", "LICENSE", "THIRD_PARTY_README");

  /** Returns true when a LICENSE resource is embedded next to this class. */
  public static boolean isSupported() {
    return ResourceFileLoader.resourceExists(LicenseCommand.class, "LICENSE");
  }

  @Override
  public BlazeCommandResult exec(CommandEnvironment env, OptionsParsingResult options) {
    env.getEventBus().post(new NoBuildEvent());
    OutErr outErr = env.getReporter().getOutErr();

    outErr.printOutLn("Licenses of all components included in this binary:\n");
    try {
      outErr.printOutLn(ResourceFileLoader.loadResource(this.getClass(), "LICENSE"));
    } catch (IOException e) {
      throw new IllegalStateException(
          "I/O error while trying to print 'LICENSE' resource: " + e.getMessage(), e);
    }

    // The bundled JDK and JRE only differ by directory name and label, so a
    // single helper handles both instead of duplicating the lookup logic.
    printBundledRuntimeLicenses(env, outErr, "jdk", "JDK");
    printBundledRuntimeLicenses(env, outErr, "jre", "JRE");

    return BlazeCommandResult.exitCode(ExitCode.SUCCESS);
  }

  /**
   * Prints the license files of a bundled Java runtime when one exists.
   *
   * @param dirName directory name under embedded_tools ("jdk" or "jre")
   * @param label human-readable runtime name used in the banner message
   */
  private static void printBundledRuntimeLicenses(
      CommandEnvironment env, OutErr outErr, String dirName, String label) {
    Path bundledRuntime =
        env.getDirectories()
            .getEmbeddedBinariesRoot()
            .getRelative("embedded_tools/" + dirName)
            .getPathFile()
            .toPath();
    if (Files.exists(bundledRuntime)) {
      outErr.printOutLn(
          "This binary comes with a bundled "
              + label
              + ", which contains the following license files:\n");
      printJavaLicenseFiles(outErr, bundledRuntime);
    }
  }

  /** Walks a bundled JDK/JRE and copies every known license file to stdout. */
  private static void printJavaLicenseFiles(OutErr outErr, Path bundledJdkOrJre) {
    try {
      Files.walkFileTree(
          bundledJdkOrJre,
          new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult visitFile(Path path, BasicFileAttributes basicFileAttributes)
                throws IOException {
              if (JAVA_LICENSE_FILES.contains(path.getFileName().toString())) {
                outErr.printOutLn(path + ":\n");
                Files.copy(path, outErr.getOutputStream());
                outErr.printOutLn("\n");
              }
              return super.visitFile(path, basicFileAttributes);
            }
          });
    } catch (IOException e) {
      throw new UncheckedIOException(
          "I/O error while trying to print license file of bundled JDK or JRE: " + e.getMessage(),
          e);
    }
  }
}
| {
"content_hash": "1398658469a40e8b270c800961fed0a5",
"timestamp": "",
"source": "github",
"line_count": 114,
"max_line_length": 98,
"avg_line_length": 38.49122807017544,
"alnum_prop": 0.6982680036463081,
"repo_name": "akira-baruah/bazel",
"id": "e9ab803a7215cbdbd602083905e0a6ae708edf99",
"size": "4388",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/main/java/com/google/devtools/build/lib/runtime/commands/LicenseCommand.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "14332"
},
{
"name": "C++",
"bytes": "1037443"
},
{
"name": "HTML",
"bytes": "18607"
},
{
"name": "Java",
"bytes": "27157305"
},
{
"name": "Makefile",
"bytes": "248"
},
{
"name": "PowerShell",
"bytes": "5536"
},
{
"name": "Python",
"bytes": "663737"
},
{
"name": "Roff",
"bytes": "511"
},
{
"name": "Shell",
"bytes": "1038494"
}
],
"symlink_target": ""
} |
#!/usr/bin/env bash
# Copyright 2017-2020 The Verible Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Prints a help message describing this filter.
# NOTE(review): defined but never invoked by the script itself -- presumably
# kept for manual use/documentation; confirm before removing.
usage() {
cat <<EOF
Inserts an action that stores syntax errors in the parser_param struct
before allowing bison error-recovery to discard it.
usage: $0 < inputfile > outputfile
Assumptions:
1) "++yynerrs;" is a unique line in the parser skeleton that corresponds
to the point at which a syntax error is initially detected,
prior to any error-recovery.
2) "param" is a ParserParam struct (from common/parser_param.h), whose
methods include RecordSyntaxError(const YYSTYPE&);
EOF
}
# stdin -> stdout filter: after each line matching "++yynerrs;", append a
# call that records the syntax error in the parser_param struct before
# bison's error recovery can discard it.  "$0" is spliced into the otherwise
# single-quoted sed script so the generated code names this patch script.
sed -e '/++yynerrs;/a\
// Automatically patched by '"$0"':\
param->RecordSyntaxError(yylval);\
// end of automatic patch\
'
# Intended transformation in yy.tab.cc (diff -u syntax):
# (Documentation only: the heredoc below is discarded to /dev/null.)
cat > /dev/null <<EOF
/*------------------------------------.
| yyerrlab -- here on detecting error |
\`------------------------------------*/
yyerrlab:
/* If not already recovering from an error, report this error. */
if (!yyerrstatus)
{
++yynerrs;
+ // Automatically patched by $0:
+ GetParam(param)->RecordSyntaxError(yyla.value);
+ // end of automatic patch
#if ! YYERROR_VERBOSE
yyerror (YY_("syntax error"));
#else
EOF
| {
"content_hash": "1ac208bdbee3d119d14ff9d9c182709d",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 74,
"avg_line_length": 31.875,
"alnum_prop": 0.673389355742297,
"repo_name": "chipsalliance/verible",
"id": "073a1ef73b5a92ac7658fe70a83d6ff8702ccbad",
"size": "1785",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "common/parser/record_syntax_error.sh",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Awk",
"bytes": "3459"
},
{
"name": "C++",
"bytes": "6853412"
},
{
"name": "Dockerfile",
"bytes": "567"
},
{
"name": "JavaScript",
"bytes": "1512"
},
{
"name": "Lex",
"bytes": "58612"
},
{
"name": "Makefile",
"bytes": "166"
},
{
"name": "Nix",
"bytes": "558"
},
{
"name": "Python",
"bytes": "44099"
},
{
"name": "Shell",
"bytes": "169683"
},
{
"name": "Starlark",
"bytes": "257597"
},
{
"name": "TypeScript",
"bytes": "1711"
},
{
"name": "Yacc",
"bytes": "282973"
}
],
"symlink_target": ""
} |
/// Style descriptor for a beautified table view cell: title text, background,
/// border/shadow, and accessory-view imagery.
/// Properties typed with <Optional> may be nil (protocol marker -- presumably
/// JSONModel-style optional semantics; TODO confirm).
@interface BYTableViewCellStyle : BYViewStyle

// text
@property BYText *title;
@property BYTextShadow<Optional> *titleShadow;

// background
@property UIColor<Optional> *backgroundColor;
@property BYGradient<Optional> *backgroundGradient;
@property BYBackgroundImage<Optional> *backgroundImage;

// border
@property BYBorder<Optional> *border;
@property BYShadow<Optional> *innerShadow;
@property BYShadow<Optional> *outerShadow;

// accessory views
@property UIImage<Optional> *accessoryViewImage;
@property UIImage<Optional> *editingAccessoryViewImage;

/// Returns the style used when no explicit style is supplied.
+(BYTableViewCellStyle*)defaultStyle;
@end | {
"content_hash": "54edf1a15759952b19354aedd2a463d1",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 55,
"avg_line_length": 26.08695652173913,
"alnum_prop": 0.81,
"repo_name": "sammyd/beautify-ios",
"id": "7f744ff55bfb897f9db85d67dd58a279300f6129",
"size": "939",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Beautify/BYTableViewCellStyle.h",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Objective-C",
"bytes": "395805"
}
],
"symlink_target": ""
} |
import _toConsumableArray from "@babel/runtime/helpers/toConsumableArray";
import _objectWithoutProperties from "@babel/runtime/helpers/objectWithoutProperties";
import _classCallCheck from "@babel/runtime/helpers/classCallCheck";
import _createClass from "@babel/runtime/helpers/createClass";
import _assertThisInitialized from "@babel/runtime/helpers/assertThisInitialized";
import _inherits from "@babel/runtime/helpers/inherits";
import _createSuper from "@babel/runtime/helpers/createSuper";
import _defineProperty from "@babel/runtime/helpers/defineProperty";
// Keys split off by _objectWithoutProperties in registerModal ("id" is taken
// separately; the rest is merged into the modal's state entry).
var _excluded = ["id"];
import { createScopedElement } from "../../lib/jsxRuntime";
import * as React from 'react';
import { classNames } from "../../lib/classNames";
import { isFunction } from "../../lib/utils";
import { transitionEvent } from "../../lib/supportEvents";
import { withPlatform } from "../../hoc/withPlatform";
import { withContext } from "../../hoc/withContext";
import ModalRootContext from "./ModalRootContext";
import { ConfigProviderContext, WebviewType } from "../ConfigProvider/ConfigProviderContext";
import { ModalType } from "./types";
import { ANDROID, VKCOM } from "../../lib/platform";
import { getClassName } from "../../helpers/getClassName";
import { withDOM } from "../../lib/dom";
import { getNavId } from "../../lib/getNavId";
import { warnOnce } from "../../lib/warnOnce";
import "./ModalRoot.css";
// Module-level singletons: deduplicated warning logger and dev-mode flag.
var warn = warnOnce('ModalRoot');
var IS_DEV = process.env.NODE_ENV === 'development';
// NOTE(review): this is transpiled (Babel) distribution output of VKUI's
// ModalRootDesktop component. Fix upstream TypeScript sources rather than
// this bundle; comments below are for readers of the dist file only.
var ModalRootDesktopComponent = /*#__PURE__*/function (_React$Component) {
_inherits(ModalRootDesktopComponent, _React$Component);
var _super = _createSuper(ModalRootDesktopComponent);
function ModalRootDesktopComponent(props) {
var _this;
_classCallCheck(this, ModalRootDesktopComponent);
_this = _super.call(this, props);
// Instance fields, hoisted here by the class-fields transform.
_defineProperty(_assertThisInitialized(_this), "modalsState", void 0);
_defineProperty(_assertThisInitialized(_this), "maskElementRef", void 0);
_defineProperty(_assertThisInitialized(_this), "maskAnimationFrame", void 0);
_defineProperty(_assertThisInitialized(_this), "modalRootContext", void 0);
_defineProperty(_assertThisInitialized(_this), "activeTransitions", void 0);
// Escape closes the currently active modal.
_defineProperty(_assertThisInitialized(_this), "handleKeyDownEsc", function (e) {
if (e.key === 'Escape') {
_this.triggerActiveModalClose();
}
});
_defineProperty(_assertThisInitialized(_this), "componentWillUnmount", function () {
_this.props.document.removeEventListener('keydown', _this.handleKeyDownEsc);
});
// Fired when a prev/next opacity transition ends; once the last pending
// transition completes, commits nextModal as the active modal.
_defineProperty(_assertThisInitialized(_this), "prevNextSwitchEndHandler", function () {
_this.activeTransitions = Math.max(0, _this.activeTransitions - 1);
if (_this.activeTransitions > 0) {
return;
}
var activeModal = _this.state.nextModal;
var newState = {
prevModal: null,
nextModal: null,
visibleModals: [activeModal],
activeModal: activeModal,
animated: false,
switching: false
};
if (!activeModal) {
newState.history = [];
}
_this.setState(newState);
});
// Requests closing of the active modal, if one is registered.
_defineProperty(_assertThisInitialized(_this), "triggerActiveModalClose", function () {
var activeModalState = _this.modalsState[_this.state.activeModal];
if (activeModalState) {
_this.doCloseModal(activeModalState);
}
});
// Prefers the modal's own onClose; falls back to the root-level onClose prop.
_defineProperty(_assertThisInitialized(_this), "doCloseModal", function (modalState) {
if (isFunction(modalState.onClose)) {
modalState.onClose();
} else if (isFunction(_this.props.onClose)) {
_this.props.onClose(modalState.id);
} else if (IS_DEV) {
warn('onClose is undefined');
}
});
var _activeModal = props.activeModal;
_this.state = {
activeModal: null,
prevModal: null,
nextModal: _activeModal,
visibleModals: _activeModal ? [_activeModal] : [],
animated: !!_activeModal,
switching: false,
history: _activeModal ? [_activeModal] : [],
isBack: false,
inited: false
};
_this.maskElementRef = /*#__PURE__*/React.createRef();
_this.activeTransitions = 0;
_this.initModalsState();
// Context consumed by descendant modal components (ModalPage/ModalCard).
_this.modalRootContext = {
updateModalHeight: function updateModalHeight() {
return undefined;
},
registerModal: function registerModal(_ref) {
var id = _ref.id,
data = _objectWithoutProperties(_ref, _excluded);
return Object.assign(_this.modalsState[id], data);
},
onClose: _this.triggerActiveModalClose,
isInsideModal: true
};
return _this;
}
_createClass(ModalRootDesktopComponent, [{
key: "modals",
get: function get() {
return React.Children.toArray(this.props.children);
}
}, {
key: "initModalsState",
// Builds the per-modal state map (nav id -> registration entry) from children.
value: function initModalsState() {
this.modalsState = this.modals.reduce(function (acc, Modal) {
var modalProps = Modal.props;
var state = {
id: getNavId(Modal.props, warn),
onClose: Modal.props.onClose,
dynamicContentHeight: !!modalProps.dynamicContentHeight
}; // ModalPage props
if (typeof modalProps.settlingHeight === 'number') {
state.settlingHeight = modalProps.settlingHeight;
}
acc[state.id] = state;
return acc;
}, {});
}
}, {
key: "componentDidMount",
value: function componentDidMount() {
this.initActiveModal();
this.props.document.addEventListener('keydown', this.handleKeyDownEsc);
}
}, {
key: "componentDidUpdate",
// Reacts to activeModal prop changes: maintains the modal history stack
// (popping back to an already-seen modal marks isBack) and kicks off the
// open/close/switch animation cycle.
value: function componentDidUpdate(prevProps, prevState) {
var _this2 = this;
if (this.props.activeModal !== prevProps.activeModal) {
var nextModal = this.props.activeModal;
var prevModal = prevProps.activeModal;
if (IS_DEV && nextModal !== null && !this.modalsState[nextModal]) {
return warn("[ModalRoot.componentDidUpdate] nextModal ".concat(nextModal, " not found"));
}
var history = _toConsumableArray(this.state.history);
var isBack = false;
if (nextModal === null) {
history = [];
} else if (history.includes(nextModal)) {
history = history.splice(0, history.indexOf(nextModal) + 1);
isBack = true;
} else {
history.push(nextModal);
}
return this.setState({
activeModal: null,
nextModal: nextModal,
prevModal: prevModal,
visibleModals: [nextModal, prevModal],
history: history,
isBack: isBack,
animated: true,
inited: false,
switching: false
}, function () {
if (nextModal === null) {
_this2.closeActiveModal();
} else {
_this2.initActiveModal();
}
});
}
if (this.state.switching && !prevState.switching) {
requestAnimationFrame(function () {
return _this2.switchPrevNext();
});
}
}
/**
* Initializes the modal before the opening animation
*/
}, {
key: "initActiveModal",
value: function initActiveModal() {
var activeModal = this.state.activeModal || this.state.nextModal;
if (!activeModal) {
return;
}
var modalState = this.modalsState[activeModal];
switch (modalState.type) {
case ModalType.PAGE:
// Default settling height for pages is 75 (percent, per translateY usage).
modalState.settlingHeight = modalState.settlingHeight || 75;
break;
case ModalType.CARD:
break;
default:
if (IS_DEV) {
warn('[initActiveModal] modalState.type is unknown');
}
}
this.setState({
inited: true,
switching: true
});
}
}, {
key: "closeActiveModal",
// Fades out the previously active modal and its backdrop.
value: function closeActiveModal() {
var prevModal = this.state.prevModal;
if (IS_DEV && !prevModal) {
return warn("[closeActiveModal] prevModal is ".concat(prevModal));
}
var prevModalState = this.modalsState[prevModal];
this.waitTransitionFinish(prevModalState, this.prevNextSwitchEndHandler);
this.animateModalOpacity(prevModalState, false);
this.setMaskOpacity(prevModalState, 0);
}
}, {
key: "waitTransitionFinish",
// Invokes eventHandler once the modal's CSS transition ends; falls back to
// a platform-tuned timeout where transition events are unsupported.
value: function waitTransitionFinish(modalState, eventHandler) {
if (transitionEvent.supported) {
var onceHandler = function onceHandler() {
modalState.innerElement.removeEventListener(transitionEvent.name, onceHandler);
eventHandler();
};
modalState.innerElement.addEventListener(transitionEvent.name, onceHandler);
} else {
setTimeout(eventHandler, this.props.platform === ANDROID || this.props.platform === VKCOM ? 320 : 400);
}
}
}, {
key: "switchPrevNext",
// Cross-fades from prevModal to nextModal, sequencing the fades depending
// on the page/card combination involved.
value: function switchPrevNext() {
var _this3 = this;
var _this$state = this.state,
prevModal = _this$state.prevModal,
nextModal = _this$state.nextModal;
var prevModalState = this.modalsState[prevModal];
var nextModalState = this.modalsState[nextModal];
if (IS_DEV && !prevModalState && !nextModalState) {
return warn("[switchPrevNext] prevModal is ".concat(prevModal, ", nextModal is ").concat(nextModal));
}
var prevIsCard = !!prevModalState && prevModalState.type === ModalType.CARD;
var nextIsPage = !!nextModalState && nextModalState.type === ModalType.PAGE;
var nextIsCard = !!nextModalState && nextModalState.type === ModalType.CARD; // Wait for the previous modal to be fully hidden
if (prevModalState && (nextIsCard || prevIsCard && nextIsPage)) {
this.activeTransitions += 1;
this.waitTransitionFinish(prevModalState, function () {
_this3.waitTransitionFinish(nextModalState, _this3.prevNextSwitchEndHandler);
_this3.animateModalOpacity(nextModalState, true);
});
requestAnimationFrame(function () {
_this3.animateModalOpacity(prevModalState, false);
});
return;
}
if (prevModalState && nextIsPage) {
this.activeTransitions += 1;
this.waitTransitionFinish(prevModalState, this.prevNextSwitchEndHandler);
requestAnimationFrame(function () {
_this3.animateModalOpacity(prevModalState, false);
});
}
this.activeTransitions += 1;
this.waitTransitionFinish(nextModalState, this.prevNextSwitchEndHandler);
requestAnimationFrame(function () {
_this3.animateModalOpacity(nextModalState, true);
});
}
}, {
key: "animateModalOpacity",
value:
/* Animates the modal shift */
function animateModalOpacity(modalState, display) {
modalState.innerElement.style.opacity = display ? '1' : '0';
}
/* Sets the opacity of the semi-transparent backdrop */
}, {
key: "setMaskOpacity",
value: function setMaskOpacity(modalState) {
var _this4 = this;
var forceOpacity = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : null;
if (forceOpacity === null && this.state.history[0] !== modalState.id) {
return;
}
// Coalesce repeated calls into a single animation frame.
cancelAnimationFrame(this.maskAnimationFrame);
this.maskAnimationFrame = requestAnimationFrame(function () {
if (_this4.maskElementRef.current) {
var translateY = modalState.translateY,
translateYCurrent = modalState.translateYCurrent;
var opacity = forceOpacity === null ? 1 - (translateYCurrent - translateY) / (100 - translateY) || 0 : forceOpacity;
// NOTE(review): Math.min(100, ...) looks percent-oriented while CSS
// opacity is 0..1 (so it is effectively a no-op upper bound) -- same in
// upstream source; confirm before changing.
_this4.maskElementRef.current.style.opacity = Math.max(0, Math.min(100, opacity)).toString();
}
});
}
/**
* Closes the current modal
* (NOTE(review): comment relocated by the transpiler; it appears to describe
* the close helpers above rather than render.)
*/
}, {
key: "render",
value: function render() {
var _this$state2 = this.state,
prevModal = _this$state2.prevModal,
activeModal = _this$state2.activeModal,
nextModal = _this$state2.nextModal,
visibleModals = _this$state2.visibleModals,
animated = _this$state2.animated;
if (!activeModal && !prevModal && !nextModal && !animated) {
return null;
}
return createScopedElement(ModalRootContext.Provider, {
value: this.modalRootContext
}, createScopedElement("div", {
vkuiClass: classNames(getClassName('ModalRoot', this.props.platform), {
'ModalRoot--vkapps': this.props.configProvider.webviewType === WebviewType.VKAPPS
}, 'ModalRoot--desktop')
}, createScopedElement("div", {
vkuiClass: "ModalRoot__mask",
onClick: this.triggerActiveModalClose,
ref: this.maskElementRef
}), createScopedElement("div", {
vkuiClass: "ModalRoot__viewport"
}, this.modals.map(function (Modal) {
var modalId = getNavId(Modal.props, warn);
if (!visibleModals.includes(modalId)) {
return null;
}
var key = "modal-".concat(modalId);
return createScopedElement("div", {
key: key,
vkuiClass: classNames('ModalRoot__modal', {
'ModalRoot__modal--active': modalId === activeModal,
'ModalRoot__modal--prev': modalId === prevModal,
'ModalRoot__modal--next': modalId === nextModal
})
}, Modal);
}))));
}
}]);
return ModalRootDesktopComponent;
}(React.Component);
// HOC composition: injects DOM, platform and configProvider context props.
export var ModalRootDesktop = withContext(withPlatform(withDOM(ModalRootDesktopComponent)), ConfigProviderContext, 'configProvider');
//# sourceMappingURL=ModalRootDesktop.js.map | {
"content_hash": "7503847a78af2e4e421b57cedb4c39f5",
"timestamp": "",
"source": "github",
"line_count": 410,
"max_line_length": 133,
"avg_line_length": 33.4390243902439,
"alnum_prop": 0.6331145149525893,
"repo_name": "cdnjs/cdnjs",
"id": "f6e088883eb0f5cecc3239760d3c1f1ac9918d2a",
"size": "13882",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "ajax/libs/vkui/4.18.0/cssm/components/ModalRoot/ModalRootDesktop.js",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
using System;
using C = System.Collections;
using G = System.Collections.Generic;
using U = System.Collections.Concurrent;
using System.Threading;
using System.Threading.Tasks;
using ZBrad.AsyncLib.Collections;
namespace ZBrad.AsyncLib
{
/// <summary>
/// An awaitable FIFO queue of <typeparamref name="N"/>: consumers may await
/// <see cref="Dequeue"/> while the queue is empty, and a producer can mark the
/// queue as finished via <see cref="EndEnqueue"/>. All mutable state is
/// guarded by the underlying queue's <see cref="AwaitLock"/>.
/// </summary>
public class WaitQueue<N> : IAsyncQueue<N>, IWaitable where N : IEquatable<N>
{
    // Backing non-waiting queue; also owns the shared lock.
    AsyncQueue<N> queue = new AsyncQueue<N>();

    // Set by EndEnqueue: no further items will be accepted.
    bool isEnded = false;

    // Latched "ended and fully drained" flag (set in IsComplete).
    bool isComplete = false;

    #region IAsyncQueue

    public IQueue<N> Queue { get { return queue.Queue; } }
    public AwaitLock Lock { get { return queue.Lock; } }
    public int Count { get { return queue.Count; } }

    public Task CopyTo(N[] array, int arrayIndex, CancellationToken token)
    {
        return queue.CopyTo(array, arrayIndex, token);
    }

    IAsyncEnumerator<N> IAsyncEnumerable<N>.GetAsyncEnumerator()
    {
        return queue.GetAsyncEnumerator();
    }

    #endregion

    /// <summary>
    /// Returns the head item without removing it, or default(N) when the
    /// queue is empty or has ended. Never waits for an item to arrive.
    /// </summary>
    public async Task<N> Peek(CancellationToken token)
    {
        var trial = await TryPeek(token);
        if (trial.Result)
            return trial.Value;
        return default(N);
    }

    /// <summary>
    /// peek will not block if there are no items on the queue
    /// </summary>
    /// <param name="token">cancels waiting for the queue lock</param>
    /// <returns>a successful Try holding the head item, or Try&lt;N&gt;.False</returns>
    /// <remarks>
    /// NOTE(review): returns False as soon as <c>isEnded</c> is set, even if
    /// items are still queued (Dequeue still drains them) -- confirm intended.
    /// </remarks>
    public async Task<ITry<N>> TryPeek(CancellationToken token)
    {
        using (await queue.Lock.Wait(token))
        {
            if (isEnded || queue.Count == 0)
                return Try<N>.False;
            return new Try<N>(queue.Queue.Peek());
        }
    }

    // we have to use list to allow cancel cleanup
    LinkQueue<Waiter<N>> waiters = new LinkQueue<Waiter<N>>();

    // Number of consumers currently parked in Dequeue.
    public int WaitCount { get { return waiters.Count; } }

    /// <summary>
    /// Marks the queue as ended: subsequent TryEnqueue calls return false.
    /// NOTE(review): does not complete consumers already waiting in Dequeue;
    /// they finish only via cancellation -- confirm intended.
    /// </summary>
    public async Task EndEnqueue(CancellationToken token)
    {
        using (await queue.Lock.Wait(token))
        {
            isEnded = true;
        }
    }

    /// <summary>Returns whether EndEnqueue has been called.</summary>
    public async Task<bool> IsEnded(CancellationToken token)
    {
        using (await queue.Lock.Wait(token))
        {
            return isEnded;
        }
    }

    /// <summary>
    /// Returns whether the queue has ended and drained. Once true the result
    /// is latched and later calls return without taking the lock.
    /// </summary>
    public async Task<bool> IsComplete(CancellationToken token)
    {
        if (isComplete)
            return true;

        using (await queue.Lock.Wait(token))
        {
            if (isEnded && queue.Count == 0)
            {
                isComplete = true;
                return true;
            }

            return false;
        }
    }

    /// <summary>Returns whether the queue currently holds no items.</summary>
    public async Task<bool> IsEmpty(CancellationToken token)
    {
        using (await queue.Lock.Wait(token))
        {
            return queue.Count == 0;
        }
    }

    /// <summary>
    /// Adds an item. NOTE(review): delegates to TryEnqueue and discards its
    /// result, so enqueuing after EndEnqueue is silently ignored -- confirm.
    /// </summary>
    public Task Enqueue(N item, CancellationToken token)
    {
        return TryEnqueue(item, token);
    }

    /// <summary>
    /// Adds an item, handing it directly to a waiting consumer when one is
    /// parked and the queue is empty. Returns false when the queue has ended.
    /// </summary>
    public async Task<bool> TryEnqueue(N item, CancellationToken token)
    {
        using (await queue.Lock.Wait(token))
        {
            // are we done adding?
            if (isEnded)
                return false;

            // special case: waiters present and no queued elements -- bypass
            // the queue and complete the first waiter directly
            if (waiters.Count > 0 && queue.Count == 0)
            {
                var w = waiters.Dequeue();
                w.Completed(item);
                return true;
            }

            queue.Queue.Enqueue(item);
            completeWaiters();
        }
        return true;
    }

    // Pairs queued items with parked waiters (FIFO) until one side runs out.
    // Caller must hold queue.Lock.
    void completeWaiters()
    {
        while (waiters.Count > 0 && queue.Count > 0)
        {
            var w = waiters.Dequeue();
            var x = queue.Queue.Dequeue();
            w.Completed(x);
        }
    }

    /// <summary>
    /// Removes and returns the head item, waiting for an item when the queue
    /// is empty. Returns default(N) once the queue is complete.
    /// </summary>
    public async Task<N> Dequeue(CancellationToken token)
    {
        Waiter<N> waiter = null;
        using (await queue.Lock.Wait(token))
        {
            if (isComplete)
                return default(N);

            if (waiters.Count == 0 && queue.Count > 0)
                return queue.Queue.Dequeue();

            // Queue empty or others already waiting: park a waiter and await
            // its completion outside the lock.
            waiter = new Waiter<N>(token);
            if (token != CancellationToken.None)
                waiter.OnCancel += Waiter_OnCancel;

            waiters.Enqueue(waiter);
            completeWaiters();
        }
        var value = await waiter;
        return value;
    }

    // Cancellation callback: drops a cancelled waiter from the wait list.
    // NOTE(review): runs without taking queue.Lock, so it can race with
    // TryEnqueue/Dequeue mutating `waiters` -- confirm LinkQueue tolerates it.
    private void Waiter_OnCancel(Waiter<N> w)
    {
        waiters.Remove(w);
    }

    /// <summary>Delegates capacity trimming to the underlying queue.</summary>
    public Task TrimExcess(CancellationToken token)
    {
        return queue.TrimExcess(token);
    }
}
}
| {
"content_hash": "5999b8cb079e9601e1038c8973fd324b",
"timestamp": "",
"source": "github",
"line_count": 185,
"max_line_length": 81,
"avg_line_length": 26.221621621621622,
"alnum_prop": 0.48897134611420323,
"repo_name": "zbrad/AsyncLib",
"id": "6b560f8d50135090be77b5463919a956b30940de",
"size": "4853",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/AsyncLibShared/Classes/WaitQueue.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "113"
},
{
"name": "C#",
"bytes": "278050"
}
],
"symlink_target": ""
} |
package amqp
| {
"content_hash": "07c1d43650b7d4a88aa983c724234692",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 12,
"avg_line_length": 13,
"alnum_prop": 0.8461538461538461,
"repo_name": "cloudifice/amqp",
"id": "0e8d02ac5983b3d9ab277114d10f06d0d890fa66",
"size": "13",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "amqp_test.go",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Go",
"bytes": "3339"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Accounts schema rework (auto-generated; annotations added in review).

    - Drops the old InvitationRequest model.
    - Introduces AccountRules as an explicit through-model between Account
      and users, carrying a per-membership permission level.
    - Replaces the Account.user FK with an Account.users M2M through
      AccountRules.
    """

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('accounts', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='AccountRules',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # b'A'/b'W'/b'R' bytestring choices are a Python 2-era artifact.
                ('permissions', models.CharField(choices=[(b'A', b'Administration'), (b'W', b'Read/write'), (b'R', b'Read')], max_length=1)),
            ],
        ),
        migrations.DeleteModel(
            name='InvitationRequest',
        ),
        migrations.AlterModelOptions(
            name='account',
            options={'ordering': ('create', 'name'), 'verbose_name': 'Account'},
        ),
        migrations.RemoveField(
            model_name='account',
            name='user',
        ),
        migrations.AlterField(
            model_name='account',
            name='create',
            field=models.DateField(auto_now_add=True, verbose_name='Creation date'),
        ),
        migrations.AddField(
            model_name='accountrules',
            name='account',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='accounts.Account'),
        ),
        migrations.AddField(
            model_name='accountrules',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='account',
            name='users',
            field=models.ManyToManyField(related_name='account', through='accounts.AccountRules', to=settings.AUTH_USER_MODEL),
        ),
    ]
| {
"content_hash": "5f480b94311d8498119451cfc7476522",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 141,
"avg_line_length": 35.2037037037037,
"alnum_prop": 0.5791688584955287,
"repo_name": "sebastienbarbier/723e_server",
"id": "41fc6dff72dc73ef279bea6d59eebb49e65388dc",
"size": "1974",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "seven23/models/accounts/migrations/0002_auto_20161128_1335.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "182572"
},
{
"name": "Makefile",
"bytes": "352"
},
{
"name": "Python",
"bytes": "99185"
}
],
"symlink_target": ""
} |
require 'spec_helper'
describe Project do
# Association wiring expected on Project (shoulda-matchers).
describe 'associations' do
  it { is_expected.to belong_to(:group) }
  it { is_expected.to belong_to(:namespace) }
  it { is_expected.to belong_to(:creator).class_name('User') }
  it { is_expected.to have_many(:users) }
  it { is_expected.to have_many(:events).dependent(:destroy) }
  it { is_expected.to have_many(:merge_requests).dependent(:destroy) }
  it { is_expected.to have_many(:issues).dependent(:destroy) }
  it { is_expected.to have_many(:milestones).dependent(:destroy) }
  it { is_expected.to have_many(:project_members).dependent(:destroy) }
  it { is_expected.to have_many(:notes).dependent(:destroy) }
  it { is_expected.to have_many(:snippets).class_name('ProjectSnippet').dependent(:destroy) }
  it { is_expected.to have_many(:deploy_keys_projects).dependent(:destroy) }
  it { is_expected.to have_many(:deploy_keys) }
  it { is_expected.to have_many(:hooks).dependent(:destroy) }
  it { is_expected.to have_many(:protected_branches).dependent(:destroy) }
  it { is_expected.to have_one(:forked_project_link).dependent(:destroy) }
  it { is_expected.to have_one(:slack_service).dependent(:destroy) }
  it { is_expected.to have_one(:pushover_service).dependent(:destroy) }
  it { is_expected.to have_one(:asana_service).dependent(:destroy) }
end

# Mixins the Project class itself is expected to include.
describe 'modules' do
  subject { described_class }

  it { is_expected.to include_module(Gitlab::ConfigHelper) }
  it { is_expected.to include_module(Gitlab::ShellAdapter) }
  it { is_expected.to include_module(Gitlab::VisibilityLevel) }
  it { is_expected.to include_module(Referable) }
  it { is_expected.to include_module(Sortable) }
end

# Model validations, including the per-user project creation limit.
describe 'validation' do
  let!(:project) { create(:project) }

  it { is_expected.to validate_presence_of(:name) }
  it { is_expected.to validate_uniqueness_of(:name).scoped_to(:namespace_id) }
  it { is_expected.to validate_length_of(:name).is_within(0..255) }
  it { is_expected.to validate_presence_of(:path) }
  it { is_expected.to validate_uniqueness_of(:path).scoped_to(:namespace_id) }
  it { is_expected.to validate_length_of(:path).is_within(0..255) }
  it { is_expected.to validate_length_of(:description).is_within(0..2000) }
  it { is_expected.to validate_presence_of(:creator) }
  it { is_expected.to validate_length_of(:issues_tracker_id).is_within(0..255) }
  it { is_expected.to validate_presence_of(:namespace) }

  it 'should not allow new projects beyond user limits' do
    # Creator stub reports a limit of 0, so any new project must be rejected.
    project2 = build(:project)
    allow(project2).to receive(:creator).and_return(double(can_create_project?: false, projects_limit: 0).as_null_object)
    expect(project2).not_to be_valid
    expect(project2.errors[:limit_reached].first).to match(/Your project limit is 0/)
  end
end

# Public interface smoke checks.
describe 'Respond to' do
  it { is_expected.to respond_to(:url_to_repo) }
  it { is_expected.to respond_to(:repo_exists?) }
  it { is_expected.to respond_to(:satellite) }
  it { is_expected.to respond_to(:update_merge_requests) }
  it { is_expected.to respond_to(:execute_hooks) }
  it { is_expected.to respond_to(:name_with_namespace) }
  it { is_expected.to respond_to(:owner) }
  it { is_expected.to respond_to(:path_with_namespace) }
end
describe '#to_reference' do
  let(:project) { create(:empty_project) }

  it 'returns a String reference to the object' do
    expect(project.to_reference).to eq project.path_with_namespace
  end
end

it 'should return valid url to repo' do
  project = Project.new(path: 'somewhere')
  expect(project.url_to_repo).to eq(Gitlab.config.gitlab_shell.ssh_path_prefix + 'somewhere.git')
end

describe "#web_url" do
  let(:project) { create(:empty_project, path: "somewhere") }

  it 'returns the full web URL for this repo' do
    expect(project.web_url).to eq("#{Gitlab.config.gitlab.url}/#{project.namespace.path}/somewhere")
  end
end

describe "#web_url_without_protocol" do
  let(:project) { create(:empty_project, path: "somewhere") }

  it 'returns the web URL without the protocol for this repo' do
    # Strips the scheme ("http://"/"https://") from the configured GitLab URL.
    expect(project.web_url_without_protocol).to eq("#{Gitlab.config.gitlab.url.split('://')[1]}/#{project.namespace.path}/somewhere")
  end
end
describe 'last_activity methods' do
  let(:project) { create(:project) }
  let(:last_event) { double(created_at: Time.now) }

  describe 'last_activity' do
    it 'should alias last_activity to last_event' do
      allow(project).to receive(:last_event).and_return(last_event)
      expect(project.last_activity).to eq(last_event)
    end
  end

  describe 'last_activity_date' do
    it 'returns the creation date of the project\'s last event if present' do
      # Fixed: assert against the event actually created for this project.
      # The previous assertion compared to the unrelated `last_event` double,
      # leaving `last_activity_event` unused; it only passed because both
      # timestamps were "now", making the test clock-dependent.
      last_activity_event = create(:event, project: project)
      expect(project.last_activity_at.to_i).to eq(last_activity_event.created_at.to_i)
    end

    it 'returns the project\'s last update date if it has no events' do
      expect(project.last_activity_date).to eq(project.updated_at)
    end
  end
end
describe '#get_issue' do
  let(:project) { create(:empty_project) }
  let(:issue) { create(:issue, project: project) }

  context 'with default issues tracker' do
    it 'returns an issue' do
      expect(project.get_issue(issue.iid)).to eq issue
    end

    it 'returns nil when no issue found' do
      expect(project.get_issue(999)).to be_nil
    end
  end

  context 'with external issues tracker' do
    before do
      allow(project).to receive(:default_issues_tracker?).and_return(false)
    end

    it 'returns an ExternalIssue' do
      # External trackers wrap the raw reference instead of looking up a record.
      issue = project.get_issue('FOO-1234')
      expect(issue).to be_kind_of(ExternalIssue)
      expect(issue.iid).to eq 'FOO-1234'
      expect(issue.project).to eq project
    end
  end
end

describe '#issue_exists?' do
  let(:project) { create(:empty_project) }

  it 'is truthy when issue exists' do
    expect(project).to receive(:get_issue).and_return(double)
    expect(project.issue_exists?(1)).to be_truthy
  end

  it 'is falsey when issue does not exist' do
    expect(project).to receive(:get_issue).and_return(nil)
    expect(project.issue_exists?(1)).to be_falsey
  end
end

describe :update_merge_requests do
  let(:project) { create(:project) }
  let(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
  let(:key) { create(:key, user_id: project.owner.id) }
  # Oldest and newest commits on the merge request's source branch.
  let(:prev_commit_id) { merge_request.commits.last.id }
  let(:commit_id) { merge_request.commits.first.id }

  it 'should close merge request if last commit from source branch was pushed to target branch' do
    project.update_merge_requests(prev_commit_id, commit_id, "refs/heads/#{merge_request.target_branch}", key.user)
    merge_request.reload
    expect(merge_request.merged?).to be_truthy
  end

  it 'should update merge request commits with new one if pushed to source branch' do
    project.update_merge_requests(prev_commit_id, commit_id, "refs/heads/#{merge_request.source_branch}", key.user)
    merge_request.reload
    expect(merge_request.last_commit.id).to eq(commit_id)
  end
end
describe :find_with_namespace do
context 'with namespace' do
before do
@group = create :group, name: 'gitlab'
@project = create(:project, name: 'gitlabhq', namespace: @group)
end
it { expect(Project.find_with_namespace('gitlab/gitlabhq')).to eq(@project) }
it { expect(Project.find_with_namespace('gitlab-ci')).to be_nil }
end
end
describe :to_param do
  context 'with namespace' do
    before do
      @group = create :group, name: 'gitlab'
      @project = create(:project, name: 'gitlabhq', namespace: @group)
    end

    # #to_param yields only the project slug, not the namespaced path.
    it { expect(@project.to_param).to eq('gitlabhq') }
  end
end
# Smoke test: #repository exposes the project's repo as a Repository object.
describe :repository do
  let(:project) { create(:project) }

  it 'should return valid repo' do
    expect(project.repository).to be_kind_of(Repository)
  end
end
# default_issues_tracker? distinguishes the built-in tracker (plain
# :project factory) from an externally configured one (:redmine_project).
describe :default_issues_tracker? do
  let(:project) { create(:project) }
  let(:ext_project) { create(:redmine_project) }

  it "should be true if used internal tracker" do
    expect(project.default_issues_tracker?).to be_truthy
  end

  it "should be false if used other tracker" do
    expect(ext_project.default_issues_tracker?).to be_falsey
  end
end
# A tracker id only makes sense for an external tracker, and only while the
# issues feature itself is enabled.
describe :can_have_issues_tracker_id? do
  let(:project) { create(:project) }
  let(:ext_project) { create(:redmine_project) }

  it 'should be true for projects with external issues tracker if issues enabled' do
    expect(ext_project.can_have_issues_tracker_id?).to be_truthy
  end

  it 'should be false for projects with internal issue tracker if issues enabled' do
    expect(project.can_have_issues_tracker_id?).to be_falsey
  end

  it 'should be always false if issues disabled' do
    project.issues_enabled = false
    ext_project.issues_enabled = false

    expect(project.can_have_issues_tracker_id?).to be_falsey
    expect(ext_project.can_have_issues_tracker_id?).to be_falsey
  end
end
# Open branches are those not covered by a protected-branch entry.
describe :open_branches do
  let(:project) { create(:project) }

  before do
    project.protected_branches.create(name: 'master')
  end

  # assumes the :project factory's test repository contains a 'feature'
  # branch — TODO confirm against the seed repo
  it { expect(project.open_branches.map(&:name)).to include('feature') }
  it { expect(project.open_branches.map(&:name)).not_to include('master') }
end
describe '#star_count' do
  # Walks the counter up and down as two users toggle their stars.
  # Note: the redundant standalone `project.reload` calls that preceded
  # expectations already using `project.reload.star_count` were removed —
  # the inline reload alone fetches the fresh counter.
  it 'counts stars from multiple users' do
    user1 = create :user
    user2 = create :user
    project = create :project, :public

    expect(project.star_count).to eq(0)

    user1.toggle_star(project)
    expect(project.reload.star_count).to eq(1)

    user2.toggle_star(project)
    expect(project.reload.star_count).to eq(2)

    user1.toggle_star(project)
    expect(project.reload.star_count).to eq(1)

    user2.toggle_star(project)
    expect(project.reload.star_count).to eq(0)
  end

  # Starring one project must never bleed into another project's counter.
  it 'counts stars on the right project' do
    user = create :user
    project1 = create :project, :public
    project2 = create :project, :public

    expect(project1.star_count).to eq(0)
    expect(project2.star_count).to eq(0)

    user.toggle_star(project1)
    project1.reload
    project2.reload
    expect(project1.star_count).to eq(1)
    expect(project2.star_count).to eq(0)

    user.toggle_star(project1)
    project1.reload
    project2.reload
    expect(project1.star_count).to eq(0)
    expect(project2.star_count).to eq(0)

    user.toggle_star(project2)
    project1.reload
    project2.reload
    expect(project1.star_count).to eq(0)
    expect(project2.star_count).to eq(1)

    user.toggle_star(project2)
    project1.reload
    project2.reload
    expect(project1.star_count).to eq(0)
    expect(project2.star_count).to eq(0)
  end

  # Destroying a user must remove their star (presumably via a dependent
  # destroy on the stars association — confirm) and decrement the counter.
  it 'is decremented when an upvoter account is deleted' do
    user = create :user
    project = create :project, :public
    user.toggle_star(project)
    expect(project.reload.star_count).to eq(1)

    user.destroy
    expect(project.reload.star_count).to eq(0)
  end
end
describe :avatar_type do
  let(:project) { create(:project) }

  it 'should be true if avatar is image' do
    project.update_attribute(:avatar, 'uploads/avatar.png')
    expect(project.avatar_type).to be_truthy
  end

  # NOTE(review): despite the description, the assertion shows #avatar_type
  # returns the validation-message array for a non-image, not literal false.
  it 'should be false if avatar is html page' do
    project.update_attribute(:avatar, 'uploads/avatar.html')
    expect(project.avatar_type).to eq(['only images allowed'])
  end
end
describe :avatar_url do
  subject { project.avatar_url }

  let(:project) { create(:project) }

  # Uploaded avatars are served from the uploads path under the project id.
  context 'When avatar file is uploaded' do
    before do
      project.update_columns(avatar: 'uploads/avatar.png')
      allow(project.avatar).to receive(:present?) { true }
    end

    let(:avatar_path) do
      "/uploads/project/avatar/#{project.id}/uploads/avatar.png"
    end

    # 'http://localhost' presumably comes from the test host configuration —
    # confirm before relying on it elsewhere.
    it { should eq "http://localhost#{avatar_path}" }
  end

  # An avatar committed into the repository is served via the project route.
  context 'When avatar file in git' do
    before do
      allow(project).to receive(:avatar_in_git) { true }
    end

    let(:avatar_path) do
      "/#{project.namespace.name}/#{project.path}/avatar"
    end

    it { should eq "http://localhost#{avatar_path}" }
  end
end
end
| {
"content_hash": "a6ffda71e85afd3fbc306718523f140d",
"timestamp": "",
"source": "github",
"line_count": 373,
"max_line_length": 135,
"avg_line_length": 33.61662198391421,
"alnum_prop": 0.6621740170667517,
"repo_name": "sakishum/gitlabhq",
"id": "5d40754d59dda7c36b76aa0796fc04fd29aa802b",
"size": "13904",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "spec/models/project_spec.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "113860"
},
{
"name": "CoffeeScript",
"bytes": "139373"
},
{
"name": "Cucumber",
"bytes": "119759"
},
{
"name": "HTML",
"bytes": "448401"
},
{
"name": "JavaScript",
"bytes": "29805"
},
{
"name": "Ruby",
"bytes": "2418726"
},
{
"name": "Shell",
"bytes": "14336"
}
],
"symlink_target": ""
} |
package me.smartco.akstore.store.mongodb.mall;
import com.fasterxml.jackson.annotation.JsonView;
import me.smartco.akstore.common.model.Views;
import me.smartco.akstore.common.model.AbstractDocument;
import me.smartco.akstore.common.model.Attachment;
import me.smartco.akstore.common.model.Location;
import me.smartco.akstore.store.mongodb.market.DispatchProduct;
import me.smartco.akstore.store.mongodb.partner.Partner;
import me.smartco.akstore.store.mongodb.partner.Shop;
import org.springframework.data.annotation.PersistenceConstructor;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.data.mongodb.core.mapping.DBRef;
import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.util.Assert;
import java.math.BigDecimal;
import java.util.*;
/**
 * Mall product persisted in MongoDB. Carries pricing/discount data, a
 * three-level category classification, stock counters and references to the
 * owning {@link Shop} and {@link Partner}.
 *
 * Created by libin on 14-11-7.
 */
@Document
public class Product extends AbstractDocument {

    private String name, shortDescription, description;
    private BigDecimal price = BigDecimal.ZERO;
    // Multiplier applied to price; 1.0 means no discount.
    private float discount = 1.0f;
    private String remark = "";
    private float weight = 0f;
    private int quantity = 0;
    // Three-level category hierarchy (top -> leaf).
    private Category cat0;
    private Category cat1;
    private Category cat2;
    // Stored as [lat, lng]; see setLocation(Location).
    private Double[] location;
    private Integer stock = 0;
    private Integer sold = 0;
    private String sn;
    private Boolean feature = false;
    private Map<String, String> attributes = new HashMap<String, String>();

    @DBRef
    private Shop shop;

    // copy from shop
    @DBRef
    private Partner partner;

    @DBRef
    private Attachment picture;

    private Boolean active = true;

    private Set<ProductElement> elementSet = new HashSet<ProductElement>();

    /**
     * Default paging for product listings: 50 items per page, best sellers
     * (highest {@code sold}) first.
     *
     * @param page zero-based page index
     */
    public static Pageable getDefaultPageable(int page) {
        Pageable pageable = new PageRequest(page, 50, Sort.Direction.DESC, "sold");
        return pageable;
    }

    /**
     * Creates a new {@link Product} with the given name and no description.
     *
     * @param shop must not be {@literal null}.
     * @param name must not be {@literal null} or empty.
     * @param price must not be {@literal null} or less than or equal to zero.
     */
    public Product(Shop shop, String name, BigDecimal price) {
        this(shop, name, price, null);
    }

    /**
     * Creates a new {@link Product} from the given name and description,
     * copying location and partner from the owning shop.
     *
     * @param shop must not be {@literal null}.
     * @param name must not be {@literal null} or empty.
     * @param price must not be {@literal null} or less than or equal to zero.
     * @param description may be {@literal null}.
     */
    @PersistenceConstructor
    public Product(Shop shop, String name, BigDecimal price, String description) {
        Assert.hasText(name, "Name must not be null or empty!");
        Assert.isTrue(BigDecimal.ZERO.compareTo(price) < 0, "Price must be greater than zero!");
        Assert.notNull(shop);

        this.shop = shop;
        setLocation(shop.getLocation());
        this.partner = shop.partner();
        this.name = name;
        this.price = price;
        this.description = description;
    }

    /**
     * Sets the attribute with the given name to the given value. A
     * {@literal null} value removes the attribute entirely.
     *
     * @param name must not be {@literal null} or empty.
     * @param value the value to store, or {@literal null} to remove the entry.
     */
    public void setAttribute(String name, String value) {
        Assert.hasText(name);

        if (value == null) {
            // Bug fix: previously this removed by *value* (attributes.remove(value)
            // with value == null), which never matches a key and silently left the
            // stale attribute in place. Remove by key instead.
            this.attributes.remove(name);
        } else {
            this.attributes.put(name, value);
        }
    }

    /**
     * Returns the {@link Product}'s name.
     */
    @JsonView(Views.Brief.class)
    public String getName() {
        return name;
    }

    /**
     * Returns the {@link Product}'s description.
     */
    @JsonView(Views.Detail.class)
    public String getDescription() {
        return description;
    }

    /**
     * Returns all custom attributes of the {@link Product} as an
     * unmodifiable view.
     */
    @JsonView(Views.Detail.class)
    public Map<String, String> getAttributes() {
        return Collections.unmodifiableMap(attributes);
    }

    /**
     * Returns the (undiscounted) list price of the {@link Product}.
     */
    @JsonView(Views.Brief.class)
    public BigDecimal getPrice() {
        return price;
    }

    /**
     * Returns the effective price: list price times discount, rounded
     * half-up to two decimal places.
     */
    public BigDecimal getSalePrice() {
        return price.multiply(BigDecimal.valueOf(discount)).setScale(2, BigDecimal.ROUND_HALF_UP);
    }

    private void setLocation(Double[] location) {
        this.location = location;
    }

    /** Returns the owning shop (non-getter name keeps it out of serialization). */
    public Shop shop() {
        return shop;
    }

    @JsonView(Views.Detail.class)
    public Boolean getActive() {
        return active;
    }

    public void setActive(Boolean active) {
        this.active = active;
    }

    /** Assigns all three category levels at once. */
    public void setCategories(Category cat0, Category cat1, Category cat2) {
        this.cat0 = cat0;
        this.cat1 = cat1;
        this.cat2 = cat2;
    }

    @JsonView(Views.Detail.class)
    public Category getCat0() {
        return cat0;
    }

    public void setCat0(Category cat0) {
        this.cat0 = cat0;
    }

    public Category getCat1() {
        return cat1;
    }

    public void setCat1(Category cat1) {
        this.cat1 = cat1;
    }

    @JsonView(Views.Detail.class)
    public Category getCat2() {
        return cat2;
    }

    public void setCat2(Category cat2) {
        this.cat2 = cat2;
    }

    /** Returns the raw picture attachment reference. */
    public Attachment picture() {
        return picture;
    }

    /** Returns the picture's path, or {@literal null} when no picture is set. */
    @JsonView(Views.Brief.class)
    public String getPictureUrl() {
        if (null != picture)
            return picture.getPath();
        return null;
    }

    /** Returns the picture's id, or {@literal null} when no picture is set. */
    public String getPicture_id() {
        if (null != picture)
            return picture.getId();
        return null;
    }

    // Convenience pass-throughs exposing owning-shop data on the product.
    public String getShopName() {
        return shop.getName();
    }

    public String getShopId() {
        return shop.getId();
    }

    public BigDecimal getMinFare() {
        return shop.getMinFare();
    }

    public void setPicture(Attachment picture) {
        this.picture = picture;
    }

    public String getRemark() {
        return remark;
    }

    public void setRemark(String remark) {
        this.remark = remark;
    }

    @JsonView(Views.Detail.class)
    public float getWeight() {
        return weight;
    }

    public void setWeight(float weight) {
        this.weight = weight;
    }

    public float getDiscount() {
        return discount;
    }

    public void setDiscount(float discount) {
        this.discount = discount;
    }

    public void setPrice(BigDecimal price) {
        this.price = price;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public void setName(String name) {
        this.name = name;
    }

    /** Replaces the whole attribute map (bulk counterpart of the two-arg overload). */
    public void setAttribute(Map<String, String> attributes) {
        this.attributes = attributes;
    }

    @JsonView(Views.Brief.class)
    public String getShortDescription() {
        return shortDescription;
    }

    public void setShortDescription(String shortDescription) {
        this.shortDescription = shortDescription;
    }

    /** Returns the location as a {@link Location} copy of the stored [lat, lng] pair. */
    @JsonView(Views.Detail.class)
    public Location getLocation() {
        return Location.copy(location);
    }

    public void setLocation(Location loc) {
        this.location = new Double[]{loc.getLat(), loc.getLng()};
    }

    public void setStock(Integer stock) {
        this.stock = stock;
    }

    public void setSold(Integer sold) {
        this.sold = sold;
    }

    public void setSn(String sn) {
        this.sn = sn;
    }

    @JsonView(Views.Detail.class)
    public Boolean getFeature() {
        return feature;
    }

    public void setFeature(Boolean feature) {
        this.feature = feature;
    }

    @JsonView(Views.Detail.class)
    public Integer getStock() {
        return stock;
    }

    public Integer getSold() {
        return sold;
    }

    /** Origin is stored as a plain attribute rather than a dedicated field. */
    public String getOrigin() {
        return attributes.get("origin");
    }

    @JsonView(Views.Detail.class)
    public String getSn() {
        return sn;
    }

    /** Returns the dispatch-product composition of this product. */
    public Set<ProductElement> elementSet() {
        return elementSet;
    }

    public void addElement(DispatchProduct product, int quantity) {
        elementSet.add(new ProductElement(product, quantity));
    }

    public void clearElement() {
        elementSet.clear();
    }

    /**
     * Sums the prices of all composed elements, rounded half-up to two
     * decimals. NOTE(review): each element's quantity is not referenced
     * here — presumably ProductElement#getPrice already accounts for it;
     * confirm before relying on this total.
     */
    //@JsonView(Views.Protected.class)
    public BigDecimal getPredictCost() {
        BigDecimal cost = BigDecimal.ZERO;
        for (ProductElement pe : elementSet) {
            cost = cost.add(pe.getPrice());
        }
        return cost.setScale(2, BigDecimal.ROUND_HALF_UP);
    }

    private int getQuantity() {
        return quantity;
    }

    public void setQuantity(int quantity) {
        this.quantity = quantity;
    }
}
| {
"content_hash": "6078f27df2b83a08139dba9a09f238ae",
"timestamp": "",
"source": "github",
"line_count": 367,
"max_line_length": 98,
"avg_line_length": 23.762942779291553,
"alnum_prop": 0.6299736268776517,
"repo_name": "redlion99/akstore",
"id": "d574f40bc3859fce36067e9bae4813c8104a61dc",
"size": "8721",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "akstore-server/akstore-mall/src/main/java/me/smartco/akstore/store/mongodb/mall/Product.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "60313"
},
{
"name": "HTML",
"bytes": "61286"
},
{
"name": "Java",
"bytes": "218615"
},
{
"name": "JavaScript",
"bytes": "534757"
},
{
"name": "Scala",
"bytes": "61713"
}
],
"symlink_target": ""
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="ko">
<head>
<!-- Generated by javadoc (version 1.7.0_21) on Wed Feb 26 23:03:20 KST 2014 -->
<title>Uses of Class etri.sdn.controller.protocol.io.IOFHandler.Role</title>
<meta name="date" content="2014-02-26">
<link rel="stylesheet" type="text/css" href="../../../../../../stylesheet.css" title="Style">
</head>
<body>
<script type="text/javascript"><!--
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class etri.sdn.controller.protocol.io.IOFHandler.Role";
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar_top">
<!-- -->
</a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../../etri/sdn/controller/protocol/io/IOFHandler.Role.html" title="enum in etri.sdn.controller.protocol.io">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../../index-files/index-1.html">Index</a></li>
<li><a href="../../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../../index.html?etri/sdn/controller/protocol/io/class-use/IOFHandler.Role.html" target="_top">Frames</a></li>
<li><a href="IOFHandler.Role.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="header">
<h2 title="Uses of Class etri.sdn.controller.protocol.io.IOFHandler.Role" class="title">Uses of Class<br>etri.sdn.controller.protocol.io.IOFHandler.Role</h2>
</div>
<div class="classUseContainer">
<ul class="blockList">
<li class="blockList">
<table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing packages, and an explanation">
<caption><span>Packages that use <a href="../../../../../../etri/sdn/controller/protocol/io/IOFHandler.Role.html" title="enum in etri.sdn.controller.protocol.io">IOFHandler.Role</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Package</th>
<th class="colLast" scope="col">Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><a href="#etri.sdn.controller">etri.sdn.controller</a></td>
<td class="colLast"> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><a href="#etri.sdn.controller.protocol.io">etri.sdn.controller.protocol.io</a></td>
<td class="colLast"> </td>
</tr>
</tbody>
</table>
</li>
<li class="blockList">
<ul class="blockList">
<li class="blockList"><a name="etri.sdn.controller">
<!-- -->
</a>
<h3>Uses of <a href="../../../../../../etri/sdn/controller/protocol/io/IOFHandler.Role.html" title="enum in etri.sdn.controller.protocol.io">IOFHandler.Role</a> in <a href="../../../../../../etri/sdn/controller/package-summary.html">etri.sdn.controller</a></h3>
<table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing fields, and an explanation">
<caption><span>Fields in <a href="../../../../../../etri/sdn/controller/package-summary.html">etri.sdn.controller</a> declared as <a href="../../../../../../etri/sdn/controller/protocol/io/IOFHandler.Role.html" title="enum in etri.sdn.controller.protocol.io">IOFHandler.Role</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Field and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><code>private <a href="../../../../../../etri/sdn/controller/protocol/io/IOFHandler.Role.html" title="enum in etri.sdn.controller.protocol.io">IOFHandler.Role</a></code></td>
<td class="colLast"><span class="strong">OFController.</span><code><strong><a href="../../../../../../etri/sdn/controller/OFController.html#role">role</a></strong></code>
<div class="block">role of this controller.</div>
</td>
</tr>
</tbody>
</table>
<table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
<caption><span>Methods in <a href="../../../../../../etri/sdn/controller/package-summary.html">etri.sdn.controller</a> that return <a href="../../../../../../etri/sdn/controller/protocol/io/IOFHandler.Role.html" title="enum in etri.sdn.controller.protocol.io">IOFHandler.Role</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Method and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><code><a href="../../../../../../etri/sdn/controller/protocol/io/IOFHandler.Role.html" title="enum in etri.sdn.controller.protocol.io">IOFHandler.Role</a></code></td>
<td class="colLast"><span class="strong">OFController.</span><code><strong><a href="../../../../../../etri/sdn/controller/OFController.html#getRole()">getRole</a></strong>()</code>
<div class="block">get the role of this controller.</div>
</td>
</tr>
</tbody>
</table>
</li>
<li class="blockList"><a name="etri.sdn.controller.protocol.io">
<!-- -->
</a>
<h3>Uses of <a href="../../../../../../etri/sdn/controller/protocol/io/IOFHandler.Role.html" title="enum in etri.sdn.controller.protocol.io">IOFHandler.Role</a> in <a href="../../../../../../etri/sdn/controller/protocol/io/package-summary.html">etri.sdn.controller.protocol.io</a></h3>
<table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing fields, and an explanation">
<caption><span>Fields in <a href="../../../../../../etri/sdn/controller/protocol/io/package-summary.html">etri.sdn.controller.protocol.io</a> declared as <a href="../../../../../../etri/sdn/controller/protocol/io/IOFHandler.Role.html" title="enum in etri.sdn.controller.protocol.io">IOFHandler.Role</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Field and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><code>private <a href="../../../../../../etri/sdn/controller/protocol/io/IOFHandler.Role.html" title="enum in etri.sdn.controller.protocol.io">IOFHandler.Role</a></code></td>
<td class="colLast"><span class="strong">OFSwitchImpl.</span><code><strong><a href="../../../../../../etri/sdn/controller/protocol/io/OFSwitchImpl.html#role">role</a></strong></code>
<div class="block">This field is used to exchange information with switch.</div>
</td>
</tr>
</tbody>
</table>
<table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
<caption><span>Methods in <a href="../../../../../../etri/sdn/controller/protocol/io/package-summary.html">etri.sdn.controller.protocol.io</a> that return <a href="../../../../../../etri/sdn/controller/protocol/io/IOFHandler.Role.html" title="enum in etri.sdn.controller.protocol.io">IOFHandler.Role</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Method and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><code><a href="../../../../../../etri/sdn/controller/protocol/io/IOFHandler.Role.html" title="enum in etri.sdn.controller.protocol.io">IOFHandler.Role</a></code></td>
<td class="colLast"><span class="strong">OFSwitchImpl.</span><code><strong><a href="../../../../../../etri/sdn/controller/protocol/io/OFSwitchImpl.html#getRole()">getRole</a></strong>()</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code><a href="../../../../../../etri/sdn/controller/protocol/io/IOFHandler.Role.html" title="enum in etri.sdn.controller.protocol.io">IOFHandler.Role</a></code></td>
<td class="colLast"><span class="strong">IOFSwitch.</span><code><strong><a href="../../../../../../etri/sdn/controller/protocol/io/IOFSwitch.html#getRole()">getRole</a></strong>()</code>
<div class="block">Get the current role of the controller for the switch</div>
</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code><a href="../../../../../../etri/sdn/controller/protocol/io/IOFHandler.Role.html" title="enum in etri.sdn.controller.protocol.io">IOFHandler.Role</a></code></td>
<td class="colLast"><span class="strong">IOFHandler.</span><code><strong><a href="../../../../../../etri/sdn/controller/protocol/io/IOFHandler.html#getRole()">getRole</a></strong>()</code>
<div class="block">Get the current role of the controller</div>
</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static <a href="../../../../../../etri/sdn/controller/protocol/io/IOFHandler.Role.html" title="enum in etri.sdn.controller.protocol.io">IOFHandler.Role</a></code></td>
<td class="colLast"><span class="strong">IOFHandler.Role.</span><code><strong><a href="../../../../../../etri/sdn/controller/protocol/io/IOFHandler.Role.html#valueOf(java.lang.String)">valueOf</a></strong>(java.lang.String name)</code>
<div class="block">Returns the enum constant of this type with the specified name.</div>
</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static <a href="../../../../../../etri/sdn/controller/protocol/io/IOFHandler.Role.html" title="enum in etri.sdn.controller.protocol.io">IOFHandler.Role</a>[]</code></td>
<td class="colLast"><span class="strong">IOFHandler.Role.</span><code><strong><a href="../../../../../../etri/sdn/controller/protocol/io/IOFHandler.Role.html#values()">values</a></strong>()</code>
<div class="block">Returns an array containing the constants of this enum type, in
the order they are declared.</div>
</td>
</tr>
</tbody>
</table>
</li>
</ul>
</li>
</ul>
</div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar_bottom">
<!-- -->
</a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../../etri/sdn/controller/protocol/io/IOFHandler.Role.html" title="enum in etri.sdn.controller.protocol.io">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../../index-files/index-1.html">Index</a></li>
<li><a href="../../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../../index.html?etri/sdn/controller/protocol/io/class-use/IOFHandler.Role.html" target="_top">Frames</a></li>
<li><a href="IOFHandler.Role.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
</body>
</html>
| {
"content_hash": "8e0fa3d728c7b6210c0a654d7e51ff33",
"timestamp": "",
"source": "github",
"line_count": 234,
"max_line_length": 354,
"avg_line_length": 51.952991452991455,
"alnum_prop": 0.6544377724767624,
"repo_name": "uni2u/iNaaS",
"id": "abb8d6dece491da050d31c0d3ae62a7e3674e740",
"size": "12157",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "Torpedo/doc/etri/sdn/controller/protocol/io/class-use/IOFHandler.Role.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "29731"
},
{
"name": "HTML",
"bytes": "8941198"
},
{
"name": "Java",
"bytes": "1215119"
},
{
"name": "JavaScript",
"bytes": "622667"
},
{
"name": "PHP",
"bytes": "25856"
}
],
"symlink_target": ""
} |
<resources>

    <!--
        Base application theme, dependent on API level. This theme is replaced
        by AppBaseTheme from res/values-vXX/styles.xml on newer devices.
    -->
    <style name="AppBaseTheme" parent="android:Theme.Light">
        <!--
            Theme customizations available in newer API levels can go in
            res/values-vXX/styles.xml, while customizations related to
            backward-compatibility can go here.
        -->
    </style>

    <!-- Application theme. -->
    <!-- NOTE(review): AppTheme extends Theme.AppCompat directly rather than
         AppBaseTheme; confirm AppBaseTheme is still referenced anywhere
         before removing or re-parenting either style. -->
    <style name="AppTheme" parent="Theme.AppCompat.Light.DarkActionBar">
        <!-- All customizations that are NOT specific to a particular API-level can go here. -->
    </style>

</resources>
| {
"content_hash": "93395fda23761048baeed8aca29566f1",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 96,
"avg_line_length": 35.15,
"alnum_prop": 0.6386913229018493,
"repo_name": "Bedant/AndroidDrawing",
"id": "6b8c3545d48fd3cec611450698df5f0ed402a30d",
"size": "703",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "app/src/main/res/values/styles.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "45734"
}
],
"symlink_target": ""
} |
"use strict";
var compile = require('es6-templates').compile;
var extend = require('extend');
var fs = require('fs');
var gutil = require('gulp-util');
var join = require('path').join;
var through = require('through2');
module.exports = exports = function (options) {
var defaults = {
base: '/',
extension: '.css',
target: 'es6'
};
options = extend({}, defaults, options || {});
return through.obj(function (file, enc, cb) {
if (file.isNull()) {
cb(null, file);
return;
}
if (file.isStream()) {
cb(new gutil.PluginError('gulp-ng2-inline-styles', 'Streaming not supported'));
return;
}
try {
file.contents = new Buffer(inline(file.contents.toString(), options));
this.push(file);
} catch (err) {
this.emit('error', new gutil.PluginError('gulp-ng2-inline-styles', err, {fileName: file.path}));
}
cb();
});
};
// Decorator-metadata keys: the source key we search for, and the key we
// emit in its place.
var STYLE_URLS = 'styleUrls';
var STYLES = 'styles';
/**
 * Replaces every `styleUrls: [...]` occurrence in `file` with a
 * `styles: [...]` declaration containing the inlined stylesheet contents.
 * Handles both single-line and multi-line array literals by scanning each
 * line for the `styleUrls` token and the closing `]`.
 *
 * @param {string} file    full source text of the processed file
 * @param {Object} options plugin options (base, extension, target)
 * @returns {string} the transformed source
 */
function inline(file, options) {
  var index1, index2, startLine, endLine, styleUrls;
  var lines = file.split('\n');
  var preffix = '';
  var suffix = '';

  lines.forEach(function (line, i) {
    index1 = line.indexOf(STYLE_URLS);
    index2 = line.indexOf(']');

    // Single line array definition
    if (index1 >= 0 && index2 > 0) {
      startLine = i;
      styleUrls = lines[i].slice(index1, index2 + 1);
      preffix = line.slice(0, index1);
      suffix = line.slice(index2 + 1);
      lines[i] = preffix + replace(styleUrls, options) + suffix;
    }

    // Multiple line array definition
    // NOTE(review): `index2` here is a truthiness check, which is true for
    // every value except 0 — including -1 (no `]` found). Presumably
    // `index2 < 0` was intended; as written this branch also fires for the
    // single-line case handled above, resetting startLine/preffix from the
    // original (pre-rewrite) line. Confirm intent before changing.
    if (index1 >= 0 && index2) {
      startLine = i;
      preffix = line.slice(0, index1);
    }
    // Closing `]` on a later line: splice the whole declaration out,
    // collapse it to one string, and splice the rewritten form back in.
    if (index2 >= 0 && index1 < 0 && startLine !== undefined) {
      endLine = i;
      var _lines = lines.splice(startLine, (i - startLine + 1));
      styleUrls = _lines.join('');
      lines.splice(startLine, 0, preffix + replace(styleUrls, options));
    }
  });

  return lines.join('\n');
}
// ----------------------
// Utils

/**
 * Rewrites one `styleUrls: [...]` source snippet into `styles: [...]`,
 * replacing each URL with the corresponding stylesheet contents.
 *
 * SECURITY NOTE(review): the array literal is parsed with `eval`, so any
 * expression inside `styleUrls` of a processed file runs at build time —
 * acceptable only for trusted sources.
 *
 * @param {string} styleUrls the matched `styleUrls: [...]` snippet
 * @param {Object} options   plugin options
 * @returns {string} the replacement `styles: [...]` snippet
 */
function replace(styleUrls, options) {
  var styles = '';
  var urls = eval('({' + styleUrls + '}).styleUrls');

  urls.forEach(function (url, i) {
    var coma = i > 0 ? ', ' : '';
    styles += coma + getStylesString(url, options);
  });

  var newLines = STYLES + ': [' + styles + ']';
  // Keep a trailing comma when the original had one, so the surrounding
  // object literal stays syntactically valid.
  newLines += hasTraillingComa(styleUrls) ? ',' : '';

  return newLines;
}
/**
 * Reads the stylesheet at `stylesPath` (resolved against cwd + the
 * configured base) and returns it as a backtick template literal, compiled
 * down to ES5 string concatenation when the target is 'es5'.
 *
 * @param {string} stylesPath stylesheet path from the styleUrls array
 * @param {Object} options    plugin options (base, target)
 * @returns {string} source text for the inlined style entry
 */
function getStylesString(stylesPath, options) {
  var absolutePath = join(process.cwd(), options.base, stylesPath);
  var css = fs.readFileSync(absolutePath, 'utf8');
  var literal = '`' + trimTrailingLineBreak(css) + '`';
  return options.target === 'es5' ? compile(literal) : literal;
}
/**
 * Drops a single trailing newline from `styles`, if present; otherwise
 * returns the input unchanged.
 */
function trimTrailingLineBreak(styles) {
  var segments = styles.split('\n');
  // A trailing '\n' leaves an empty final segment after splitting.
  var last = segments.pop();
  return last === '' ? segments.join('\n') : styles;
}
/**
 * Returns true when the `styleUrls` snippet ends with a comma.
 * (Name typo "Trailling" kept — callers use it.)
 */
function hasTraillingComa(styleUrls) {
  // Simplified: the comparison is already a boolean; the `? true : false`
  // ternary was redundant.
  return styleUrls.slice(-1) === ',';
}
| {
"content_hash": "560dec6ba7ec344dfbf1ef606d6dea70",
"timestamp": "",
"source": "github",
"line_count": 119,
"max_line_length": 102,
"avg_line_length": 25.11764705882353,
"alnum_prop": 0.5868183338909334,
"repo_name": "ludohenin/gulp-inline-ng2-styles",
"id": "e676c8174bc6686f115fafe79d72f1c9ba279601",
"size": "2989",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "index.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "7295"
}
],
"symlink_target": ""
} |
module TextRank
  ##
  # Computes, for two equal-length fingerprints, how many elements their
  # first N entries have in common, for every prefix length N.
  #
  # Example:
  #
  #   FingerprintOverlap.new(%w[a b c d], %w[b e a c]).overlap
  #   # => [0, 1, 2, 3]
  #
  # (prefixes of length 1 share nothing; length 2 share "b"; length 3 share
  # "a" and "b"; length 4 share "a", "b" and "c")
  ##
  class FingerprintOverlap

    # Array of running overlap counts, one entry per prefix length
    attr_reader :overlap

    def initialize(values1, values2)
      raise ArgumentError, 'Value size mismatch' if values1.size != values2.size

      @seen_left  = Set.new
      @seen_right = Set.new
      @count      = 0
      @overlap    = walk(values1, values2)
    end

    private

    # Pair the two sequences element-by-element and snapshot the running
    # count after each pair has been absorbed.
    def walk(left_values, right_values)
      left_values.zip(right_values).map do |left, right|
        absorb(left, right)
        @count
      end
    end

    # Fold one element pair into the running count. Kept deliberately
    # low-level (no extra allocations) for performance.
    def absorb(left, right)
      if left == right
        @count += 1
        return
      end

      # Sets are used so a repeated element cannot be double-counted:
      # delete? returns nil when the element was absent.
      if @seen_left.delete?(right)
        @count += 1
      else
        @seen_right << right
      end

      if @seen_right.delete?(left)
        @count += 1
      else
        @seen_left << left
      end
    end

  end
end
| {
"content_hash": "6f7a03d4f285f3cec4a407b3e5f49151",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 89,
"avg_line_length": 26.236363636363638,
"alnum_prop": 0.5835065835065835,
"repo_name": "david-mccullars/text_rank",
"id": "f6949aa294c4de65b169e281705e95d6a2412ecb",
"size": "1443",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "lib/text_rank/fingerprint_overlap.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "10944"
},
{
"name": "Ruby",
"bytes": "88340"
},
{
"name": "Shell",
"bytes": "131"
}
],
"symlink_target": ""
} |
package io.cattle.platform.app;
import io.cattle.platform.extension.spring.ExtensionDiscovery;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.ImportResource;
/**
 * Spring configuration that imports the cattle process XML context and
 * exposes the {@link ExtensionDiscovery} bean.
 */
@Configuration
@ImportResource({"classpath:META-INF/cattle/process/spring-process-context.xml"})
public class ProcessConfig {

    // NOTE(review): the method name is PascalCase, which also becomes the
    // Spring bean name ("ExtensionDiscovery"). Renaming it to camelCase
    // would change the bean id, so it is left as-is; confirm nothing
    // references the bean by name before normalizing.
    @Bean
    ExtensionDiscovery ExtensionDiscovery() {
        return new ExtensionDiscovery();
    }
}
| {
"content_hash": "b5af7bcec4e06a320ccadad99b1d7fbc",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 81,
"avg_line_length": 28.11111111111111,
"alnum_prop": 0.8003952569169961,
"repo_name": "Cerfoglg/cattle",
"id": "5c78c179ccf690c952a8a0229e65767ab8050dca",
"size": "506",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "code/packaging/app-config/src/main/java/io/cattle/platform/app/ProcessConfig.java",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "5271"
},
{
"name": "FreeMarker",
"bytes": "71"
},
{
"name": "Java",
"bytes": "6398519"
},
{
"name": "Makefile",
"bytes": "308"
},
{
"name": "Python",
"bytes": "1582534"
},
{
"name": "Shell",
"bytes": "41134"
}
],
"symlink_target": ""
} |
/* eslint-disable @typescript-eslint/no-var-requires */
// Central list of the documentation site's SVG assets, re-exported under
// friendly names. Each asset is loaded with require() (hence the eslint
// disable above) and its `default` property is destructured and re-exported.
// NOTE(review): what `default` contains (a component vs. a URL) depends on
// the bundler's SVG loader configuration — confirm against the build setup.
export const { default: IconLogo } = require('@/resources/images/logo.svg');
export const { default: Guide } = require('@/resources/images/guide.svg');
export const { default: Component } = require('@/resources/images/component.svg');
export const { default: Tools } = require('@/resources/images/tools.svg');
export const { default: Search } = require('@/resources/images/search.svg');
export const { default: Discord } = require('@/resources/images/discord.svg');
export const { default: Design } = require('@/resources/images/design.svg');
export const { default: Extension } = require('@/resources/images/extension.svg');
export const { default: Gitee } = require('@/resources/images/gitee.svg');
export const { default: LightOn } = require('@/resources/images/light-on.svg');
export const { default: LightOff } = require('@/resources/images/light-off.svg');
export const { default: Rtl } = require('@/resources/images/rtl.svg');
export const { default: Ltr } = require('@/resources/images/ltr.svg');
export const { default: Avatar } = require('@/resources/images/avatar-user.svg');
export const { default: Light } = require('@/resources/images/light.svg');
export const { default: Dark } = require('@/resources/images/dark.svg');
export const { default: More } = require('@/resources/images/more.svg');
export const { default: Language } = require('@/resources/images/language.svg');
| {
"content_hash": "824f60c94e5448932901572780a544c3",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 82,
"avg_line_length": 76.05263157894737,
"alnum_prop": 0.7141868512110726,
"repo_name": "suitejs/suite",
"id": "5dbffd66f0b3ec0faef0573e92dbad8695d4efcb",
"size": "1445",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/components/SvgIcons.ts",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "178942"
}
],
"symlink_target": ""
} |
<?php
namespace Magento\UrlRewrite\Controller;
use Magento\UrlRewrite\Controller\Adminhtml\Url\Rewrite;
use Magento\UrlRewrite\Model\OptionProvider;
use Magento\UrlRewrite\Model\UrlFinderInterface;
use Magento\UrlRewrite\Service\V1\Data\UrlRewrite;
/**
 * UrlRewrite Controller Router
 *
 * Matches incoming request paths against stored URL rewrites and either
 * forwards the request to the rewrite's target path or issues an HTTP
 * redirect, depending on the rewrite's configuration.
 */
class Router implements \Magento\Framework\App\RouterInterface
{
    /** @var \Magento\Framework\App\ActionFactory */
    protected $actionFactory;

    /** @var \Magento\Framework\UrlInterface */
    protected $url;

    /** @var \Magento\Store\Model\StoreManagerInterface */
    protected $storeManager;

    /** @var \Magento\Framework\App\ResponseInterface */
    protected $response;

    /** @var UrlFinderInterface */
    protected $urlFinder;

    /**
     * @param \Magento\Framework\App\ActionFactory $actionFactory
     * @param \Magento\Framework\UrlInterface $url
     * @param \Magento\Store\Model\StoreManagerInterface $storeManager
     * @param \Magento\Framework\App\ResponseInterface $response
     * @param UrlFinderInterface $urlFinder
     */
    public function __construct(
        \Magento\Framework\App\ActionFactory $actionFactory,
        \Magento\Framework\UrlInterface $url,
        \Magento\Store\Model\StoreManagerInterface $storeManager,
        \Magento\Framework\App\ResponseInterface $response,
        UrlFinderInterface $urlFinder
    ) {
        $this->actionFactory = $actionFactory;
        $this->url = $url;
        $this->storeManager = $storeManager;
        $this->response = $response;
        $this->urlFinder = $urlFinder;
    }

    /**
     * Match corresponding URL Rewrite and modify request
     *
     * @param \Magento\Framework\App\RequestInterface $request
     * @return \Magento\Framework\App\ActionInterface|null
     */
    public function match(\Magento\Framework\App\RequestInterface $request)
    {
        // When the request comes from a store switch (___from_store is set),
        // resolve the rewrite the old store used for this path, then look up
        // the autogenerated rewrite for the same entity in the current store.
        if ($fromStore = $request->getParam('___from_store')) {
            $oldStoreId = $this->storeManager->getStore($fromStore)->getId();
            $oldRewrite = $this->getRewrite($request->getPathInfo(), $oldStoreId);
            if ($oldRewrite) {
                $rewrite = $this->urlFinder->findOneByData(
                    [
                        UrlRewrite::ENTITY_TYPE => $oldRewrite->getEntityType(),
                        UrlRewrite::ENTITY_ID => $oldRewrite->getEntityId(),
                        UrlRewrite::STORE_ID => $this->storeManager->getStore()->getId(),
                        UrlRewrite::IS_AUTOGENERATED => 1,
                    ]
                );
                // If the current store uses a different request path for the
                // same entity, send the visitor there with a temporary redirect.
                if ($rewrite && $rewrite->getRequestPath() !== $oldRewrite->getRequestPath()) {
                    return $this->redirect($request, $rewrite->getRequestPath(), OptionProvider::TEMPORARY);
                }
            }
        }
        $rewrite = $this->getRewrite($request->getPathInfo(), $this->storeManager->getStore()->getId());
        if ($rewrite === null) {
            // No rewrite for this path: let other routers try to match.
            return null;
        }
        if ($rewrite->getRedirectType()) {
            // The rewrite is configured as an HTTP redirect.
            return $this->processRedirect($request, $rewrite);
        }
        // Plain rewrite: remember the original request path, then forward the
        // request internally to the rewrite's target path.
        $request->setAlias(\Magento\Framework\UrlInterface::REWRITE_REQUEST_PATH_ALIAS, $rewrite->getRequestPath());
        $request->setPathInfo('/' . $rewrite->getTargetPath());
        return $this->actionFactory->create('Magento\Framework\App\Action\Forward');
    }

    /**
     * Redirect the request according to the given rewrite.
     *
     * Custom rewrites whose target already looks like an absolute http(s)
     * URL are redirected verbatim; any other target is first converted into
     * a store URL via the URL builder.
     *
     * @param \Magento\Framework\App\RequestInterface $request
     * @param UrlRewrite $rewrite
     * @return \Magento\Framework\App\ActionInterface|null
     */
    protected function processRedirect($request, $rewrite)
    {
        $target = $rewrite->getTargetPath();
        if ($rewrite->getEntityType() !== Rewrite::ENTITY_TYPE_CUSTOM
            || ($prefix = substr($target, 0, 6)) !== 'http:/' && $prefix !== 'https:'
        ) {
            $target = $this->url->getUrl('', ['_direct' => $target]);
        }
        return $this->redirect($request, $target, $rewrite->getRedirectType());
    }

    /**
     * Set a redirect on the response and mark the request as dispatched.
     *
     * @param \Magento\Framework\App\RequestInterface $request
     * @param string $url
     * @param int $code HTTP redirect status code
     * @return \Magento\Framework\App\ActionInterface
     */
    protected function redirect($request, $url, $code)
    {
        $this->response->setRedirect($url, $code);
        $request->setDispatched(true);
        return $this->actionFactory->create('Magento\Framework\App\Action\Redirect');
    }

    /**
     * Find a rewrite matching the given request path (sans slashes) and store.
     *
     * @param string $requestPath
     * @param int $storeId
     * @return UrlRewrite|null
     */
    protected function getRewrite($requestPath, $storeId)
    {
        return $this->urlFinder->findOneByData([
            UrlRewrite::REQUEST_PATH => trim($requestPath, '/'),
            UrlRewrite::STORE_ID => $storeId,
        ]);
    }
}
| {
"content_hash": "72c48dadffc0b2045c29d8b5d313d52a",
"timestamp": "",
"source": "github",
"line_count": 131,
"max_line_length": 116,
"avg_line_length": 36.2442748091603,
"alnum_prop": 0.6122577927548442,
"repo_name": "j-froehlich/magento2_wk",
"id": "e7cb54058d4688bcb2b537408c14ac190cca7442",
"size": "4856",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "vendor/magento/module-url-rewrite/Controller/Router.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "13636"
},
{
"name": "CSS",
"bytes": "2076720"
},
{
"name": "HTML",
"bytes": "6151072"
},
{
"name": "JavaScript",
"bytes": "2488727"
},
{
"name": "PHP",
"bytes": "12466046"
},
{
"name": "Shell",
"bytes": "6088"
},
{
"name": "XSLT",
"bytes": "19979"
}
],
"symlink_target": ""
} |
<?php
include_once 'HessianFactory.php';
include_once 'HessianTransport.php';
/**
 * Proxy to issue RPC calls to remote Hessian services.
 */
class HessianClient{
    private $url;
    private $options;
    private $typemap;
    protected $factory;

    /**
     * Creates a new client proxy, takes an url and an optional options object
     * that can also be an array.
     *
     * @param string $url URL of the remote Hessian service
     * @param mixed $options HessianOptions instance, options array, or null
     */
    public function __construct($url, $options = null){
        $this->url = $url;
        $this->options = HessianOptions::resolveOptions($options);
        $this->typemap = new HessianTypeMap($this->options->typeMap);
        $this->factory = new HessianFactory();
    }

    /**
     * Issues a call to a remote service. Raises a HessianFault/HessianException
     * if there is an error.
     *
     * @param string $method Name of the method in the remote service
     * @param array $arguments Optional arguments
     * @return mixed
     * @throws HessianException when the method name starts with "__"
     * @throws Exception any error recorded during transport or parsing
     */
    public function __hessianCall($method, $arguments = array()){
        // Methods starting with "__" are reserved for this proxy itself.
        if(strpos($method, "__") === 0)
            throw new HessianException("Cannot call methods that start with __");
        $transport = $this->factory->getTransport($this->options);
        $writer = $this->factory->getWriter(null, $this->options);
        $writer->setTypeMap($this->typemap);

        // Build the calling context shared with interceptors and callbacks.
        $ctx = new HessianCallingContext();
        $ctx->writer = $writer;
        $ctx->transport = $transport;
        $ctx->options = $this->options;
        $ctx->typemap = $this->typemap;
        $ctx->call = new HessianCall($method, $arguments);
        $ctx->url = $this->url;
        $ctx->payload = $writer->writeCall($method, $arguments);

        $args = array($ctx);
        foreach($this->options->interceptors as $interceptor){
            $interceptor->beforeRequest($ctx);
        }
        $this->__handleCallbacks($this->options->before, $args);

        $stream = $transport->getStream($this->url, $ctx->payload, $this->options);
        $parser = $this->factory->getParser($stream, $this->options);
        $parser->setTypeMap($this->typemap);
        // TODO deal with headers, packets and the rest of additional stuff
        $ctx->parser = $parser;
        $ctx->stream = $stream;

        // Initialize $result so it is always defined, even if parsing fails
        // and an afterRequest interceptor clears the recorded error.
        $result = null;
        try{
            $result = $parser->parseTop();
        } catch(Exception $e){
            // Record the error; interceptors may inspect (or clear) it before
            // it is rethrown below.
            $ctx->error = $e;
        }
        foreach($this->options->interceptors as $interceptor){
            $interceptor->afterRequest($ctx);
        }
        $this->__handleCallbacks($this->options->after, $args);
        if($ctx->error instanceof Exception)
            throw $ctx->error;
        return $result;
    }

    /**
     * Invokes user-supplied callbacks (a single callable, or an array that
     * may contain callables) with the given arguments.
     *
     * NOTE(review): when an array is passed, only the FIRST callable entry is
     * invoked because the loop returns after the first call; this existing
     * behavior is kept for compatibility.
     *
     * @param mixed $callbacks callable or array of callables (or null)
     * @param array $arguments arguments forwarded to the callback
     * @return mixed the callback's return value, or null
     */
    private function __handleCallbacks($callbacks, $arguments){
        if(!$callbacks)
            return;
        if(is_callable($callbacks)){
            return call_user_func_array($callbacks, $arguments);
        }
        if(!is_array($callbacks))
            return;
        foreach($callbacks as $call){
            if(is_callable($call)){
                return call_user_func_array($call, $arguments);
            }
        }
    }

    /**
     * Magic function wrapper for the remote call. It will fail if called
     * with methods that start with __ which are conventionally private.
     *
     * @param string $method
     * @param array $arguments
     * @return mixed Result of the remote call
     */
    public function __call($method, $arguments){
        return $this->__hessianCall($method, $arguments);
    }

    /**
     * Returns this client's current options.
     *
     * @return HessianOptions
     */
    public function __getOptions(){
        return $this->options;
    }

    /**
     * Returns the current typemap for this client.
     *
     * @return HessianTypeMap
     */
    public function __getTypeMap(){
        return $this->typemap;
    }
} | {
"content_hash": "3c7f4224efb11b45cd48ebcb201425c8",
"timestamp": "",
"source": "github",
"line_count": 123,
"max_line_length": 81,
"avg_line_length": 28.235772357723576,
"alnum_prop": 0.6446875899798445,
"repo_name": "yeluolei/hessianphp",
"id": "e638e0b4b7003b98c912951378b6a9e58f9dc5a0",
"size": "3693",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "src/HessianClient.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "122557"
},
{
"name": "Smarty",
"bytes": "2236"
}
],
"symlink_target": ""
} |
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>client</artifactId>
<packaging>jar</packaging>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>8</source>
<target>8</target>
</configuration>
</plugin>
</plugins>
</build>
<parent>
<groupId>uk.ac.ebi.biosamples</groupId>
<artifactId>biosamples</artifactId>
<version>5.2.14-SNAPSHOT</version>
<relativePath>../../</relativePath>
</parent>
<dependencies>
<dependency>
<groupId>uk.ac.ebi.biosamples</groupId>
<artifactId>models-core</artifactId>
<version>5.2.14-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>uk.ac.ebi.biosamples</groupId>
<artifactId>properties</artifactId>
<version>5.2.14-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>uk.ac.ebi.biosamples</groupId>
<artifactId>utils-thread</artifactId>
<version>5.2.14-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.springframework.hateoas</groupId>
<artifactId>spring-hateoas</artifactId>
<version>1.2.0</version>
</dependency>
<!-- unmentioned requirements of hateoas -->
<dependency>
<groupId>org.springframework.plugin</groupId>
<artifactId>spring-plugin-core</artifactId>
<version>2.0.0.RELEASE</version>
</dependency>
<!-- needed for traverson to work properly -->
<dependency>
<groupId>com.jayway.jsonpath</groupId>
<artifactId>json-path</artifactId>
</dependency>
<dependency>
<groupId>com.auth0</groupId>
<artifactId>java-jwt</artifactId>
<version>3.2.0</version>
</dependency>
<dependency>
<groupId>javax.annotation</groupId>
<artifactId>javax.annotation-api</artifactId>
<version>1.3.1</version>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<version>1.18.2</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>commons-lang</groupId>
<artifactId>commons-lang</artifactId>
<version>2.3</version>
</dependency>
</dependencies>
</project>
| {
"content_hash": "a3d45fc31819db68ef7288c5d7ba46d4",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 104,
"avg_line_length": 29.120481927710845,
"alnum_prop": 0.6727347952006619,
"repo_name": "EBIBioSamples/biosamples-v4",
"id": "7cc5f686e21151117b1baf5b19609a7217db9941",
"size": "2417",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "client/client/pom.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "27602"
},
{
"name": "Dockerfile",
"bytes": "405"
},
{
"name": "HTML",
"bytes": "175748"
},
{
"name": "Java",
"bytes": "2363410"
},
{
"name": "JavaScript",
"bytes": "192941"
},
{
"name": "Shell",
"bytes": "11122"
}
],
"symlink_target": ""
} |
import {render} from 'react-dom';
import React from 'react';
import Counter from './index';
window.renderCounterClient = function(id, props) {
const node = document.getElementById(id);
render(<Counter {...props} />, node);
};
| {
"content_hash": "ca799877f2e0e4f2d841095f02d88f38",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 50,
"avg_line_length": 28.875,
"alnum_prop": 0.696969696969697,
"repo_name": "jdlehman/js_render",
"id": "b65a5b0ce548ad63a9bc22e773e5bafa16f5c4f1",
"size": "231",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/react/app/assets/javascripts/components/Counter/renderClient.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "2317"
},
{
"name": "Ruby",
"bytes": "20130"
},
{
"name": "Shell",
"bytes": "131"
}
],
"symlink_target": ""
} |
author: slowe
categories: News
comments: true
date: 2011-11-09T13:30:27Z
slug: new-version-of-uimp
tags:
- Networking
- Storage
- Vblock
- Virtualization
title: New Version of UIM/P
url: /2011/11/09/new-version-of-uimp/
wordpress_id: 2459
---
An e-mail popped into my Inbox on Monday notifying me that Unified Infrastructure Manager/Provisioning (UIM/P) version 3.0 has been released and is now generally available. You might already be familiar with UIM/P as it is the primary provisioning tool for Vblocks.
Some features that are new to this version of UIM/P include:
* UIM/P now has the ability not only to add new blades, VLANs, and datastores to a cluster, but also to remove VLANs and individual blades (called "elastic operations").
* UIM/P now supports vCenter Server 5.0 and ESXi 5.0.
* UIM/P has the ability to assign individual pools to specific Vblocks and specific fabrics.
* UIM/P offers integration into vCloud Director so that UIM/P can push resources into a new Provider vDC.
* UIM/P's performance when provisioning infrastructure services is now significantly faster.
Product documentation and the product download are both available via Powerlink.
| {
"content_hash": "a3c043c226e97020c0fe57055052c68b",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 265,
"avg_line_length": 39.56666666666667,
"alnum_prop": 0.7843302443133952,
"repo_name": "lowescott/weblog",
"id": "7493d86ff87250b3e9bb2e87a5ac441e0353c1b4",
"size": "1191",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "content/post/2011-11-09-new-version-of-uimp.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "25289"
},
{
"name": "HTML",
"bytes": "16683"
}
],
"symlink_target": ""
} |
layout: page
title: Liens
header: Liens
group: navigation
---
{% capture site_tags %}{% for tag in site.tags %}{{ tag | first }}{% unless forloop.last %},{% endunless %}{% endfor %}{% endcapture %}
{% assign tag_words = site_tags | split:',' | sort %}
<div class="col-xs-6">
<ul class="nav nav-tabs-vertical">
<li><a href="http://addyosmani.com/blog/" target="_blank">Addy Osmani</a></li>
<li><a href="http://gericci.me/" target="_blank">GeRicci.me</a></li>
</ul>
</div>
<div class="clearfix"></div>
| {
"content_hash": "eb1c9c43b503fa5fa06604fa159310fb",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 135,
"avg_line_length": 29.11111111111111,
"alnum_prop": 0.6030534351145038,
"repo_name": "Gillespie59/Gillespie59.github.io",
"id": "495d553b09869ff3f8044689a7be2a633cfb0795",
"size": "528",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "liens.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "100583"
},
{
"name": "GLSL",
"bytes": "1654"
},
{
"name": "HTML",
"bytes": "5193337"
},
{
"name": "JavaScript",
"bytes": "71908"
},
{
"name": "Ruby",
"bytes": "6889"
}
],
"symlink_target": ""
} |
package dagger.functional.producers.cancellation;
import static com.google.common.truth.Truth.assertThat;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.MoreExecutors;
import dagger.functional.producers.cancellation.CancellationComponent.Dependency;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** Tests cancellation of tasks in production subcomponents. */
@RunWith(JUnit4.class)
public class ProducerSubcomponentCancellationTest {

  // Records which producer nodes have started, completed, or been cancelled.
  private final ProducerTester tester = new ProducerTester();

  // Parent production component; the direct executor keeps execution
  // synchronous and deterministic on the test thread.
  private final CancellationComponent component =
      DaggerCancellationComponent.builder()
          .module(new CancellationModule(tester))
          .dependency(new Dependency(tester))
          .executor(MoreExecutors.directExecutor())
          .build();

  // Subcomponent under test, created from the parent component above.
  private final CancellationSubcomponent subcomponent =
      component.subcomponentBuilder().module(new CancellationSubcomponentModule(tester)).build();

  @Test
  public void initialState() {
    // Nothing should run before any entry point is requested.
    tester.assertNoStartedNodes();
  }

  @Test
  public void cancellingSubcomponent_doesNotCancelParent() throws Exception {
    ListenableFuture<String> subcomponentEntryPoint = subcomponent.subcomponentEntryPoint();

    // Subcomponent entry point depends on all leaves from the parent component and on the single
    // leaf in the subcomponent itself, so they should all have started.
    tester.assertStarted("leaf1", "leaf2", "leaf3", "subLeaf").only();

    assertThat(subcomponentEntryPoint.cancel(true)).isTrue();
    assertThat(subcomponentEntryPoint.isCancelled()).isTrue();

    // None of the tasks running in the parent were cancelled.
    tester.assertNotCancelled("leaf1", "leaf2", "leaf3");
    tester.assertCancelled("subLeaf").only();

    // Finish all the parent tasks to ensure that it can still complete normally.
    tester.complete(
        "dependencyFuture",
        "leaf1",
        "leaf2",
        "leaf3",
        "foo",
        "bar",
        "baz",
        "qux",
        "entryPoint1",
        "entryPoint2");
    assertThat(component.entryPoint1().get(1, MILLISECONDS)).isEqualTo("completed");
    assertThat(component.entryPoint2().get().get(1, MILLISECONDS)).isEqualTo("completed");
  }

  @Test
  public void cancellingSubcomponent_preventsUnstartedNodesFromStarting() {
    ListenableFuture<String> subcomponentEntryPoint = subcomponent.subcomponentEntryPoint();
    tester.complete("subLeaf");
    tester.assertNotStarted("subTask1", "subTask2");

    subcomponentEntryPoint.cancel(true);

    // Complete the remaining dependencies of subTask1 and subTask2.
    tester.complete("leaf1", "leaf2", "leaf3", "foo", "bar", "baz", "qux");

    // Since the subcomponent was cancelled, they are not started.
    tester.assertNotStarted("subTask1", "subTask2");
  }

  @Test
  public void cancellingProducerFromComponentDependency_inSubcomponent_cancelsUnderlyingTask()
      throws Exception {
    // Request subcomponent's entry point.
    ListenableFuture<String> subcomponentEntryPoint = subcomponent.subcomponentEntryPoint();

    // Finish all parent tasks so that the subcomponent's tasks can start.
    tester.complete("leaf1", "leaf2", "leaf3", "foo", "bar", "baz", "qux", "subLeaf");
    tester.assertStarted("subTask1", "subTask2");
    tester.assertNotCancelled("subTask1", "subTask2");

    // When subTask2 runs, it cancels the dependency future.
    // TODO(cgdecker): Is this what we want to happen?
    // On the one hand, there's a policy of "futures from component dependencies come from outside
    // our control and should be cancelled unconditionally". On the other hand, the dependency is
    // coming from the parent component, and the policy is also not to cancel things belonging to
    // the parent unless it allows that.
    tester.assertCancelled("dependencyFuture");

    // The future it returns didn't depend directly on that future, though, so the subcomponent
    // should be able to complete normally.
    tester.complete("subTask1", "subTask2", "subEntryPoint");
    assertThat(subcomponentEntryPoint.get(1, MILLISECONDS)).isEqualTo("completed");
  }
}
| {
"content_hash": "f0a4309b623f24fd667d04408297b99e",
"timestamp": "",
"source": "github",
"line_count": 108,
"max_line_length": 98,
"avg_line_length": 39.31481481481482,
"alnum_prop": 0.7298634008478568,
"repo_name": "ze-pequeno/dagger",
"id": "246bf9ffd86f4005d559dc6fcf2370af1b9ca267",
"size": "4848",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "javatests/dagger/functional/producers/cancellation/ProducerSubcomponentCancellationTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "5750471"
},
{
"name": "Kotlin",
"bytes": "370075"
},
{
"name": "Python",
"bytes": "409"
},
{
"name": "Shell",
"bytes": "22477"
},
{
"name": "Starlark",
"bytes": "411395"
}
],
"symlink_target": ""
} |
<?php
namespace Sybis\Repositories;
use Sybis\User;
use Sybis\Product;
class ProductRepository {

    /**
     * Get the product(s) matching an id.
     *
     * @param integer $id
     * @return \Illuminate\Database\Eloquent\Collection
     */
    public function getById($id) {
        return Product::where('id', $id)
            ->get();
    }

    /**
     * Get one page of products for a user.
     *
     * @param User $user - Because in a normal project we may have to check the
     *                     user's privileges before showing any products.
     * @return Collection
     */
    public function page(User $user) {
        return Product::orderBy('name', 'asc')
            ->paginate(15);
    }

    /**
     * Get all products together with the sum of their sales costs.
     * (From the task description it can be assumed that pagination is not
     * needed here.)
     *
     * @param User $user - Same privilege-check rationale as in page().
     * @param string $orderBy - 'sumsales_desk' for descending sales sum,
     *                          anything else sorts by name ascending.
     * @return array
     */
    public function sumsales(User $user, $orderBy='name') {
        // Whitelist the ORDER BY clause: only these two fixed strings can
        // ever be interpolated into the SQL, so this part is injection-safe.
        if ($orderBy === 'sumsales_desk') {
            $orderBy = 'sumsales DESC';
        } else {
            $orderBy = 'name ASC';
        }

        // Build the optional date-range filter with bound parameters instead
        // of concatenating raw request input into the SQL string. The old
        // string concatenation of \Input::get() was an SQL injection hole.
        $wher = '';
        $bindings = [];
        if (!empty($_GET['filter_from'])) {
            $wher .= ' AND sales.date_of_purchase >= ?';
            $bindings[] = \Input::get('filter_from');
        }
        if (!empty($_GET['filter_to'])) {
            $wher .= ' AND sales.date_of_purchase <= ?';
            $bindings[] = \Input::get('filter_to');
        }

        $results = \DB::select('SELECT products.id AS id, products.name AS name, SUM(sales.cost) AS sumsales FROM products, sales WHERE products.id = sales.product_id '.$wher.' GROUP BY id ORDER BY '.$orderBy.';', $bindings);
        return $results;
    }
} | {
"content_hash": "1e5a13a93b329345b306808c16d5fdd9",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 214,
"avg_line_length": 31.705882352941178,
"alnum_prop": 0.5652442795299938,
"repo_name": "rogallic/rogallic_tj_sybis",
"id": "aa842e99c8f0d0b5ab0c9717acc371fed6448932",
"size": "1662",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/Repositories/ProductRepository.php",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "412"
},
{
"name": "CSS",
"bytes": "72"
},
{
"name": "HTML",
"bytes": "29532"
},
{
"name": "JavaScript",
"bytes": "503"
},
{
"name": "PHP",
"bytes": "79154"
}
],
"symlink_target": ""
} |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.test.bpmn.subprocess.util;
import org.camunda.bpm.engine.delegate.DelegateExecution;
import org.camunda.bpm.engine.delegate.JavaDelegate;
import org.camunda.bpm.engine.runtime.ActivityInstance;
/**
* @author Daniel Meyer
*
*/
public class GetActInstanceDelegate implements JavaDelegate {
public static ActivityInstance activityInstance = null;
public void execute(DelegateExecution execution) throws Exception {
activityInstance = execution.getProcessEngineServices()
.getRuntimeService()
.getActivityInstance(execution.getProcessInstanceId());
}
}
| {
"content_hash": "86a60ab44bff83ba0a0b1904d12fa51d",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 75,
"avg_line_length": 35.45454545454545,
"alnum_prop": 0.7683760683760684,
"repo_name": "nagyistoce/camunda-bpm-platform",
"id": "18d96aeb4b2364557574f7341e4769c9b8e2c9c5",
"size": "1170",
"binary": false,
"copies": "20",
"ref": "refs/heads/master",
"path": "engine/src/test/java/org/camunda/bpm/engine/test/bpmn/subprocess/util/GetActInstanceDelegate.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "6657"
},
{
"name": "CSS",
"bytes": "1301"
},
{
"name": "Groovy",
"bytes": "1594"
},
{
"name": "HTML",
"bytes": "35196"
},
{
"name": "Java",
"bytes": "18674320"
},
{
"name": "JavaScript",
"bytes": "43"
},
{
"name": "PLpgSQL",
"bytes": "3267"
},
{
"name": "Python",
"bytes": "187"
},
{
"name": "Ruby",
"bytes": "60"
},
{
"name": "Shell",
"bytes": "4048"
}
],
"symlink_target": ""
} |
{% extends "admin.html" %}
{% block title %}Create a new department{% endblock %}
{% block maincontent %}
<h1>Create a new department</h1>
<form action="/admin/addDepartment" method="POST" class="form-horizontal">
<div class="form-group">
<label for="dep_id">Department id</label>
<input type="text" class="form-control" name="dep_id" />
</div>
<div class="form-group">
<label for="name">Department name</label>
<input type="text" class="form-control" name="name" />
</div>
<div class="form-group">
<label for="hod">HOD</label>
<input type="text" class="form-control" name="hod" />
</div>
<input type="submit" class="btn btn-default" value="Create new department" />
</form>
{% endblock %}
| {
"content_hash": "1172a633b771120944e16ce5d1c54822",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 77,
"avg_line_length": 25.74074074074074,
"alnum_prop": 0.6719424460431654,
"repo_name": "Deepakkoli93/gae",
"id": "03de3b6b3748228e04cf6b7e5b27d7f5c7962157",
"size": "695",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "templates/admin/addDepartment.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "12388"
},
{
"name": "HTML",
"bytes": "194983"
},
{
"name": "JavaScript",
"bytes": "345471"
},
{
"name": "Python",
"bytes": "53479"
}
],
"symlink_target": ""
} |
//========================================================================
//Copyright 2007-2011 David Yu dyuproject@gmail.com
//------------------------------------------------------------------------
//Licensed under the Apache License, Version 2.0 (the "License");
//you may not use this file except in compliance with the License.
//You may obtain a copy of the License at
//http://www.apache.org/licenses/LICENSE-2.0
//Unless required by applicable law or agreed to in writing, software
//distributed under the License is distributed on an "AS IS" BASIS,
//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//See the License for the specific language governing permissions and
//limitations under the License.
//========================================================================
package io.protostuff;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Arrays;
import java.util.Map;
import io.protostuff.StringSerializer.STRING;
/**
 * Test jsonx ser/deser for runtime {@link Map} fields.
 *
 * @author David Yu
 * @created Jan 22, 2011
 */
public class JsonXRuntimeMapTest extends AbstractJsonRuntimeMapTest
{

    // These tests always use the standard (non-numeric) JSON form.
    @Override
    protected boolean isNumeric()
    {
        return false;
    }

    // Serialize a message to jsonx bytes.
    @Override
    protected <T> byte[] toByteArray(T message, Schema<T> schema)
    {
        return JsonXIOUtil.toByteArray(message, schema, isNumeric(), buf());
    }

    // Serialize a message as jsonx directly onto an output stream.
    @Override
    protected <T> void writeTo(OutputStream out, T message, Schema<T> schema) throws IOException
    {
        JsonXIOUtil.writeTo(out, message, schema, isNumeric(), buf());
    }

    /**
     * Round-trips a message jsonx -> protostuff -> jsonx, verifying that
     * byte[]- and stream-based pipes agree at each step and that the final
     * jsonx output matches the original serialization.
     */
    @Override
    protected <T> void roundTrip(T message, Schema<T> schema,
            Pipe.Schema<T> pipeSchema) throws Exception
    {
        // Serialize the message to jsonx.
        byte[] json = JsonXIOUtil.toByteArray(message, schema, isNumeric(), buf());

        ByteArrayInputStream jsonStream = new ByteArrayInputStream(json);

        // Pipe json -> protostuff from both a byte[] and a stream source;
        // both must produce identical bytes.
        byte[] protostuff = ProtostuffIOUtil.toByteArray(
                JsonIOUtil.newPipe(json, 0, json.length, isNumeric()), pipeSchema, buf());
        byte[] protostuffFromStream = ProtostuffIOUtil.toByteArray(
                JsonIOUtil.newPipe(jsonStream, isNumeric()), pipeSchema, buf());
        assertTrue(Arrays.equals(protostuff, protostuffFromStream));

        // Merging the protostuff bytes back must reproduce the original message.
        T parsedMessage = schema.newMessage();
        ProtostuffIOUtil.mergeFrom(protostuff, parsedMessage, schema);
        SerializableObjects.assertEquals(message, parsedMessage);

        ByteArrayInputStream protostuffStream = new ByteArrayInputStream(protostuff);

        // Pipe protostuff -> jsonx from both sources and compare the results
        // against each other and against the original jsonx serialization.
        byte[] jsonRoundTrip = JsonXIOUtil.toByteArray(
                ProtostuffIOUtil.newPipe(protostuff, 0, protostuff.length), pipeSchema, isNumeric(), buf());
        byte[] jsonRoundTripFromStream = JsonXIOUtil.toByteArray(
                ProtostuffIOUtil.newPipe(protostuffStream), pipeSchema, isNumeric(), buf());
        assertTrue(jsonRoundTrip.length == jsonRoundTripFromStream.length);

        String strJsonRoundTrip = STRING.deser(jsonRoundTrip);
        assertEquals(strJsonRoundTrip, STRING.deser(jsonRoundTripFromStream));

        assertTrue(jsonRoundTrip.length == json.length);
        assertEquals(strJsonRoundTrip, STRING.deser(json));
    }
}
| {
"content_hash": "5e916435c81382aa336d03adac3ccfad",
"timestamp": "",
"source": "github",
"line_count": 91,
"max_line_length": 108,
"avg_line_length": 35.97802197802198,
"alnum_prop": 0.660965180207697,
"repo_name": "tsheasha/protostuff",
"id": "453d8e63e70d7767f7f7357c00a755554fb1b48b",
"size": "3274",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "protostuff-json/src/test/java/io/protostuff/JsonXRuntimeMapTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "GAP",
"bytes": "34142"
},
{
"name": "Java",
"bytes": "3273212"
},
{
"name": "Protocol Buffer",
"bytes": "62397"
},
{
"name": "Shell",
"bytes": "467"
}
],
"symlink_target": ""
} |
// .NAME vtkGraphWeightFilter - Base class for filters that weight graph
// edges.
//
// .SECTION Description
// vtkGraphWeightFilter is the abstract base class that provides an interface
// for classes that apply weights to graph edges. The weights are added
// as a vtkFloatArray named "Weights."
// The ComputeWeight function must be implemented to provide the function of two
// vertices which determines the weight of each edge.
// The CheckRequirements function can be implemented if you wish to ensure
// that the input graph has all of the properties that will be required
// by the ComputeWeight function.
#ifndef __vtkGraphWeightFilter_h
#define __vtkGraphWeightFilter_h
#include "vtkGraphAlgorithm.h"
class vtkGraph;
class VTK_FILTERING_EXPORT vtkGraphWeightFilter : public vtkGraphAlgorithm
{
public:
vtkTypeMacro(vtkGraphWeightFilter, vtkGraphAlgorithm);
void PrintSelf(ostream& os, vtkIndent indent);
protected:
vtkGraphWeightFilter(){}
~vtkGraphWeightFilter(){}
// Description:
// Standard VTK pipeline request. NOTE(review): per the class comment above,
// the implementation (in the .cxx file) is expected to call CheckRequirements()
// and ComputeWeight() for each edge, storing the results in a vtkFloatArray
// named "Weights" -- confirm against the implementation file.
int RequestData(vtkInformation *,
vtkInformationVector **,
vtkInformationVector *);
// Description:
// Compute the weight on the 'graph' for a particular 'edge'.
// This is a pure virtual function that must be implemented in subclasses.
virtual float ComputeWeight(vtkGraph* const graph, const vtkEdgeType& edge) const = 0;
// Description:
// Ensure that the 'graph' has all properties that are needed to compute
// the weights. For example, in vtkGraphWeightEuclideanDistanceFilter,
// 'graph' must have Points set for each vertex, as the ComputeWeight
// function calls GetPoint.
virtual bool CheckRequirements(vtkGraph* const graph) const;
private:
vtkGraphWeightFilter(const vtkGraphWeightFilter&); // Not implemented.
void operator=(const vtkGraphWeightFilter&); // Not implemented.
};
#endif
| {
"content_hash": "470ce1ce0f1fe22e4c0d6dd63b6f3c66",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 88,
"avg_line_length": 35.41509433962264,
"alnum_prop": 0.7490676611614278,
"repo_name": "ReneLaqua/GlobalThresholding",
"id": "af83c2b0ee89a3553fca98af955fbb49f8c075b8",
"size": "2467",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "OsiriXAPI.framework/Versions/A/Headers/vtkGraphWeightFilter.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "12321588"
},
{
"name": "C++",
"bytes": "16273917"
},
{
"name": "Objective-C",
"bytes": "1004775"
}
],
"symlink_target": ""
} |
module RuboCop
  module Cop
    module Cask
      # Shared behaviour for cops that inspect `url` stanzas in a cask.
      module OnUrlStanza
        extend Forwardable
        include CaskHelp

        # CaskHelp hook: remembers the cask block under inspection and then
        # hands every top-level `url` stanza to the including cop's
        # `on_url_stanza` callback.
        def on_cask(cask_block)
          @cask_block = cask_block
          toplevel_stanzas.each do |stanza|
            on_url_stanza(stanza) if stanza.url?
          end
        end

        private

        attr_reader :cask_block

        def_delegators :cask_block, :toplevel_stanzas
      end
    end
  end
end
| {
"content_hash": "b5d0942ab9890cfa2afde6f0f0c1eb82",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 58,
"avg_line_length": 20,
"alnum_prop": 0.5596153846153846,
"repo_name": "Homebrew/brew",
"id": "ad26e67a110f796723bb328d2367ab65cef75540",
"size": "566",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "Library/Homebrew/rubocops/cask/mixin/on_url_stanza.rb",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Dockerfile",
"bytes": "1861"
},
{
"name": "HTML",
"bytes": "29627"
},
{
"name": "PostScript",
"bytes": "485"
},
{
"name": "Roff",
"bytes": "108869"
},
{
"name": "Ruby",
"bytes": "3852532"
},
{
"name": "Shell",
"bytes": "265377"
},
{
"name": "Swift",
"bytes": "2161"
}
],
"symlink_target": ""
} |
Add this line to your application’s Gemfile:
```ruby
gem "pghero"
```
And mount the dashboard in your `config/routes.rb`:
```ruby
mount PgHero::Engine, at: "pghero"
```
Be sure to [secure the dashboard](#authentication) in production.
### Suggested Indexes
PgHero can suggest indexes to add. To enable, add to your Gemfile:
```ruby
gem "pg_query", ">= 2"
```
and make sure [query stats](#query-stats) are enabled. Read about how it works [here](Suggested-Indexes.md).
## Authentication
For basic authentication, set the following variables in your environment or an initializer.
```ruby
ENV["PGHERO_USERNAME"] = "link"
ENV["PGHERO_PASSWORD"] = "hyrule"
```
For Devise, use:
```ruby
authenticate :user, -> (user) { user.admin? } do
mount PgHero::Engine, at: "pghero"
end
```
## Query Stats
Query stats can be enabled from the dashboard. If you run into issues, [view the guide](Query-Stats.md).
## Historical Query Stats
To track query stats over time, run:
```sh
rails generate pghero:query_stats
rails db:migrate
```
And schedule the task below to run every 5 minutes.
```sh
rake pghero:capture_query_stats
```
Or with a scheduler like Clockwork, use:
```ruby
PgHero.capture_query_stats
```
After this, a time range slider will appear on the Queries tab.
The query stats table can grow large over time. Remove old stats with:
```sh
rake pghero:clean_query_stats
```
or:
```rb
PgHero.clean_query_stats
```
By default, query stats are stored in your app’s database. Change this with:
```ruby
ENV["PGHERO_STATS_DATABASE_URL"]
```
## Historical Space Stats
To track space stats over time, run:
```sh
rails generate pghero:space_stats
rails db:migrate
```
And schedule the task below to run once a day.
```sh
rake pghero:capture_space_stats
```
Or with a scheduler like Clockwork, use:
```ruby
PgHero.capture_space_stats
```
## System Stats
CPU usage, IOPS, and other stats are available for:
- [Amazon RDS](#amazon-rds)
- [Google Cloud SQL](#google-cloud-sql)
- [Azure Database](#azure-database)
Heroku and DigitalOcean do not currently have an API for database metrics.
### Amazon RDS
Add this line to your application’s Gemfile:
```ruby
gem "aws-sdk-cloudwatch"
```
By default, your application’s AWS credentials are used. To use separate credentials, add these variables to your environment:
```sh
PGHERO_ACCESS_KEY_ID=my-access-key
PGHERO_SECRET_ACCESS_KEY=my-secret
PGHERO_REGION=us-east-1
```
Finally, specify your DB instance identifier.
```sh
PGHERO_DB_INSTANCE_IDENTIFIER=my-instance
```
This requires the following IAM policy:
```json
{
"Version": "2012-10-17",
"Statement": [
{
"Effect": "Allow",
"Action": "cloudwatch:GetMetricStatistics",
"Resource": "*"
}
]
}
```
### Google Cloud SQL
Add this line to your application’s Gemfile:
```ruby
gem "google-cloud-monitoring-v3"
```
Enable the [Monitoring API](https://console.cloud.google.com/apis/library/monitoring.googleapis.com) and set up your credentials:
```sh
GOOGLE_APPLICATION_CREDENTIALS=path/to/credentials.json
```
Finally, specify your database id:
```sh
PGHERO_GCP_DATABASE_ID=my-project:my-instance
```
This requires the Monitoring Viewer role.
### Azure Database
Add this line to your application’s Gemfile:
```ruby
gem "azure_mgmt_monitor"
```
[Get your credentials](https://docs.microsoft.com/en-us/azure/active-directory/develop/howto-create-service-principal-portal) and add these variables to your environment:
```sh
AZURE_TENANT_ID=...
AZURE_CLIENT_ID=...
AZURE_CLIENT_SECRET=...
AZURE_SUBSCRIPTION_ID=...
```
Finally, set your database resource URI:
```sh
PGHERO_AZURE_RESOURCE_ID=/subscriptions/<subscription-id>/resourceGroups/<resource-group>/providers/Microsoft.DBforPostgreSQL/servers/<database-id>
```
This requires the Monitoring Reader role.
## Customization & Multiple Databases
To customize PgHero, create `config/pghero.yml` with:
```sh
rails generate pghero:config
```
This allows you to specify multiple databases and change thresholds. Thresholds can be set globally or per-database.
With Postgres < 12, if multiple databases are in the same instance and use historical query stats, PgHero should be configured to capture them together.
```yml
databases:
primary:
url: ...
other:
url: ...
capture_query_stats: primary
```
## Permissions
We recommend [setting up a dedicated user](Permissions.md) for PgHero.
## Methods
Insights
```ruby
PgHero.running_queries
PgHero.long_running_queries
PgHero.index_usage
PgHero.invalid_indexes
PgHero.missing_indexes
PgHero.unused_indexes
PgHero.unused_tables
PgHero.database_size
PgHero.relation_sizes
PgHero.index_hit_rate
PgHero.table_hit_rate
PgHero.total_connections
```
Kill queries
```ruby
PgHero.kill(pid)
PgHero.kill_long_running_queries
PgHero.kill_all
```
Query stats
```ruby
PgHero.query_stats_enabled?
PgHero.enable_query_stats
PgHero.disable_query_stats
PgHero.reset_query_stats
PgHero.query_stats
PgHero.slow_queries
```
Suggested indexes
```ruby
PgHero.suggested_indexes
PgHero.best_index(query)
```
Security
```ruby
PgHero.ssl_used?
```
Replication
```ruby
PgHero.replica?
PgHero.replication_lag
```
If you have multiple databases, specify a database with:
```ruby
PgHero.databases["db2"].running_queries
```
## Users
**Note:** It’s unsafe to pass user input to these commands.
Create a user
```ruby
PgHero.create_user("link")
# {password: "zbTrNHk2tvMgNabFgCo0ws7T"}
```
This generates and returns a secure password. The user has full access to the `public` schema.
Read-only access
```ruby
PgHero.create_user("epona", readonly: true)
```
Set the password
```ruby
PgHero.create_user("zelda", password: "hyrule")
```
Grant access to only certain tables
```ruby
PgHero.create_user("navi", tables: ["triforce"])
```
Drop a user
```ruby
PgHero.drop_user("ganondorf")
```
## Upgrading
### 3.0.0
Breaking changes
- Changed `capture_query_stats` to only reset stats for current database in Postgres 12+
- Changed `reset_query_stats` to only reset stats for current database (use `reset_instance_query_stats` to reset stats for entire instance)
- Removed `access_key_id`, `secret_access_key`, `region`, and `db_instance_identifier` methods (use `aws_` prefixed methods instead)
## Bonus
- See where queries come from with [Marginalia](https://github.com/basecamp/marginalia) - comments appear on the Live Queries tab.
- Get weekly news and articles with [Postgres Weekly](https://postgresweekly.com/)
- Optimize your configuration with [PgTune](https://pgtune.leopard.in.ua/) and [pgBench](https://www.postgresql.org/docs/devel/static/pgbench.html)
| {
"content_hash": "7b8c36616c8e7820cfb041da32075d1c",
"timestamp": "",
"source": "github",
"line_count": 342,
"max_line_length": 170,
"avg_line_length": 19.523391812865498,
"alnum_prop": 0.7284708701512655,
"repo_name": "ankane/pghero",
"id": "38db151c71cbed2a4d3cef79114f94b242f29f3b",
"size": "6724",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "guides/Rails.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "46720"
},
{
"name": "Ruby",
"bytes": "143672"
}
],
"symlink_target": ""
} |
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.InteropServices.WindowsRuntime;
using Windows.ApplicationModel;
using Windows.ApplicationModel.Activation;
using Windows.Foundation;
using Windows.Foundation.Collections;
using Windows.UI.Xaml;
using Windows.UI.Xaml.Controls;
using Windows.UI.Xaml.Controls.Primitives;
using Windows.UI.Xaml.Data;
using Windows.UI.Xaml.Input;
using Windows.UI.Xaml.Media;
using Windows.UI.Xaml.Navigation;
namespace Transitions
{
    /// <summary>
    /// Provides application-specific behavior to supplement the default Application class.
    /// </summary>
    sealed partial class App : Application
    {
        /// <summary>
        /// Initializes the singleton application object. This is the first line of
        /// authored code executed, and as such is the logical equivalent of main()
        /// or WinMain().
        /// </summary>
        public App()
        {
            InitializeComponent();
            Suspending += OnSuspending;
        }

        /// <summary>
        /// Invoked when the application is launched normally by the end user. Other
        /// entry points will be used such as when the application is launched to open
        /// a specific file.
        /// </summary>
        /// <param name="e">Details about the launch request and process.</param>
        protected override void OnLaunched(LaunchActivatedEventArgs e)
        {
            var shellFrame = Window.Current.Content as Frame;

            // Only build the navigation frame on first activation; if the window
            // already has content we merely (re)activate it below.
            if (shellFrame == null)
            {
                shellFrame = new Frame();
                shellFrame.NavigationFailed += OnNavigationFailed;

                if (e.PreviousExecutionState == ApplicationExecutionState.Terminated)
                {
                    //TODO: Load state from previously suspended application
                }

                Window.Current.Content = shellFrame;
            }

            if (!e.PrelaunchActivated)
            {
                if (shellFrame.Content == null)
                {
                    // No navigation stack to restore: show the first page, passing
                    // the launch arguments as the navigation parameter.
                    shellFrame.Navigate(typeof(MainPage), e.Arguments);
                }

                // Ensure the current window is active.
                Window.Current.Activate();
            }
        }

        /// <summary>
        /// Invoked when navigation to a certain page fails.
        /// </summary>
        /// <param name="sender">The Frame which failed navigation</param>
        /// <param name="e">Details about the navigation failure</param>
        void OnNavigationFailed(object sender, NavigationFailedEventArgs e)
        {
            throw new Exception("Failed to load Page " + e.SourcePageType.FullName);
        }

        /// <summary>
        /// Invoked when application execution is being suspended. Application state is
        /// saved without knowing whether the application will be terminated or resumed
        /// with the contents of memory still intact.
        /// </summary>
        /// <param name="sender">The source of the suspend request.</param>
        /// <param name="e">Details about the suspend request.</param>
        private void OnSuspending(object sender, SuspendingEventArgs e)
        {
            var deferral = e.SuspendingOperation.GetDeferral();
            //TODO: Save application state and stop any background activity
            deferral.Complete();
        }
    }
}
| {
"content_hash": "1eadc378d729dffeaa882d8cddccb29a",
"timestamp": "",
"source": "github",
"line_count": 100,
"max_line_length": 99,
"avg_line_length": 39.23,
"alnum_prop": 0.6143257710935508,
"repo_name": "ProfessionalCSharp/ProfessionalCSharp7",
"id": "dace3dde49c51359a25a254a2032ad964d14594a",
"size": "3925",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "Styles/StylesSamples/Transitions/App.xaml.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP.NET",
"bytes": "210"
},
{
"name": "Batchfile",
"bytes": "21458"
},
{
"name": "C#",
"bytes": "1710305"
},
{
"name": "CSS",
"bytes": "8195"
},
{
"name": "Dockerfile",
"bytes": "439"
},
{
"name": "HTML",
"bytes": "199109"
},
{
"name": "JavaScript",
"bytes": "214593"
},
{
"name": "TypeScript",
"bytes": "9812"
}
],
"symlink_target": ""
} |
<!--
Copyright (c) 2016, the Dart project authors. Please see the AUTHORS file
for details. All rights reserved. Use of this source code is governed by a
BSD-style license that can be found in the LICENSE file.
-->
<div class="acx-scoreboard">
<material-button class="scroll-button scroll-left-button"
(trigger)="scrollScorecardBarLeft()"
[class.hide]="atScorecardBarStart"
*ngIf="isScrollable">
<glyph class="scroll-icon"
icon="chevron_left"
[attr.aria-label]="scrollScorecardBarBack">
</glyph>
</material-button>
<div scorecardBar class="scorecard-bar">
<ng-content></ng-content>
</div>
<material-button class="scroll-button scroll-right-button"
(trigger)="scrollScorecardBarRight()"
[class.hide]="atScorecardBarEnd"
*ngIf="isScrollable">
<glyph class="scroll-icon"
icon="chevron_right"
[attr.aria-label]="scrollScorecardBarForward">
</glyph>
</material-button>
</div>
| {
"content_hash": "d342f4535aad422812a76f4a8c185c72",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 75,
"avg_line_length": 38.035714285714285,
"alnum_prop": 0.6206572769953052,
"repo_name": "rgroult/MobDistTool",
"id": "b8db060a2f31bf7e0f492ef5edb333ab39b938f9",
"size": "1065",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "build/web/packages/angular2_components/src/components/scorecard/scoreboard.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "717689"
},
{
"name": "Dart",
"bytes": "464586"
},
{
"name": "HTML",
"bytes": "205251"
},
{
"name": "JavaScript",
"bytes": "3391058"
},
{
"name": "Perl",
"bytes": "24256"
},
{
"name": "Python",
"bytes": "4582"
},
{
"name": "Shell",
"bytes": "535"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="utf-8"?><!--
~ Copyright (c) 2009-2011, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
~
~ Licensed under the Apache License, Version 2.0 (the "License");
~ you may not use this file except in compliance with the License.
~ You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<parent>
<groupId>org.wso2.carbon.apimgt</groupId>
<artifactId>feature-categories</artifactId>
<version>5.0.4-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>org.wso2.apimgt.store.category.feature</artifactId>
<packaging>pom</packaging>
<name>API Store ${project.version}</name>
<url>http://wso2.org</url>
<description>Includes features for Store component.</description>
<build>
<plugins>
<plugin>
<groupId>org.wso2.maven</groupId>
<artifactId>carbon-p2-plugin</artifactId>
<version>${carbon.p2.plugin.version}</version>
<executions>
<execution>
<id>1-p2-feature-generation</id>
<phase>package</phase>
<goals>
<goal>p2-feature-gen</goal>
</goals>
<configuration>
<id>org.wso2.apimgt.store.category</id>
<propertiesFile>../../../etc/feature.properties</propertiesFile>
<adviceFile>
<properties>
<propertyDef>org.eclipse.equinox.p2.type.category:true</propertyDef>
<propertyDef>org.wso2.carbon.p2.category.type:composite</propertyDef>
<propertyDef>org.wso2.carbon.p2.category.property:true</propertyDef>
</properties>
</adviceFile>
<importFeatures>
<importFeatureDef>org.jaggeryjs:${jaggery.version}</importFeatureDef>
<importFeatureDef>org.wso2.carbon.core.common:${carbon.kernel.version}</importFeatureDef>
<importFeatureDef>org.wso2.carbon.core.server:${carbon.kernel.version}</importFeatureDef>
<importFeatureDef>org.wso2.carbon.core.runtime:${carbon.kernel.version}</importFeatureDef>
<importFeatureDef>org.wso2.carbon.core:${carbon.kernel.version}</importFeatureDef>
<importFeatureDef>org.wso2.carbon.as.runtimes.cxf:4.2.2</importFeatureDef>
<importFeatureDef>org.wso2.am.styles:${apimserver.version}</importFeatureDef>
<importFeatureDef>org.wso2.carbon.apimgt.core:${carbon.apimgt.version}</importFeatureDef>
<importFeatureDef>org.wso2.carbon.apimgt.store:${carbon.apimgt.version}</importFeatureDef>
<importFeatureDef>org.wso2.carbon.application.mgt.server:${carbon.commons.version}</importFeatureDef>
<importFeatureDef>org.wso2.carbon.application.mgt.synapse.server:${carbon.mediation.version}</importFeatureDef>
<importFeatureDef>org.wso2.carbon.application.deployer.synapse:${carbon.mediation.version}</importFeatureDef>
<importFeatureDef>org.wso2.carbon.system.statistics:4.2.2</importFeatureDef>
<importFeatureDef>org.wso2.carbon.datasource.server:4.2.1</importFeatureDef>
<importFeatureDef>org.wso2.carbon.ec2-client:${carbon.kernel.version}</importFeatureDef>
<importFeatureDef>org.wso2.carbon.endpoint.server:4.2.1</importFeatureDef>
<importFeatureDef>org.wso2.carbon.event.common:${carbon.kernel.version}</importFeatureDef>
<importFeatureDef>org.wso2.carbon.event.server:4.2.2</importFeatureDef>
<importFeatureDef>org.wso2.carbon.governance.lifecycle.management:4.2.1</importFeatureDef>
<importFeatureDef>org.wso2.carbon.governance.metadata:4.2.2</importFeatureDef>
<importFeatureDef>org.wso2.carbon.identity.authenticator.saml2.sso.server:${carbon.identity.version}</importFeatureDef>
<importFeatureDef>org.wso2.carbon.identity.authenticator.saml2.sso.ui:${carbon.identity.version}</importFeatureDef>
<importFeatureDef>org.wso2.carbon.identity.core:${carbon.identity.version}</importFeatureDef>
<importFeatureDef>org.wso2.carbon.identity.oauth:${carbon.identity.version}</importFeatureDef>
<importFeatureDef>org.wso2.carbon.identity.self.registration.server:${carbon.identity.version}</importFeatureDef>
<importFeatureDef>org.wso2.carbon.identity.application.authentication.framework.server:${carbon.identity.version}</importFeatureDef>
<importFeatureDef>org.wso2.carbon.jaxws.webapp.mgt:${carbon.deployment.version}</importFeatureDef>
<importFeatureDef>org.wso2.carbon.localentry.server:4.2.1</importFeatureDef>
<importFeatureDef>org.wso2.carbon.logging.mgt:${carbon.commons.version}</importFeatureDef>
<importFeatureDef>org.wso2.carbon.message.flows:${carbon.kernel.version}</importFeatureDef>
<importFeatureDef>org.wso2.carbon.mex:${carbon.kernel.version}</importFeatureDef>
<importFeatureDef>org.wso2.carbon.module.mgt.server:${carbon.kernel.version}</importFeatureDef>
<importFeatureDef>org.wso2.carbon.ntask.core:4.2.4</importFeatureDef>
<importFeatureDef>org.wso2.carbon.registry.associations.dependencies:${carbon.kernel.version}</importFeatureDef>
<importFeatureDef>org.wso2.carbon.registry.community:4.2.2</importFeatureDef>
<importFeatureDef>org.wso2.carbon.registry.core:4.2.2</importFeatureDef>
<importFeatureDef>org.wso2.carbon.registry.extensions:4.2.1</importFeatureDef>
<importFeatureDef>org.wso2.carbon.registry.resource.properties:4.2.1</importFeatureDef>
<importFeatureDef>org.wso2.carbon.registry.ui.menu:${carbon.kernel.version}</importFeatureDef>
<importFeatureDef>org.wso2.carbon.registry.ui.menu.governance:${carbon.kernel.version}</importFeatureDef>
<importFeatureDef>org.wso2.carbon.registry.ws:${carbon.kernel.version}</importFeatureDef>
<importFeatureDef>org.wso2.carbon.relay:${carbon.kernel.version}</importFeatureDef>
<importFeatureDef>org.wso2.carbon.security.mgt:4.2.5</importFeatureDef>
<importFeatureDef>org.wso2.carbon.service.mgt.server:4.2.2</importFeatureDef>
<importFeatureDef>org.wso2.carbon.stratos.common:2.2.1</importFeatureDef>
<importFeatureDef>org.wso2.carbon.stratos.deployment:2.2.1</importFeatureDef>
<importFeatureDef>org.wso2.carbon.task.server:${carbon.kernel.version}</importFeatureDef>
<importFeatureDef>org.wso2.carbon.um.ws.service.client:4.2.2</importFeatureDef>
<importFeatureDef>org.wso2.carbon.um.ws.service.server:4.2.2</importFeatureDef>
<importFeatureDef>org.wso2.carbon.user.mgt:4.2.3</importFeatureDef>
<importFeatureDef>org.wso2.carbon.webapp.mgt:4.2.2</importFeatureDef>
<importFeatureDef>org.wso2.carbon.xfer:${carbon.kernel.version}</importFeatureDef>
<importFeatureDef>org.wso2.stratos.apimgt.dashboard.ui:${apimserver.version}</importFeatureDef>
<importFeatureDef>org.wso2.carbon.registry.contentsearch:4.2.2</importFeatureDef>
<importFeatureDef>org.wso2.carbon.identity.oauth.common:4.2.3</importFeatureDef>
<importFeatureDef>org.apache.synapse.wso2:2.1.2-wso2v6</importFeatureDef>
<importFeatureDef>org.apache.synapse.transport.nhttp:2.1.2-wso2v6</importFeatureDef>
<importFeatureDef>org.wso2.carbon.soaptracer:${carbon.kernel.version}</importFeatureDef>
<importFeatureDef>org.wso2.carbon.transport.mgt:${carbon.kernel.version}</importFeatureDef>
<importFeatureDef>org.wso2.carbon.transport.mail:${carbon.kernel.version}</importFeatureDef>
<importFeatureDef>org.wso2.carbon.transport.nhttp:4.2.1</importFeatureDef>
<importFeatureDef>org.wso2.carbon.transport.jms:4.2.1</importFeatureDef>
<importFeatureDef>org.wso2.carbon.rest.api.server:4.2.2</importFeatureDef>
<importFeatureDef>org.wso2.carbon.identity.provider.server:4.2.3</importFeatureDef>
<importFeatureDef>org.wso2.carbon.identity.application.authenticator.basicauth.server:4.2.3</importFeatureDef>
<importFeatureDef>org.wso2.carbon.tryit:4.2.1</importFeatureDef>
<importFeatureDef>org.wso2.carbon.claim.mgt:4.2.2</importFeatureDef>
<importFeatureDef>org.wso2.carbon.forum.server:1.0.1</importFeatureDef>
</importFeatures>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>
| {
"content_hash": "5638b3eeafae8d89ca633d5aa8a30c85",
"timestamp": "",
"source": "github",
"line_count": 133,
"max_line_length": 201,
"avg_line_length": 65.78947368421052,
"alnum_prop": 0.7393142857142857,
"repo_name": "charithag/carbon-apimgt",
"id": "19ca918490018683ff68d9c3262f88dc2fffc4e3",
"size": "8750",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "features/categories/store/pom.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "72417"
},
{
"name": "Batchfile",
"bytes": "7251"
},
{
"name": "CSS",
"bytes": "1694146"
},
{
"name": "HTML",
"bytes": "163297"
},
{
"name": "Handlebars",
"bytes": "179244"
},
{
"name": "Java",
"bytes": "3655118"
},
{
"name": "JavaScript",
"bytes": "14540423"
},
{
"name": "PLSQL",
"bytes": "88321"
},
{
"name": "PLpgSQL",
"bytes": "29272"
},
{
"name": "Shell",
"bytes": "19726"
},
{
"name": "Thrift",
"bytes": "1730"
},
{
"name": "XSLT",
"bytes": "94760"
}
],
"symlink_target": ""
} |
function Send-EXRSK4BMessage{
	<#
	.SYNOPSIS
		Sends an instant message via the Skype for Business UCWA "startMessaging" operation.
	.DESCRIPTION
		Builds the startMessaging payload and POSTs it to the href exposed by the
		previously established application resource. Relies on script-scoped state
		set up by a prior sign-in: $Script:SK4BToken, $Script:SK4BServerName,
		$Script:SK4BApplication and $Script:SK4BMailboxName.
	.PARAMETER MailboxName
		Accepted for signature compatibility. NOTE(review): not used by the body,
		which reads $Script:SK4BMailboxName instead -- confirm intended.
	.PARAMETER AccessToken
		Accepted for signature compatibility. NOTE(review): not used by the body,
		which reads $Script:SK4BToken instead -- confirm intended.
	.PARAMETER Subject
		Optional subject of the conversation; an empty subject is sent when omitted.
	.PARAMETER ToSipAddress
		SIP address of the recipient (without the "sip:" prefix). Required.
	.PARAMETER Message
		Plain-text message body. Required.
	#>
	param(
		[Parameter(Position = 0, Mandatory = $false)]
		[string]
		$MailboxName,
		[Parameter(Position = 2, Mandatory = $false)]
		[string]
		$AccessToken,
		[Parameter(Position = 3, Mandatory = $false)]
		[string]
		$Subject,
		[Parameter(Position = 4, Mandatory = $false)]
		[string]
		$ToSipAddress,
		[Parameter(Position = 5, Mandatory = $false)]
		[string]
		$Message
	)
	process{
		# Assemble the startMessaging request body expected by UCWA.
		$MessageObject = @{}
		$MessageObject.Add("rel","service:startMessaging");
		if(![String]::IsNullOrEmpty($Subject)){
			$MessageObject.Add("subject",$Subject)
		}else{
			# UCWA requires the key to be present even when no subject was given.
			$MessageObject.Add("subject","")
		}
		# Unique id to correlate this operation with UCWA events.
		$MessageObject.Add("operationId",[guid]::NewGuid().toString())
		if(![String]::IsNullOrEmpty($ToSipAddress)){
			$MessageObject.Add("to",("sip:" + $ToSipAddress))
		}else{
			# Fixed typo in error message ("repcipient" -> "recipient").
			throw ("Error you need to specify a recipient")
		}
		if(![String]::IsNullOrEmpty($Message)){
			# Message content is passed inline as a data URI.
			$MessageObject.Add("message",("data:text/plain," + $Message))
		}else{
			throw ("Error you need to specify a Message to send")
		}
		$HttpClient = Get-HTTPClient -MailboxName $Script:SK4BMailboxName
		$HttpClient.DefaultRequestHeaders.Authorization = New-Object System.Net.Http.Headers.AuthenticationHeaderValue("Bearer", (ConvertFrom-SecureStringCustom -SecureToken $Script:SK4BToken.access_token));
		$URL = ("https://" + $Script:SK4BServerName + $Script:SK4BApplication._embedded.communication._links.startMessaging.href)
		# UCWA messaging requires at least resource version 2.
		$HttpClient.DefaultRequestHeaders.Add('X-MS-RequiresMinResourceVersion','2')
		$PostJson = New-Object System.Net.Http.StringContent((ConvertTo-Json $MessageObject -Depth 9), [System.Text.Encoding]::UTF8, "application/json")
		$ClientResult = $HttpClient.PostAsync([Uri]$URL,$PostJson)
		return ConvertFrom-Json $ClientResult.Result.Content.ReadAsStringAsync().Result
	}
}
| {
"content_hash": "3161752bfe8512b5e0c7d77f0c56b711",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 207,
"avg_line_length": 40.96078431372549,
"alnum_prop": 0.6175203446625179,
"repo_name": "gscales/Exch-Rest",
"id": "65b2e6b1baf156113133a6b1b2181f30023f9055",
"size": "2089",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "functions/SK4B/Send-EXRSK4BMessage.ps1",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PowerShell",
"bytes": "702262"
}
],
"symlink_target": ""
} |
// Prefer Node's native util.inherits when it is available and usable;
// otherwise fall back to the pure-JS browser implementation.
var nativeUtil;
try {
  nativeUtil = require('util');
} catch (e) {
  // Not running under Node (or 'util' unavailable); use the shim below.
}
if (nativeUtil && typeof nativeUtil.inherits === 'function') {
  module.exports = nativeUtil.inherits;
} else {
  module.exports = require('./inherits_browser.js');
}
| {
"content_hash": "e2fd99727d820db01649a736a11362f2",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 54,
"avg_line_length": 28.571428571428573,
"alnum_prop": 0.61,
"repo_name": "ionutbarau/petstore",
"id": "79b1d9cbbb97fa4c80857f3cf3379b968f474006",
"size": "200",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "petstore-app/src/main/resources/static/node_modules/inherits/inherits.js",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "5006"
},
{
"name": "CSS",
"bytes": "344"
},
{
"name": "HTML",
"bytes": "10564"
},
{
"name": "Java",
"bytes": "40634"
},
{
"name": "JavaScript",
"bytes": "11738"
},
{
"name": "Roff",
"bytes": "52499364"
},
{
"name": "Shell",
"bytes": "7058"
}
],
"symlink_target": ""
} |
"""* Default settings for jscribe.
@module jscribe.conf.defaults
"""
INPUT_PATHS = ["./"] # paths to source files that should be discovered
IGNORE_PATHS_REGEX = []
FILE_REGEX = r".*?[.]js$"
FILE_IGNORE_REGEX = None
DOCUMENTATION_OUTPUT_PATH = "./"
DOC_STRING_REGEX = [r"[/][*][*]", r"(?<!\\)[*][/]"]
TAG_REGEX = r"^\s*?[@](?P<tag>.*?)\s"
IGNORE_INVALID_TAGS = False
TEMPLATE = "default"
TEMPLATE_SETTINGS = {
"SHOW_LINE_NUMBER": True,
"FOOTER_TEXT": "Footer text",
"TITLE": "My Docs Title",
"ELEMENT_TEMPLATES": {},
}
TAG_SETTINGS_PATH = "jscribe.conf.jstagsettings"
OUTPUT_ENCODING = "utf-8"
LANGUAGE = "javascript"
GENERATOR = "html"
ALL_SOURCE_FILES = False
| {
"content_hash": "db03b2a87832d44d936a559dd0748b65",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 71,
"avg_line_length": 28.958333333333332,
"alnum_prop": 0.6201438848920864,
"repo_name": "mindbrave/jscribe",
"id": "d50c64b5db189c7294cb7172ff1d2d0e5701f2b0",
"size": "742",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "jscribe/conf/defaults.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "17820"
},
{
"name": "Python",
"bytes": "267951"
}
],
"symlink_target": ""
} |
import * as tl from "vsts-task-lib/task";
import * as nutil from "nuget-task-common/Utility";
import nuGetGetter = require("nuget-task-common/NuGetToolGetter");
import * as path from "path";
import * as ngToolRunner from "nuget-task-common/NuGetToolRunner2";
import * as packUtils from "./Common/NuGetPackUtilities";
import INuGetCommandOptions from "./Common/INuGetCommandOptions";
/**
 * Immutable options bag handed to packAsync() for each file being packed.
 * Implements the shared INuGetCommandOptions contract (nuGetPath, verbosity,
 * configFile, environment) plus the pack-specific settings gathered in run().
 */
class PackOptions implements INuGetCommandOptions {
    constructor(
        public nuGetPath: string,                  // path to the nuget.exe to invoke
        public outputDir: string,                  // destination folder for packages (undefined: nuget default)
        public includeReferencedProjects: boolean, // whether referenced projects are packed as well
        public version: string,                    // resolved package version (undefined when versioning is "off")
        public properties: string[],               // "key=value" build properties
        public createSymbolsPackage: boolean,      // whether a symbols package is also produced
        public verbosity: string,                  // nuget verbosity level
        public configFile: string,                 // nuget.config path -- unused by run(), part of the shared contract
        public environment: ngToolRunner.NuGetEnvironmentSettings
    ) { }
}
/**
 * Entry point for the NuGet "pack" command: reads the task inputs, resolves
 * the package version according to the selected versioning scheme, locates
 * all files matching the search pattern, and packs each one.
 *
 * @param nuGetPath Path to the nuget.exe executable to use.
 */
export async function run(nuGetPath: string): Promise<void> {
    nutil.setConsoleCodePage();

    // Task inputs.
    let searchPatternInput = tl.getPathInput("searchPatternPack", true);
    let configuration = tl.getInput("configurationToPack");
    let versioningScheme = tl.getInput("versioningScheme");
    let includeRefProj = tl.getBoolInput("includeReferencedProjects");
    let versionEnvVar = tl.getInput("versionEnvVar");
    let majorVersion = tl.getInput("requestedMajorVersion");
    let minorVersion = tl.getInput("requestedMinorVersion");
    let patchVersion = tl.getInput("requestedPatchVersion");
    let timezone = tl.getInput("packTimezone");
    let propertiesInput = tl.getInput("buildProperties");
    let verbosity = tl.getInput("verbosityPack");
    let createSymbolsPackage = tl.getBoolInput("includeSymbols");

    let outputDir = undefined;
    try
    {
        // If outputDir is not provided then the root working directory is set by default.
        // By requiring it, it will throw an error if it is not provided and we can set it to undefined.
        outputDir = tl.getPathInput("outputDir", true);
    }
    catch(error)
    {
        outputDir = undefined;
    }

    try{
        if(versioningScheme !== "off" && includeRefProj)
        {
            // Automatic versioning applies only to the top-level package.
            tl.warning(tl.loc("Warning_AutomaticallyVersionReferencedProjects"));
        }

        // Resolve the version to stamp on the package(s), if any.
        let version: string = undefined;
        switch(versioningScheme)
        {
            case "off":
                break;
            case "byPrereleaseNumber":
                tl.debug(`Getting prerelease number`);

                let nowDateTimeString = packUtils.getNowDateString(timezone);
                version = `${majorVersion}.${minorVersion}.${patchVersion}-CI-${nowDateTimeString}`;
                break;
            case "byEnvVar":
                tl.debug(`Getting version from env var: ${versionEnvVar}`);
                version = tl.getVariable(versionEnvVar);
                if(!version)
                {
                    tl.setResult(tl.TaskResult.Failed, tl.loc("Error_NoValueFoundForEnvVar"));
                    // FIX: previously this only broke out of the switch, so packing
                    // continued with an undefined version after the task had already
                    // been marked failed. Stop here instead, matching the
                    // "byBuildNumber" error paths below.
                    return;
                }
                break;
            case "byBuildNumber":
                tl.debug("Getting version number from build number")

                if(tl.getVariable("SYSTEM_HOSTTYPE") === "release")
                {
                    // Build-number versioning is only meaningful during builds.
                    tl.setResult(tl.TaskResult.Failed, tl.loc("Error_AutomaticallyVersionReleases"));
                    return;
                }

                let buildNumber: string =  tl.getVariable("BUILD_BUILDNUMBER");
                tl.debug(`Build number: ${buildNumber}`);

                // Accept "major.minor.patch" with an optional fourth part.
                let versionRegex = /\d+\.\d+\.\d+(?:\.\d+)?/;
                let versionMatches = buildNumber.match(versionRegex);
                if (!versionMatches)
                {
                    tl.setResult(tl.TaskResult.Failed, tl.loc("Error_NoVersionFoundInBuildNumber"));
                    return;
                }

                if (versionMatches.length > 1)
                {
                    tl.warning(tl.loc("Warning_MoreThanOneVersionInBuildNumber"))
                }

                version = versionMatches[0];
                break;
        }

        tl.debug(`Version to use: ${version}`);

        if(outputDir && !tl.exist(outputDir))
        {
            tl.debug(`Creating output directory: ${outputDir}`);
            tl.mkdirP(outputDir);
        }

        // Resolve the files to pack; the legacy finder is kept behind a
        // variable for backwards compatibility.
        let useLegacyFind: boolean = tl.getVariable("NuGet.UseLegacyFindFiles") === "true";
        let filesList: string[] = [];
        if (!useLegacyFind) {
            let findOptions: tl.FindOptions = <tl.FindOptions>{};
            let matchOptions: tl.MatchOptions = <tl.MatchOptions>{};
            let searchPatterns: string[] = nutil.getPatternsArrayFromInput(searchPatternInput);
            filesList = tl.findMatch(undefined, searchPatterns, findOptions, matchOptions);
        }
        else {
            filesList = nutil.resolveFilterSpec(searchPatternInput);
        }

        tl.debug(`Found ${filesList.length} files`);
        filesList.forEach(file => {
            tl.debug(`--File: ${file}`);
        });

        // Build the MSBuild property list; skip the configuration when it is
        // the unexpanded default placeholder.
        let props: string[] = [];
        if(configuration && configuration !== "$(BuildConfiguration)")
        {
            props.push(`Configuration=${configuration}`);
        }
        if(propertiesInput)
        {
            props = props.concat(propertiesInput.split(";"));
        }

        let environmentSettings: ngToolRunner.NuGetEnvironmentSettings = {
            credProviderFolder: null,
            extensionsDisabled: true
        };

        let packOptions = new PackOptions(
            nuGetPath,
            outputDir,
            includeRefProj,
            version,
            props,
            createSymbolsPackage,
            verbosity,
            undefined,
            environmentSettings);

        // Pack sequentially so log output stays attributable per file.
        for (const file of filesList) {
            await packAsync(file, packOptions);
        }
    } catch (err) {
        tl.error(err);
        tl.setResult(tl.TaskResult.Failed, tl.loc("Error_PackageFailure"));
    }
}
/**
 * Runs "nuget pack" for a single project/nuspec file.
 *
 * @param file    Path of the file to pack.
 * @param options Pack configuration (output directory, version, properties, ...).
 * @returns Promise resolving to the nuget.exe exit code.
 */
function packAsync(file: string, options: PackOptions): Q.Promise<number> {
    console.log(tl.loc("Info_AttemptingToPackFile") + file);

    const tool = ngToolRunner.createNuGetToolRunner(options.nuGetPath, options.environment, undefined);
    tool.arg("pack");
    tool.arg(file);
    tool.arg("-NonInteractive");

    // Write the package next to the input file when no output directory was given.
    tool.arg("-OutputDirectory");
    tool.arg(options.outputDir ? options.outputDir : path.dirname(file));

    const props = options.properties;
    if (props && props.length > 0) {
        tool.arg("-Properties");
        tool.arg(props.join(";"));
    }

    tool.argIf(options.includeReferencedProjects, "-IncludeReferencedProjects");
    tool.argIf(options.createSymbolsPackage, "-Symbols");

    if (options.version) {
        tool.arg("-version");
        tool.arg(options.version);
    }

    if (options.verbosity && options.verbosity !== "-") {
        tool.arg("-Verbosity");
        tool.arg(options.verbosity);
    }

    return tool.exec();
} | {
"content_hash": "63b35298f437cbdb9f47c32790a86ff0",
"timestamp": "",
"source": "github",
"line_count": 202,
"max_line_length": 106,
"avg_line_length": 34.99009900990099,
"alnum_prop": 0.5966327108092813,
"repo_name": "kkdawkins/vsts-tasks",
"id": "5369991c6fb51895d9b549f5f8ea4fe4b10f994e",
"size": "7068",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Tasks/NuGetCommand/nugetpack.ts",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "757"
},
{
"name": "CSS",
"bytes": "3780"
},
{
"name": "HTML",
"bytes": "47002"
},
{
"name": "Java",
"bytes": "1575"
},
{
"name": "JavaScript",
"bytes": "56579"
},
{
"name": "PowerShell",
"bytes": "1179150"
},
{
"name": "Shell",
"bytes": "23356"
},
{
"name": "TypeScript",
"bytes": "3190905"
}
],
"symlink_target": ""
} |
<FindBugsFilter>
<!-- Example: http://findbugs.sourceforge.net/manual/filter.html#d0e2103 -->
<Match>
<!-- Reason: http://trac.kieker-monitoring.net/ticket/1352 -->
<Class name="kieker.test.tools.junit.traceAnalysis.filter.visualization.dependencyGraph.ComponentAllocationDependencyGraphTest" />
<Bug pattern="BC_UNCONFIRMED_CAST_OF_RETURN_VALUE" />
</Match>
<Match>
<!-- Reason: http://trac.kieker-monitoring.net/ticket/1352 -->
<Class name="kieker.test.tools.junit.traceAnalysis.filter.visualization.descriptions.DescriptionDecoratorFilterTest" />
<Bug pattern="BC_UNCONFIRMED_CAST_OF_RETURN_VALUE" />
</Match>
<Match>
<!-- Reason: http://trac.kieker-monitoring.net/ticket/1352 -->
<Class name="kieker.test.tools.junit.traceAnalysis.filter.visualization.traceColoring.TraceColoringFilterTest" />
<Bug pattern="BC_UNCONFIRMED_CAST_OF_RETURN_VALUE" />
</Match>
<Match>
<Class name="kieker.tools.traceAnalysis.filter.sessionReconstruction.SessionReconstructionFilter" />
<Bug pattern="AT_OPERATION_SEQUENCE_ON_CONCURRENT_ABSTRACTION" />
</Match>
<Match>
<Class name="kieker.analysis.plugin.reader.tcp.TCPReader" />
<Bug pattern="UWF_FIELD_NOT_INITIALIZED_IN_CONSTRUCTOR" />
</Match>
<Match>
<Class name="kieker.tools.AbstractCommandLineTool" />
<Bug pattern="DM_EXIT" />
</Match>
<Match>
<Class name="kieker.tools.traceAnalysis.TraceAnalysisGUI" />
<Bug pattern="SE_BAD_FIELD" />
</Match>
<Match>
<Class name="kieker.tools.traceAnalysis.gui.FinalStep" />
<Bug pattern="SE_BAD_FIELD" />
</Match>
<Match>
<Class name="kieker.tools.traceAnalysis.gui.FinalStep" />
<Bug pattern="DM_DEFAULT_ENCODING" />
</Match>
<Match>
<Package name="~kieker\.common\.record.*" />
<Bug pattern="EI_EXPOSE_REP" />
</Match>
<Match>
<Package name="~kieker\.tools\.opad\.record.*" />
<Bug pattern="EI_EXPOSE_REP" />
</Match>
<Match>
<Class name="kieker.common.record.misc.RegistryRecord" />
<Bug pattern="DM_DEFAULT_ENCODING" />
</Match>
<Match>
<Class name="kieker.analysis.plugin.IPlugin$PluginInputPortReference" />
<Bug pattern="EI_EXPOSE_REP2" />
</Match>
<Match>
<Class name="kieker.analysis.plugin.IPlugin$PluginInputPortReference" />
<Bug pattern="EI_EXPOSE_REP" />
</Match>
<Match>
<Class name="kieker.monitoring.probe.aspectj.AbstractAspectJProbe" />
<Bug pattern="RV_RETURN_VALUE_OF_PUTIFABSENT_IGNORED" />
</Match>
<Match>
<Class name="kieker.common.record.AbstractMonitoringRecord" />
<Bug pattern="RV_RETURN_VALUE_OF_PUTIFABSENT_IGNORED" />
</Match>
<Match>
<Class name="kieker.test.monitoring.junit.probe.spring.executions.jetty.TestSpringMethodInterceptor" />
<Method name="testIt" params="" returns="void" />
<Bug pattern="NP_LOAD_OF_KNOWN_NULL_VALUE" />
</Match>
<Match>
<Class name="kieker.test.analysis.junit.plugin.filter.forward.TestStringBufferFilter" />
<Bug pattern="DM_STRING_CTOR" />
</Match>
<Match>
<Class name="kieker.tools.traceAnalysis.repository.DescriptionRepository" />
<Method name="splitLine" params="java.lang.String" returns="java.lang.String[]" />
<Bug pattern="PZLA_PREFER_ZERO_LENGTH_ARRAYS" />
</Match>
<Match>
<Class name="kieker.analysis.plugin.filter.forward.util.KiekerHashMap$Segment" />
<Bug pattern="SE_BAD_FIELD" />
</Match>
<Match>
<Class name="kieker.analysis.model.MetaModelHandler" />
<Method name="javaToMetaModel" params="java.util.Collection,java.util.Collection,java.util.Collection,java.util.Collection,java.lang.String,kieker.common.configuration.Configuration" returns="kieker.analysis.model.analysisMetaModel.MIProject" />
<Bug pattern="REC_CATCH_EXCEPTION" />
</Match>
<Match>
<Package name="~kieker\.analysis\.model\.analysisMetaModel.*" />
<Bug pattern="IC_SUPERCLASS_USES_SUBCLASS_DURING_INITIALIZATION" />
</Match>
<Match>
<Package name="~kieker\.analysis\.model\.analysisMetaModel.*" />
<Bug pattern="RCN_REDUNDANT_NULLCHECK_OF_NULL_VALUE" />
</Match>
<Match>
<Package name="~kieker\.analysis\.model\.analysisMetaModel.*" />
<Bug pattern="BC_UNCONFIRMED_CAST" />
</Match>
<Match>
<Package name="~kieker\.analysis\.model\.analysisMetaModel.*" />
<Bug pattern="NP_LOAD_OF_KNOWN_NULL_VALUE" />
</Match>
<Match>
<Package name="~kieker\.analysis\.model\.analysisMetaModel.*" />
<Bug pattern="MS_PKGPROTECT" />
</Match>
<Match>
<Class name="kieker.analysis.plugin.filter.flow.TraceEventRecords" />
<!--<Method name="getTraceEvents" params="" returns="kieker.common.record.flow.trace.AbstractTraceEvent" />-->
<Bug pattern="EI_EXPOSE_REP" />
</Match>
<Match>
<Class name="kieker.analysis.plugin.filter.flow.TraceEventRecords" />
<!--<Method name="<init>" params="kieker.common.record.flow.trace.Trace,kieker.common.record.flow.trace.AbstractTraceEvent" returns="void" />-->
<Bug pattern="EI_EXPOSE_REP2" />
</Match>
<Match>
<Class name="kieker.analysis.plugin.reader.database.DbReader" />
<Method name="read" params="" returns="boolean" />
<Bug pattern="SQL_NONCONSTANT_STRING_PASSED_TO_EXECUTE" />
</Match>
<Match>
<Class name="kieker.analysis.plugin.reader.database.DbReader" />
<Method name="table2record" params="java.sql.Connection,java.lang.String,java.lang.Class" returns="void" />
<Bug pattern="SQL_NONCONSTANT_STRING_PASSED_TO_EXECUTE" />
</Match>
<Match>
<Class name="kieker.analysis.plugin.reader.filesystem.FSZipReader" />
<Method name="readBinaryFile" params="java.io.DataInputStream" returns="void" />
<Bug pattern="REC_CATCH_EXCEPTION" />
</Match>
<Match>
<Class name="kieker.analysis.plugin.reader.filesystem.FSZipReader" />
<Method name="readAsciiFile" params="java.io.BufferedReader" returns="void" />
<Bug pattern="REC_CATCH_EXCEPTION" />
</Match>
<Match>
<Class name="kieker.analysis.plugin.reader.filesystem.FSDirectoryReader" />
<Method name="processNormalInputFile" params="java.io.File" returns="void" />
<Bug pattern="REC_CATCH_EXCEPTION" />
</Match>
<Match>
<Class name="kieker.analysis.plugin.reader.filesystem.FSDirectoryReader" />
<Method name="processBinaryInputFile" params="java.io.File,kieker.common.util.filesystem.BinaryCompressionMethod" returns="void" />
<Bug pattern="REC_CATCH_EXCEPTION" />
</Match>
<Match>
<Class name="kieker.analysis.plugin.reader.filesystem.FSDirectoryReader$2" />
<Bug pattern="SIC_INNER_SHOULD_BE_STATIC_ANON" />
</Match>
<Match>
<Class name="kieker.analysis.plugin.reader.filesystem.FSReader" />
<Method name="read" params="" returns="boolean" />
<Bug pattern="NN_NAKED_NOTIFY" />
</Match>
<Match>
<Class name="kieker.analysis.plugin.reader.filesystem.FSReader" />
<Method name="newMonitoringRecord" params="kieker.common.record.IMonitoringRecord" returns="boolean" />
<Bug pattern="WA_NOT_IN_LOOP" />
</Match>
<Match>
<Class name="kieker.analysis.plugin.reader.jms.JMSReader" />
<Method name="read" params="" returns="boolean" />
<Bug pattern="REC_CATCH_EXCEPTION" />
</Match>
<Match>
<Class name="kieker.analysis.plugin.reader.jmx.JMXReader" />
<Method name="read2" params="" returns="boolean" />
<Bug pattern="DE_MIGHT_IGNORE" />
</Match>
<Match>
<Class name="kieker.common.logging.LogFactory" />
<Method name="<clinit>" params="" returns="void" />
<Bug pattern="DE_MIGHT_IGNORE" />
</Match>
<Match>
<Class name="kieker.common.logging.LogImplWebguiLogging" />
<Method name="addMessage" params="java.lang.String,java.lang.String,java.lang.Throwable" returns="void" />
<Bug pattern="RV_RETURN_VALUE_IGNORED" />
</Match>
<Match>
<Class name="kieker.analysis.display.XYPlot" />
<Method name="setEntry" params="java.lang.Object, java.lang.Number" returns="void" />
<Bug pattern="RV_RETURN_VALUE_IGNORED" />
</Match>
<Match>
<Class name="kieker.monitoring.core.controller.JMXController" />
<Method name="<init>" params="kieker.common.configuration.Configuration" returns="void" />
<Bug pattern="REC_CATCH_EXCEPTION" />
</Match>
<Match>
<Class name="kieker.monitoring.core.controller.SamplingController$1" />
<Bug pattern="SIC_INNER_SHOULD_BE_STATIC_ANON" />
</Match>
<Match>
<Class name="kieker.common.util.registry.Lookup" />
<Field name="array" />
<Bug pattern="VO_VOLATILE_REFERENCE_TO_ARRAY" />
</Match>
<Match>
<Class name="kieker.common.util.registry.Registry" />
<Field name="eArrayCached" />
<Bug pattern="VO_VOLATILE_REFERENCE_TO_ARRAY" />
</Match>
<Match>
<Class name="kieker.monitoring.probe.servlet.SessionAndTraceRegistrationFilter" />
<Method name="doFilter" params="javax.servlet.ServletRequest, javax.servlet.ServletResponse, javax.servlet.FilterChain" returns="void" />
<Bug pattern="ES_COMPARING_STRINGS_WITH_EQ" />
</Match>
<Match>
<Class name="kieker.monitoring.writer.database.DBWriterHelper" />
<Method name="createTable" params="java.lang.String,java.lang.Class[]" returns="java.lang.String" />
<Bug pattern="SQL_NONCONSTANT_STRING_PASSED_TO_EXECUTE" />
</Match>
<Match>
<Class name="kieker.monitoring.writer.database.DBWriterHelper" />
<Method name="createIndexTable" params="" returns="void" />
<Bug pattern="SQL_NONCONSTANT_STRING_PASSED_TO_EXECUTE" />
</Match>
<Match>
<Class name="kieker.monitoring.writer.database.DbWriterThread" />
<Method name="consume" params="kieker.common.record.IMonitoringRecord" returns="void" />
<Bug pattern="OBL_UNSATISFIED_OBLIGATION" />
</Match>
<Match>
<Class name="kieker.monitoring.writer.database.DbWriterThread" />
<Method name="consume" params="kieker.common.record.IMonitoringRecord" returns="void" />
<Bug pattern="SQL_PREPARED_STATEMENT_GENERATED_FROM_NONCONSTANT_STRING" />
</Match>
<Match>
<Class name="kieker.monitoring.writer.database.SyncDbWriter" />
<Method name="newMonitoringRecord" params="kieker.common.record.IMonitoringRecord" returns="boolean" />
<Bug pattern="OBL_UNSATISFIED_OBLIGATION" />
</Match>
<Match>
<Class name="kieker.monitoring.writer.database.SyncDbWriter" />
<Method name="newMonitoringRecord" params="kieker.common.record.IMonitoringRecord"
returns="boolean" />
<Bug pattern="SQL_PREPARED_STATEMENT_GENERATED_FROM_NONCONSTANT_STRING" />
</Match>
<Match>
<Class name="~kieker.test.*" />
<Bug pattern="UWF_FIELD_NOT_INITIALIZED_IN_CONSTRUCTOR" />
</Match>
<Match>
<Class name="kieker.analysis.plugin.filter.record.MonitoringThroughputFilter" />
<Bug pattern="VO_VOLATILE_INCREMENT" />
</Match>
<Match>
<Class name="kieker.test.monitoring.junit.core.sampler.TestPeriodicSampling$1" />
<Bug pattern="SIC_INNER_SHOULD_BE_STATIC_ANON" />
</Match>
<Match>
<Class name="kieker.test.monitoring.junit.core.sampler.TestPeriodicSampling$2" />
<Bug pattern="SIC_INNER_SHOULD_BE_STATIC_ANON" />
</Match>
<Match>
<Class name="kieker.test.tools.junit.writeRead.filesystem.KiekerLogDirFilter$1" />
<Bug pattern="SIC_INNER_SHOULD_BE_STATIC_ANON" />
</Match>
<Match>
<Class name="kieker.test.tools.junit.bridge.AbstractConnectorTest" />
<Bug pattern="UWF_FIELD_NOT_INITIALIZED_IN_CONSTRUCTOR" />
</Match>
<Match>
<Class name="kieker.tools.KaxViz" />
<Method name="main" params="java.lang.String[]" returns="void" />
<Bug pattern="REC_CATCH_EXCEPTION" />
</Match>
<Match>
<Class name="kieker.tools.traceAnalysis.TraceAnalysisTool" />
<Method name="dispatchTasks" params="" returns="boolean" />
<Bug pattern="REC_CATCH_EXCEPTION" />
</Match>
<Match>
<Class name="kieker.tools.traceAnalysis.filter.traceReconstruction.TraceReconstructionFilter$1" />
<Bug pattern="SIC_INNER_SHOULD_BE_STATIC_ANON" />
</Match>
<Match>
<Class name="org.apache.commons.logging.impl.Jdk14LoggerPatched" />
<Method name="getLog" params="java.lang.String" returns="org.apache.commons.logging.Log" />
<Bug pattern="REC_CATCH_EXCEPTION" />
</Match>
<Match>
<Class name="kieker.evaluation.benchmark.Benchmark" />
<Method name="parseAndInitializeArguments" params="java.lang.String[]" returns="void" />
<Bug pattern="REC_CATCH_EXCEPTION" />
</Match>
<Match>
<Class name="kieker.examples.userguide.ch2bookstore.manual.BookstoreAnalysisStarter" />
<Method name="main" params="java.lang.String[]" returns="void" />
<Bug pattern="REC_CATCH_EXCEPTION" />
</Match>
<Match>
<Class name="kieker.examples.userguide.ch3and4bookstore.MyPipeReader" />
<Method name="read" params="" returns="boolean" />
<Bug pattern="REC_CATCH_EXCEPTION" />
</Match>
<Match>
<Class name="kieker.examples.userguide.ch3and4bookstore.PipeData" />
<Bug pattern="EI_EXPOSE_REP" />
</Match>
<Match>
<Class name="kieker.tools.bridge.LookupEntity" />
<Bug pattern="EI_EXPOSE_REP" />
</Match>
<Match>
<Class name="kieker.examples.userguide.ch3and4bookstore.PipeData" />
<Bug pattern="EI_EXPOSE_REP2" />
</Match>
<Match>
<Class name="kieker.tools.bridge.LookupEntity" />
<Bug pattern="EI_EXPOSE_REP2" />
</Match>
<Match>
<Class name="kieker.tools.bridge.cli.PrivilegedClassLoaderAction" />
<Bug pattern="EI_EXPOSE_REP2" />
</Match>
<Match>
<Package name="~kieker\.analysis\.model\.analysisMetaModel.*" />
<Bug pattern="NM_CONFUSING" />
</Match>
<Match>
<Class name="kieker.tools.traceAnalysis.TraceAnalysisGUI" />
<Method name="loadCurrentConfiguration" params="" returns="void" />
<Bug pattern="REC_CATCH_EXCEPTION" />
</Match>
<!-- For the public TYPES field of our records. -->
<Match>
<Class name="~kieker.common.record.*" />
<Field name="TYPES" />
<Bug pattern="MS_PKGPROTECT, MS_MUTABLE_ARRAY" />
</Match>
<Match>
<Class name="~kieker.tools.opad.record.*" />
<Field name="TYPES" />
<Bug pattern="MS_PKGPROTECT, MS_MUTABLE_ARRAY" />
</Match>
<Match>
<Class name="kieker.tools.resourceMonitor.ResourceMonitor$1" />
<Bug pattern="SIC_INNER_SHOULD_BE_STATIC_ANON" />
</Match>
<Match>
<Class name="kieker.tools.resourceMonitor.ResourceMonitor$2" />
<Bug pattern="SIC_INNER_SHOULD_BE_STATIC_ANON" />
</Match>
</FindBugsFilter>
| {
"content_hash": "fb47474f026897d9da2272a64ab86098",
"timestamp": "",
"source": "github",
"line_count": 430,
"max_line_length": 247,
"avg_line_length": 32.544186046511626,
"alnum_prop": 0.7090896098327855,
"repo_name": "HaStr/kieker",
"id": "14a7ef098162c744873adcca04c010a77fd7454c",
"size": "13994",
"binary": false,
"copies": "2",
"ref": "refs/heads/stable",
"path": "config/fb-filter.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "7208"
},
{
"name": "CSS",
"bytes": "24891"
},
{
"name": "HTML",
"bytes": "103357"
},
{
"name": "Java",
"bytes": "5376692"
},
{
"name": "Python",
"bytes": "14400"
},
{
"name": "R",
"bytes": "11710"
},
{
"name": "Roff",
"bytes": "94560"
},
{
"name": "Shell",
"bytes": "86559"
},
{
"name": "TeX",
"bytes": "215941"
}
],
"symlink_target": ""
} |
package org.jetbrains.plugins.groovy.extensions.debugger;
import com.intellij.psi.PsiFile;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.extensions.ExtensionPointName;
import com.sun.jdi.ReferenceType;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.plugins.groovy.lang.psi.GroovyFile;
/**
 * Extension point used to extend debugger functionality to handle various
 * kinds of Groovy scripts (mapping between script files and the class names
 * the JVM reports for them at runtime).
 *
 * @author ilyas
 */
public abstract class ScriptPositionManagerHelper {

  /** Extension point under which implementations are registered in plugin.xml. */
  public static final ExtensionPointName<ScriptPositionManagerHelper> EP_NAME = ExtensionPointName.create("org.intellij.groovy.positionManagerDelegate");

  /**
   * @param runtimeName class name as reported by the JVM at runtime
   * @return true if this helper recognizes the given runtime class name
   */
  public abstract boolean isAppropriateRuntimeName(@NotNull String runtimeName);

  /**
   * Maps a runtime class name back to the original script name.
   * The default implementation returns the runtime name unchanged.
   */
  @NotNull
  public String getOriginalScriptName(ReferenceType refType, @NotNull final String runtimeName) {
    return runtimeName;
  }

  /**
   * @param scriptFile candidate script file
   * @return true if this helper is able to handle the given script file
   */
  public abstract boolean isAppropriateScriptFile(@NotNull PsiFile scriptFile);

  /**
   * @return Runtime script name for the given original name
   */
  @NotNull
  public abstract String getRuntimeScriptName(@NotNull String originalName, GroovyFile groovyFile);

  /**
   * @return Possible script to debug through in project scope if no other was
   *         found by the standard methods
   */
  @Nullable
  public abstract PsiFile getExtraScriptIfNotFound(ReferenceType refType, @NotNull String runtimeName, Project project);
}
| {
"content_hash": "1430246dc7d034491019883b650cf8f7",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 153,
"avg_line_length": 34.21951219512195,
"alnum_prop": 0.8018531717747683,
"repo_name": "jexp/idea2",
"id": "b1826ce01f9a559852d11a9945e99a5c3375de0b",
"size": "2003",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "plugins/groovy/src/org/jetbrains/plugins/groovy/extensions/debugger/ScriptPositionManagerHelper.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "6350"
},
{
"name": "C#",
"bytes": "103"
},
{
"name": "C++",
"bytes": "30760"
},
{
"name": "Erlang",
"bytes": "10"
},
{
"name": "Java",
"bytes": "72888555"
},
{
"name": "JavaScript",
"bytes": "910"
},
{
"name": "PHP",
"bytes": "133"
},
{
"name": "Perl",
"bytes": "6523"
},
{
"name": "Shell",
"bytes": "4068"
}
],
"symlink_target": ""
} |
layout: project
---
| {
"content_hash": "b4b9a457bca461b56c664e28d2977ef8",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 15,
"avg_line_length": 7,
"alnum_prop": 0.6190476190476191,
"repo_name": "csdsbrusel/csdsbrusel.github.io",
"id": "23943bc5a6eec392236b1a608644d566c8853c67",
"size": "25",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "projects/faure-requiem.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "15255"
},
{
"name": "HTML",
"bytes": "5733"
}
],
"symlink_target": ""
} |
#include "lbfgs.h"

#include <cmath>
#ifdef DEBUG
#include <iterator>
#include <iostream>
#include <iomanip>
using std::cerr;
using std::endl;
using std::setw;
using std::setprecision;
#endif //DEBUG
namespace npl {
/**
 * @brief Construct an LBFGS optimizer.
 *
 * @param dim      Dimension of the state variable
 * @param valfunc  Callable computing the energy of the underlying
 *                 mathematical function
 * @param gradfunc Callable computing the gradient of the energy
 * @param callback Callable invoked at the end of each iteration (for
 *                 instance, to debug)
 */
LBFGSOpt::LBFGSOpt(size_t dim, const ValFunc& valfunc,
        const GradFunc& gradfunc, const CallBackFunc& callback) :
    Optimizer(dim, valfunc, gradfunc, callback), m_lsearch(valfunc)
{
    // start with an empty curvature history and a unit diagonal for H0^-1
    m_hist.clear();
    opt_histsize = 6;
    opt_H0inv = VectorXd::Ones(dim);

    // line-search defaults
    opt_ls_beta = 0.5;
    opt_ls_sigma = 1e-5;
    opt_ls_s = 1;
}
/**
 * @brief Construct an LBFGS optimizer with a combined value+gradient
 * function in addition to the separate ones.
 *
 * @param dim            Dimension of the state variable
 * @param valfunc        Callable computing the energy of the underlying
 *                       mathematical function
 * @param gradfunc       Callable computing the gradient of the energy
 * @param gradAndValFunc Callable computing both energy and gradient at once
 * @param callback       Callable invoked at the end of each iteration (for
 *                       instance, to debug)
 */
LBFGSOpt::LBFGSOpt(size_t dim, const ValFunc& valfunc, const GradFunc& gradfunc,
        const ValGradFunc& gradAndValFunc, const CallBackFunc& callback) :
    Optimizer(dim, valfunc, gradfunc, gradAndValFunc, callback),
    m_lsearch(valfunc)
{
    // start with an empty curvature history and a unit diagonal for H0^-1
    m_hist.clear();
    opt_histsize = 6;
    opt_H0inv = VectorXd::Ones(dim);

    // line-search defaults
    opt_ls_beta = 0.5;
    opt_ls_sigma = 1e-5;
    opt_ls_s = 1;
}
/**
 * @brief Computes H_k * g via the L-BFGS two-loop recursion.
 * Based on the algorithm from Numerical Optimization (Nocedal)
 *
 * @param gamma Scale applied to the initial inverse Hessian estimate (H0)
 * @param g     Vector to right-multiply (the current gradient)
 *
 * @return Result of right multiplying g by H_k, the inverse Hessian
 *         estimate built from the stored (rho, y, s) history
 */
VectorXd LBFGSOpt::hessFuncTwoLoop(double gamma, const VectorXd& g)
{
    VectorXd q = g;
    VectorXd alpha(m_hist.size());

    // First loop: iterate backward in time. History entries are pushed to
    // the front, so newest-first means forward in the list.
    int ii = 0;
    for(auto it = m_hist.cbegin(); it != m_hist.cend(); ++it, ++ii) {
        double rho = std::get<0>(*it);        // rho = 1 / (y . s)
        const VectorXd& y = std::get<1>(*it); // or q
        const VectorXd& s = std::get<2>(*it); // or p
        alpha[ii] = rho*s.dot(q);
        q -= alpha[ii]*y;
    }

    // Apply the (diagonal) initial inverse Hessian, scaled by gamma
    VectorXd r = opt_H0inv.cwiseProduct(q)*gamma;

    // Second loop: oldest first (reverse iteration over the list)
    ii = m_hist.size()-1;
    for(auto it = m_hist.crbegin(); it != m_hist.crend(); ++it, --ii) {
        double rho = std::get<0>(*it);
        const VectorXd& y = std::get<1>(*it); // or q
        const VectorXd& s = std::get<2>(*it); // or p
        double beta = rho*y.dot(r);
        r += s*(alpha[ii]-beta);
    }
    return r;
}
/**
* @brief Function for computing the hessian recursively
*
* @param gamma Scale of initial (H0)
* @param d Direction from right multiplication so far
* @param it Position in history list
*
* @return Direction (d) after right multiplying d by H_k, the hessian
* estimate for position it,
*/
/**
 * @brief Optimize based on a value function and gradient function
 * separately. When both gradient and value are needed it will call update,
 * when it needs just the gradient it will call gradFunc, and when it just
 * needs the value it will call valFunc. This is always the most efficient,
 * assuming there is additional cost of computing the gradient or value, but
 * its obviously more complicated.
 *
 * Paper: On the limited memory BFGS method for large scale optimization
 * By: Liu, Dong C., Nocedal, Jorge
 *
 * @return StopReason indicating which stopping criterion fired
 */
StopReason LBFGSOpt::optimize()
{
    // Resolve stopping thresholds; a negative setting disables the criterion
    // (valstop = -1 can never exceed a non-negative |delta f|).
    double gradstop = this->stop_G >= 0 ? this->stop_G : 0;
    double stepstop = this->stop_X >= 0 ? this->stop_X : 0;
    double valstop = this->stop_F >= 0 ? this->stop_F : -1;

    // propagate line-search options (minimum step and search parameters)
    m_lsearch.opt_s = opt_ls_s;
    m_lsearch.opt_minstep = stepstop;
    m_lsearch.opt_beta = opt_ls_beta;
    m_lsearch.opt_sigma = opt_ls_sigma;

    VectorXd gk(state_x.rows()); // gradient
    double f_xk;   // value at current position
    double f_xkm1; // value at previous position

    VectorXd pk; // change in x (sk in the original paper)
    VectorXd qk; // change in gradient (yk in the original paper)
    VectorXd dk; // search direction
    double gamma = 1; // scale of the initial inverse Hessian

    m_compFG(state_x, f_xk, gk);
    dk = -gk;
    for(int iter = 0; stop_Its <= 0 || iter < stop_Its; iter++) {
        // Reset history if dk is not a descent direction
        // (ie gradient and direction go the same way)
        if(gk.dot(dk) >= 0) {
            dk = -gk;
            m_hist.clear();
#ifdef DEBUG
            cerr << "Clearing LBFGS History!" << endl;
#endif //DEBUG
        }

        // compute step size via line search
        double alpha = m_lsearch.search(f_xk, state_x, gk, dk);
        pk = alpha*dk;
        if(alpha == 0 || pk.squaredNorm() < stepstop*stepstop)
            return ENDSTEP;

        // take the step
        state_x += pk;

        // update gradient and value; qk = g_{k+1} - g_k
        qk = -gk;
        f_xkm1 = f_xk;
        m_compFG(state_x, f_xk, gk);
        qk += gk;

        if(gk.squaredNorm() < gradstop*gradstop)
            return ENDGRAD;
        // BUGFIX: use std::abs (double overload from <cmath>). The previous
        // unqualified abs could resolve to the C int abs and silently
        // truncate the value change toward zero, corrupting this test.
        if(std::abs(f_xk - f_xkm1) < valstop)
            return ENDVALUE;
        if(f_xk < this->stop_F_under || f_xk > this->stop_F_over)
            return ENDABSVALUE;

        // update history (rho = 1/(qk . pk)) and cap its length
        m_hist.push_front(std::make_tuple(1./qk.dot(pk), qk, pk));
        if(m_hist.size() > opt_histsize)
            m_hist.pop_back();

        /*
         * update direction
         * qk - change in gradient (yk in original paper)
         * pk - change in x (sk in original paper)
         */
        gamma = qk.dot(pk)/qk.squaredNorm();
        dk = -hessFuncTwoLoop(gamma, gk);

        m_callback(dk, f_xk, gk, iter);
    }

    return ENDFAIL;
}
}
| {
"content_hash": "9ab41466b1a3c2ddc7dc0bec561fb2b1",
"timestamp": "",
"source": "github",
"line_count": 211,
"max_line_length": 80,
"avg_line_length": 30.412322274881518,
"alnum_prop": 0.6026180458158018,
"repo_name": "MicahChambers/cppConvexOptimizers",
"id": "f16415a43b07a4e4a475f39f7db2f72a128e67d8",
"size": "7323",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/lbfgs.cpp",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "56746"
},
{
"name": "Python",
"bytes": "4575"
}
],
"symlink_target": ""
} |
module Fastlane
class LaneManager
# @param platform The name of the platform to execute
# @param lane_name The name of the lane to execute
# @param parameters [Hash] The parameters passed from the command line to the lane
# @param env Dot Env Information
def self.cruise_lane(platform, lane, parameters = nil, env = nil)
raise 'lane must be a string' unless (lane.is_a?(String) or lane.nil?)
raise 'platform must be a string' unless (platform.is_a?(String) or platform.nil?)
raise 'parameters must be a hash' unless (parameters.is_a?(Hash) or parameters.nil?)
ff = Fastlane::FastFile.new(File.join(Fastlane::FastlaneFolder.path, 'Fastfile'))
unless (ff.is_platform_block?lane rescue false) # rescue, because this raises an exception if it can't be found at all
# maybe the user specified a default platform
# We'll only do this, if the lane specified isn't a platform, as we want to list all platforms then
platform ||= Actions.lane_context[Actions::SharedValues::DEFAULT_PLATFORM]
end
if not platform and lane
# Either, the user runs a specific lane in root or want to auto complete the available lanes for a platform
# e.g. `fastlane ios` should list all available iOS actions
if ff.is_platform_block?lane
platform = lane
lane = nil
end
end
platform, lane = choose_lane(ff, platform) unless lane
load_dot_env(env)
started = Time.now
e = nil
begin
ff.runner.execute(lane, platform, parameters)
rescue => ex
Helper.log.info 'Variable Dump:'.yellow
Helper.log.info Actions.lane_context
Helper.log.fatal ex
e = ex
end
duration = ((Time.now - started) / 60.0).round
finish_fastlane(ff, duration, e)
return ff
end
# All the finishing up that needs to be done
def self.finish_fastlane(ff, duration, error)
ff.runner.did_finish
# Finished with all the lanes
Fastlane::JUnitGenerator.generate(Fastlane::Actions.executed_actions)
print_table(Fastlane::Actions.executed_actions)
unless error
if duration > 5
Helper.log.info "fastlane.tools just saved you #{duration} minutes! 🎉".green
else
Helper.log.info 'fastlane.tools finished successfully 🎉'.green
end
else
Helper.log.fatal 'fastlane finished with errors'.red
raise error
end
end
# Print a table as summary of the executed actions
def self.print_table(actions)
require 'terminal-table'
rows = []
actions.each_with_index do |current, i|
name = current[:name][0..60]
rows << [i + 1, name, current[:time].to_i]
end
puts ""
puts Terminal::Table.new(
title: "fastlane summary".green,
headings: ["Step", "Action", "Time (in s)"],
rows: rows
)
puts ""
end
# Lane chooser if user didn't provide a lane
# @param platform: is probably nil, but user might have called `fastlane android`, and only wants to list those actions
def self.choose_lane(ff, platform)
loop do
Helper.log.error "You must provide a lane to drive. Available lanes:"
available = ff.runner.available_lanes(platform)
available.each_with_index do |lane, index|
puts "#{index + 1}) #{lane}"
end
i = $stdin.gets.strip.to_i - 1
if i >= 0 and (available[i] rescue nil)
selection = available[i]
Helper.log.info "Driving the lane #{selection}. Next time launch fastlane using `fastlane #{selection}`".yellow
platform = selection.split(' ')[0]
lane_name = selection.split(' ')[1]
unless lane_name # no specific platform, just a root lane
lane_name = platform
platform = nil
end
return platform, lane_name # yeah
end
Helper.log.error "Invalid input. Please enter the number of the lane you want to use".red
end
end
def self.load_dot_env(env)
require 'dotenv'
Actions.lane_context[Actions::SharedValues::ENVIRONMENT] = env
# Making sure the default '.env' and '.env.default' get loaded
env_file = File.join(Fastlane::FastlaneFolder.path || "", '.env')
env_default_file = File.join(Fastlane::FastlaneFolder.path || "", '.env.default')
Dotenv.load(env_file, env_default_file)
# Loads .env file for the environment passed in through options
if env
env_file = File.join(Fastlane::FastlaneFolder.path || "", ".env.#{env}")
Helper.log.info "Loading from '#{env_file}'".green
Dotenv.overload(env_file)
end
end
end
end
| {
"content_hash": "721590bf1ffdb1bc1c956708bf9af1ae",
"timestamp": "",
"source": "github",
"line_count": 138,
"max_line_length": 124,
"avg_line_length": 34.6231884057971,
"alnum_prop": 0.6280870657178735,
"repo_name": "samgreen/fastlane",
"id": "60288ea6d0cd4f36e79df8bbdd2b89addf2c5e61",
"size": "4784",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "lib/fastlane/lane_manager.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "11235"
},
{
"name": "Ruby",
"bytes": "485204"
}
],
"symlink_target": ""
} |
#
# Kernel object interface for the parallel port bus (ppbus).
# Each METHOD below is implemented by the underlying chipset driver.
#
INTERFACE ppbus;

#
# Do low level i/o operations
# (opcode selects the operation; NOTE(review): exact opcode semantics are
# defined by the implementing chipset driver -- confirm there)
#
METHOD u_char io {
	device_t dev;
	int opcode;
	u_char *addr;
	int cnt;
	u_char byte;
};

#
# Execution of a microsequence
#
METHOD int exec_microseq {
	device_t dev;
	struct ppb_microseq **ppb_microseq;
};

#
# Reset EPP timeout
#
METHOD int reset_epp {
	device_t dev;
}

#
# Set chipset mode
#
METHOD int setmode {
	device_t dev;
	int mode;
}

#
# Synchronize ECP FIFO
#
METHOD int ecp_sync {
	device_t dev;
}

#
# Do chipset dependent low level read
# (buf/len describe the destination buffer; "how" is a driver-defined flag)
#
METHOD int read {
	device_t dev;
	char *buf;
	int len;
	int how;
}

#
# Do chipset dependent low level write
# (buf/len describe the source buffer; "how" is a driver-defined flag)
#
METHOD int write {
	device_t dev;
	char *buf;
	int len;
	int how;
}
| {
"content_hash": "770ba876961415f7b2f1d457de516e8d",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 38,
"avg_line_length": 11.064516129032258,
"alnum_prop": 0.6632653061224489,
"repo_name": "dcui/FreeBSD-9.3_kernel",
"id": "128e72fb8bd6bf7974e3b0b47e06ae257f7f05d2",
"size": "2154",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sys/dev/ppbus/ppbus_if.m",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "1740660"
},
{
"name": "Awk",
"bytes": "135150"
},
{
"name": "Batchfile",
"bytes": "158"
},
{
"name": "C",
"bytes": "189969174"
},
{
"name": "C++",
"bytes": "2113755"
},
{
"name": "DTrace",
"bytes": "19810"
},
{
"name": "Forth",
"bytes": "188128"
},
{
"name": "Groff",
"bytes": "147703"
},
{
"name": "Lex",
"bytes": "65561"
},
{
"name": "Logos",
"bytes": "6310"
},
{
"name": "Makefile",
"bytes": "594606"
},
{
"name": "Mathematica",
"bytes": "9538"
},
{
"name": "Objective-C",
"bytes": "527964"
},
{
"name": "PHP",
"bytes": "2404"
},
{
"name": "Perl",
"bytes": "3348"
},
{
"name": "Python",
"bytes": "7091"
},
{
"name": "Shell",
"bytes": "43402"
},
{
"name": "SourcePawn",
"bytes": "253"
},
{
"name": "Yacc",
"bytes": "160534"
}
],
"symlink_target": ""
} |
A wrapper for the wkhtmltopdf HTML to PDF converter
| {
"content_hash": "66e9528f26f3dec5d426400b9d821a6a",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 51,
"avg_line_length": 52,
"alnum_prop": 0.8269230769230769,
"repo_name": "blobor/NWkHtmlToX",
"id": "6def138d76f5c4c3096f512ef31525a2ee353be1",
"size": "65",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
package io.reactivex.rxjava3.tck;
import org.reactivestreams.Publisher;
import org.testng.annotations.Test;
import io.reactivex.rxjava3.core.Flowable;
@Test
public class ConcatTckTest extends BaseTck<Long> {

    @Override
    public Publisher<Long> createPublisher(long elements) {
        // Split the requested count into two parts whose sizes always
        // add up to exactly `elements`, even when the count is odd.
        long firstHalf = elements / 2;
        long secondHalf = elements - firstHalf;

        return Flowable.concat(
                Flowable.fromIterable(iterate(firstHalf)),
                Flowable.fromIterable(iterate(secondHalf)));
    }
}
| {
"content_hash": "e07b9f213d1369310134cfb12c4d9c19",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 75,
"avg_line_length": 23.545454545454547,
"alnum_prop": 0.6312741312741312,
"repo_name": "ReactiveX/RxJava",
"id": "c5eb360c731e394be368312d8dd2e85beed8e88d",
"size": "1121",
"binary": false,
"copies": "2",
"ref": "refs/heads/3.x",
"path": "src/test/java/io/reactivex/rxjava3/tck/ConcatTckTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "12875"
},
{
"name": "Java",
"bytes": "16375272"
},
{
"name": "Shell",
"bytes": "3148"
}
],
"symlink_target": ""
} |
package io.novaordis.gld.api.sampler.metrics;
import io.novaordis.gld.api.sampler.metrics.MeasureUnit;
import io.novaordis.gld.api.sampler.metrics.MetricType;
/**
 * Minimal MeasureUnit implementation for tests: simply echoes back the
 * abbreviation and metric type it was constructed with.
 */
public class MockMeasureUnit implements MeasureUnit {

    // Attributes ------------------------------------------------------------

    private String abbreviation;
    private MetricType metricType;

    // Constructors ----------------------------------------------------------

    public MockMeasureUnit(String abbreviation) {
        // No metric type associated with this unit.
        this(null, abbreviation);
    }

    public MockMeasureUnit(MetricType metricType, String abbreviation) {
        this.metricType = metricType;
        this.abbreviation = abbreviation;
    }

    // MeasureUnit implementation --------------------------------------------

    @Override
    public String abbreviation() {
        return abbreviation;
    }

    @Override
    public MetricType getMetricType() {
        return metricType;
    }
}
| {
"content_hash": "4aea24e0678d9498f6f5e2d800543e29",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 120,
"avg_line_length": 35.54545454545455,
"alnum_prop": 0.319693094629156,
"repo_name": "NovaOrdis/gld",
"id": "c5589198a466b7d93695021be304b8a3d9b006ad",
"size": "2556",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "core/api/src/test/java/io/novaordis/gld/api/sampler/metrics/MockMeasureUnit.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "1983846"
},
{
"name": "Shell",
"bytes": "15581"
}
],
"symlink_target": ""
} |
# grunt-inline[](http://travis-ci.org/miniflycn/grunt-inline)
Brings externally referenced resources, such as js, css and images, into
a single file.
For example:
````
<link href="css/style.css?__inline=true" rel="stylesheet" />
````
is replaced with
````
<style>
/* contents of css/style.css */
</style>
````
Javascript references are brought inline, and images in the html
and css blocks are converted to base-64 data: urls.
By default, only urls marked with `__inline` are converted, however this
behavior can be overrided via the `tag:` option.
## Getting Started
This plugin requires Grunt `~0.4.1`
If you haven't used [Grunt](http://gruntjs.com/) before, be sure to check out the [Getting Started](http://gruntjs.com/getting-started) guide, as it explains how to create a [Gruntfile](http://gruntjs.com/sample-gruntfile) as well as install and use Grunt plugins. Once you're familiar with that process, you may install this plugin with this command:
npm install grunt-inline --save-dev
Once the plugin has been installed, it may be enabled inside your Gruntfile with this line of JavaScript:
grunt.loadNpmTasks('grunt-inline');
## The "grunt-inline" task
### Overview
In your project's Gruntfile, add a section named `inline` to the data object passed into `grunt.initConfig()`.
grunt.initConfig({
inline: {
dist: {
src: 'src/index.html',
dest: 'dist/index.html'
}
}
})
### Options
#### dest
If dest is assigned, the the source file will be copied to the destination path. eg:
`src/index.html` will be processed and then copied to `dist/index.html`
```
grunt.initConfig({
inline: {
dist: {
src: 'src/index.html',
dest: 'dist/index.html'
}
}
});
```
### cssmin
If cssmin is assigned true, `.css` will be minified before inlined.
```
grunt.initConfig({
inline: {
dist: {
options:{
cssmin: true
},
src: 'src/index.html',
dest: 'dist/index.html'
}
}
});
```
### tag (defaults to ```__inline```)
Only URLs that contain the value for ```tag``` will be inlined.
Specify ```tag: ''``` to include all urls.
```
grunt.initConfig({
inline: {
dist: {
options:{
tag: ''
},
src: 'src/index.html',
      dest: 'dist/index.html'
}
}
});
```
### inlineTagAttributes
Ability to add attributes string to inline tag.
```
grunt.initConfig({
inline: {
dist: {
options:{
inlineTagAttributes: {
js: 'data-inlined="true"', // Adds ```<script data-inlined="true">...</script>```
css: 'data-inlined="true"' // Adds ```<style data-inlined="true">...</style>```
},
src: 'src/index.html',
dest: 'dist/index.html'
}
}
});
```
### uglify
If uglify is assigned true, `.js` file will be minified before inlined.
```
grunt.initConfig({
inline: {
dist: {
options:{
uglify: true
},
src: 'src/index.html',
dest: 'dist/index.html'
}
}
});
```
### exts
Setting an exts array allows multiple file extensions to be processed as
html.
```
grunt.initConfig({
inline: {
dist: {
options:{
exts: ['jade'],
uglify: true
},
src: 'src/index.jade',
dest: 'dist/index.jade'
}
}
});
```
### Usage Examples
> config
grunt.initConfig({
inline: {
dist: {
src: 'src/index.html'
}
}
})
> src/index.html
<html>
<head>
<title>demo</title>
<link href="css/style.css?__inline=true" rel="stylesheet" />
</head>
<body>
<img src="img/icon.png?__inline=true" />
<script src="js/erport.js?__inline=true"></script>
</body>
</html>
> after `grunt inline` was run, it will be something like
<html>
<head>
<title>demo</title>
<style>
.container{
padding: 0;
}
</style>
</head>
<body>
    <!-- base64 is a terrible mess, you know… so only a little bit is shown ... -->
    <img src="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAEMAAAAYCAYAAAChg0BHAA..." />
<script>
var Report = (function(){
return {
init: function(){
}
};
})();
</script>
</body>
</html>
#### inline tag
Suppose there is an `<inline>` tag in `index.html` like bellow
```
<!-- inline tag -->
<inline src="test.html" />
```
The content of `test.html` is
```
<p>I'm inline html</p>
<span>hello world!</span>
```
Then, after the `inline` task is run, the original content in `index.html` will be replaced with
```
<p>I'm inline html</p>
<span>hello world!</span>
```
## Contributing
In lieu of a formal styleguide, take care to maintain the existing coding style. Add unit tests for any new or changed functionality. Lint and test your code using [Grunt](http://gruntjs.com/).
## Release History
* 2015-01-09 v0.3.3 bug fix: when processing files of a folder and then copy the processed content to another destination, the original files are changed unexpectedly, as mentioned in this issue [Support file globbing for input and output](https://github.com/chyingp/grunt-inline/issues/35)
* 2014-06-16 v0.3.1 bug fix: protocol-relative urls in css are messed up
* 2014-06-15 v0.3.1 bug fix: when options.tag is '', then all img tags, whose src attribute has already been inlined will be matched.
* 2014-05-19 v0.3.0 support for new options.exts
* 2014-05-19 v0.2.9 bug fix: options.tag is assigned '', bug image url in css are not converted to base64 formate
* 2014-03-06 v0.2.6 bug fix: script tags like <script src="index.js?__inline">\n</script> were not inlined
* 2014-01-31 v0.2.3 added tag option, encode url(..) images.
* 2013-10-31 v0.2.2 bug fix: img urls like 'background: url(http://www.example.com/img/bg.png)' will be transformed to 'background: url(url(http://www.example.com/img/bg.png))'
* 2013-10-30 v0.2.1 bug fix: when processing relative file path of img url in css stylesheet, forgot to transform "\" to "/" for windows users
* 2013-10-30 v0.2.0 new feature: Support for minifying js and css when they are inlined into html.
* 2013-08-30 v0.1.9 bug fix: stylesheets ended with ">" cannot be inlined
* 2013-09-02 v0.1.9 add feature: add options.dest to assign a destination path where the source file will be copied
* 2013-09-02 v0.1.8 add feature: support for `<inline>` tag
| {
"content_hash": "4a2726e2b6f406033a2cebd22b55e817",
"timestamp": "",
"source": "github",
"line_count": 248,
"max_line_length": 351,
"avg_line_length": 26.036290322580644,
"alnum_prop": 0.636053894997677,
"repo_name": "quattromani/Cypress-edenprairie-email",
"id": "5d387b7bdbfecd017d613bde31c87d3f4a9ff606",
"size": "6497",
"binary": false,
"copies": "10",
"ref": "refs/heads/master",
"path": "node_modules/grunt-inline/README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "220409"
},
{
"name": "HTML",
"bytes": "21083"
},
{
"name": "Handlebars",
"bytes": "6621"
},
{
"name": "JavaScript",
"bytes": "5068"
}
],
"symlink_target": ""
} |
package me.mervin.project.usr;
import java.io.File;
import java.io.IOException;
import me.mervin.core.Global.NetType;
import me.mervin.core.Global.NumberType;
import me.mervin.model.earthquake.RealOFC;
import me.mervin.util.D;
import me.mervin.util.plugin.EarthquakeRenormalization;
/**
 * Driver that processes an earthquake catalogue: splits it by month,
 * renormalizes each monthly network onto a lat/lon grid, runs an OFC
 * evolution on each grid network, and exports the results (including a
 * Gephi .gexf file per month).
 */
public class Earthquake {
	/**
	 * Entry point; all paths are relative to ../data/Earthquake/.
	 *
	 * @param args unused
	 */
	public static void main(String[] args) {
		String root = "../data/Earthquake/";
		String srcFile = null;
		String dstFile = null;
		String dstDir = null;
		String temp = null;
		File[] fileArr = null;
		float f = (float) 0.25;// grid granularity for the lat/lon renormalization
		int count = 500;// number of evolution (simulation) iterations
		EarthquakeRenormalization nr = new EarthquakeRenormalization();
		// Split the catalogue file into one file per month
		nr.splitFileByMonth(root+"date_zh_sc.txt", root+"/splitByMonth/");
		// Partition each monthly file into grid cells of size f
		fileArr = new File(root+"splitByMonth/").listFiles();
		for (int i = 0; i < fileArr.length; i++) {
			try {
				srcFile = fileArr[i].getCanonicalPath();
				temp = root+"temp.txt";
				dstFile = root+"splitByF/"+f+"/"+fileArr[i].getName();
				// Renormalize onto the lat/lon grid
				nr.renormalizationByLatLon(srcFile, temp, f);
				// Extract the resulting network
				nr.extractNet(temp, dstFile);
			} catch (IOException e) {
				e.printStackTrace();
			}
		}
		new File(root+"temp.txt").delete();
		// Run the OFC evolution on each renormalized monthly network
		fileArr = new File(root+"splitByF/"+f+"/").listFiles();
		for (int i = 0; i < fileArr.length; i++) {
			try {
				srcFile = fileArr[i].getCanonicalPath();
				// File name without its extension, used as the date label
				String date = fileArr[i].getName().substring(0, fileArr[i].getName().indexOf('.'));
				dstDir = root+"Evolution/"+f+"/"+date+"/";
				RealOFC ofc = new RealOFC(srcFile, NetType.UNDIRECTED, NumberType.INTEGER, 1);
				ofc.evolution(count, dstDir);
				ofc.extractNet(dstDir+"collapse.txt", dstDir+date+"-net-index.txt");
				ofc.renormalizationByIndex(dstDir+date+"-net-index.txt", dstDir+date+"-net-latlon.txt", f);
				nr.convert2Gexf(dstDir+date+"-net-latlon.txt", dstDir+date+"-net-latlon.gexf", f);// Gephi file
			} catch (IOException e) {
				e.printStackTrace();
			}
		}
	}
}
| {
"content_hash": "afbb649aa93e92f8cb5a68f3ab03d951",
"timestamp": "",
"source": "github",
"line_count": 72,
"max_line_length": 95,
"avg_line_length": 29.27777777777778,
"alnum_prop": 0.6612903225806451,
"repo_name": "mervin0502/Lizard",
"id": "25aafb1283012f04b00973bdab006f7e0e231da6",
"size": "2188",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/me/mervin/project/usr/Earthquake.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "1348252"
}
],
"symlink_target": ""
} |
const int nBorderSize = 1;
// Sets up the captions used for the tree root and each category node.
ClassView::ClassView(void)
    : treeTitle(_T("Project")),
      functionsTitle(_T("Functions")),
      typesTitle(_T("Types")),
      labelsTitle(_T("Labels")),
      variablesTitle(_T("Variables")),
      errorsTitle(_T("Errors")),
      constantsTitle(_T("Constants"))
{
}
// Ascending-order comparison predicates handed to std::sort in OnAddText.
// Each returns the result of operator< as an int (consumed as a bool by
// std::sort).
int SortByValFunction(const SemanticFunction* t1, const SemanticFunction* t2 )
{
	return (t1->function < t2->function);
}
int SortByValType(const SemanticType* t1, const SemanticType* t2 )
{
	return (t1->type < t2->type);
}
int SortByValVariable(const SemanticVariable* t1, const SemanticVariable* t2 )
{
	return (t1->variable < t2->variable);
}
int SortByValLabel(const SemanticLabel* t1, const SemanticLabel* t2 )
{
	return (t1->label < t2->label);
}
BEGIN_MESSAGE_MAP(ClassView, CBCGPDockingControlBar)
	//{{AFX_MSG_MAP(CWorkSpaceBar)
	ON_WM_CREATE()
	ON_WM_SIZE()
	ON_WM_PAINT()
	ON_WM_CONTEXTMENU()
	// Double-click on the tree control (child id 1) navigates to the symbol.
	ON_NOTIFY(NM_DBLCLK, 1, OnClick)
	// Custom messages posted to this bar (see the handlers below):
	ON_MESSAGE(WM_USER, OnAddText)        // repopulate the symbol tree
	ON_MESSAGE(WM_USER+1, OnClear)        // empty the category nodes
	ON_MESSAGE(WM_USER+2, OnErrorMessage) // show a posted message box
	//}}AFX_MSG_MAP
END_MESSAGE_MAP()
void ClassView::OnContextMenu(CWnd* pWnd, CPoint point){} // Suppress the default context menu
void ClassView::ClearExisting()
{
HTREEITEM hNextItem;
// Errors
HTREEITEM hChildItem = m_wndTree.GetChildItem(hErrors);
while (hChildItem != NULL)
{
hNextItem = m_wndTree.GetNextItem(hChildItem, TVGN_NEXT);
m_wndTree.DeleteItem(hChildItem);
hChildItem = hNextItem;
}
// Functions
hChildItem = m_wndTree.GetChildItem(hFunctions);
while (hChildItem != NULL)
{
hNextItem = m_wndTree.GetNextItem(hChildItem, TVGN_NEXT);
m_wndTree.DeleteItem(hChildItem);
hChildItem = hNextItem;
}
// Types
hChildItem = m_wndTree.GetChildItem(hTypes);
while (hChildItem != NULL)
{
hNextItem = m_wndTree.GetNextItem(hChildItem, TVGN_NEXT);
m_wndTree.DeleteItem(hChildItem);
hChildItem = hNextItem;
}
// Labels
hChildItem = m_wndTree.GetChildItem(hLabels);
while (hChildItem != NULL)
{
hNextItem = m_wndTree.GetNextItem(hChildItem, TVGN_NEXT);
m_wndTree.DeleteItem(hChildItem);
hChildItem = hNextItem;
}
// Variables
hChildItem = m_wndTree.GetChildItem(hVariables);
while (hChildItem != NULL)
{
hNextItem = m_wndTree.GetNextItem(hChildItem, TVGN_NEXT);
m_wndTree.DeleteItem(hChildItem);
hChildItem = hNextItem;
}
// Constants
hChildItem = m_wndTree.GetChildItem(hConstants);
while (hChildItem != NULL)
{
hNextItem = m_wndTree.GetNextItem(hChildItem, TVGN_NEXT);
m_wndTree.DeleteItem(hChildItem);
hChildItem = hNextItem;
}
}
// WM_USER+1 handler: empties all category nodes. Parameters are unused.
LRESULT ClassView::OnClear(WPARAM a, LPARAM b)
{
	ClearExisting();
	return TRUE;
}
// WM_USER+2 handler: shows the message text passed in LPARAM, then frees it.
LRESULT ClassView::OnErrorMessage(WPARAM a, LPARAM b)
{
	TCHAR* message = (TCHAR*)b;
	AfxMessageBox(message, MB_OK | MB_ICONINFORMATION);
	// NOTE(review): if the poster allocates this buffer with new[], this
	// should be delete[] — confirm against the code that posts WM_USER+2.
	delete message;
	return TRUE;
}
// WM_USER handler: rebuilds the tree from the parser's current symbol
// tables, restoring the user's selection by matching item text.
// Parameters are unused.
LRESULT ClassView::OnAddText(WPARAM a, LPARAM b)
{
	// Remember the currently selected item's text so it can be re-selected
	// after the tree is repopulated.
	HTREEITEM selectedTree = m_wndTree.GetSelectedItem();
	HTREEITEM newItem;
	CString selected = m_wndTree.GetItemText(selectedTree);
	ClearExisting();
	// Give up (leaving the categories empty) if the parser data is locked.
	if(!SemanticParser::m_Mutex.Lock(10))
	{
		return TRUE;
	}
	// Optionally sort each symbol table alphabetically before inserting.
	// Constants are SemanticVariable-keyed, hence SortByValVariable.
	if(Settings::CodeViewOrder == 1)
	{
		std::sort(SemanticParser::m_vFunctions.begin(), SemanticParser::m_vFunctions.end(), SortByValFunction);
		std::sort(SemanticParser::m_vTypes.begin(), SemanticParser::m_vTypes.end(), SortByValType);
		std::sort(SemanticParser::m_vLabels.begin(), SemanticParser::m_vLabels.end(), SortByValLabel);
		std::sort(SemanticParser::m_vVariables.begin(), SemanticParser::m_vVariables.end(), SortByValVariable);
		std::sort(SemanticParser::m_vConstants.begin(), SemanticParser::m_vConstants.end(), SortByValVariable);
	}
	// Errors use image index 3; all other categories use image index 5.
	for(mySEVectorIterator = SemanticParser::m_vMessages.begin(); mySEVectorIterator != SemanticParser::m_vMessages.end(); mySEVectorIterator++)
	{
		newItem = m_wndTree.InsertItem ((*mySEVectorIterator)->message, 3, 3, hErrors);
		if((*mySEVectorIterator)->message == selected)
		{
			m_wndTree.SelectItem(newItem);
		}
	}
	for(mySFVectorIterator = SemanticParser::m_vFunctions.begin(); mySFVectorIterator != SemanticParser::m_vFunctions.end(); mySFVectorIterator++)
	{
		newItem = m_wndTree.InsertItem ((*mySFVectorIterator)->definition, 5, 5, hFunctions);
		if((*mySFVectorIterator)->definition == selected)
		{
			m_wndTree.SelectItem(newItem);
		}
	}
	for(mySTVectorIterator = SemanticParser::m_vTypes.begin(); mySTVectorIterator != SemanticParser::m_vTypes.end(); mySTVectorIterator++)
	{
		newItem = m_wndTree.InsertItem ((*mySTVectorIterator)->type, 5, 5, hTypes);
		if((*mySTVectorIterator)->type == selected)
		{
			m_wndTree.SelectItem(newItem);
		}
	}
	for(mySLVectorIterator = SemanticParser::m_vLabels.begin(); mySLVectorIterator != SemanticParser::m_vLabels.end(); mySLVectorIterator++)
	{
		newItem = m_wndTree.InsertItem ((*mySLVectorIterator)->label, 5, 5, hLabels);
		if((*mySLVectorIterator)->label == selected)
		{
			m_wndTree.SelectItem(newItem);
		}
	}
	for(mySVVectorIterator = SemanticParser::m_vVariables.begin(); mySVVectorIterator != SemanticParser::m_vVariables.end(); mySVVectorIterator++)
	{
		newItem = m_wndTree.InsertItem ((*mySVVectorIterator)->variable, 5, 5, hVariables);
		if((*mySVVectorIterator)->variable == selected)
		{
			m_wndTree.SelectItem(newItem);
		}
	}
	for(mySCVectorIterator = SemanticParser::m_vConstants.begin(); mySCVectorIterator != SemanticParser::m_vConstants.end(); mySCVectorIterator++)
	{
		newItem = m_wndTree.InsertItem ((*mySCVectorIterator)->variable, 5, 5, hConstants);
		if((*mySCVectorIterator)->variable == selected)
		{
			m_wndTree.SelectItem(newItem);
		}
	}
	SemanticParser::m_Mutex.Unlock();
	return TRUE;
}
void ClassView::Clear()
{
m_wndTree.DeleteAllItems();
hRoot1 = m_wndTree.InsertItem (treeTitle, 0);
hFunctions = m_wndTree.InsertItem (functionsTitle, 2, 2, hRoot1);
hTypes = m_wndTree.InsertItem (typesTitle, 2, 2, hRoot1);
hLabels = m_wndTree.InsertItem (labelsTitle, 2, 2, hRoot1);
hVariables = m_wndTree.InsertItem (variablesTitle, 2, 2, hRoot1);
hErrors = m_wndTree.InsertItem (errorsTitle, 2, 2, hRoot1);
hConstants = m_wndTree.InsertItem (constantsTitle, 2, 2, hRoot1);
m_wndTree.Expand(hRoot1,TVE_EXPAND);
m_wndTree.Expand(hFunctions,TVE_EXPAND);
m_wndTree.Expand(hTypes,TVE_EXPAND);
m_wndTree.Expand(hLabels,TVE_EXPAND);
m_wndTree.Expand(hVariables,TVE_EXPAND);
m_wndTree.Expand(hErrors,TVE_EXPAND);
m_wndTree.Expand(hConstants,TVE_EXPAND);
}
// Double-click handler for the tree control: looks the clicked item's text
// up in the parser table matching its parent category, then highlights the
// corresponding source line in the active view.
void ClassView::OnClick( NMHDR * pNotifyStruct, LRESULT * result )
{
	// Skip navigation entirely if the parser data is locked right now.
	if(!SemanticParser::m_Mutex.Lock(1))
	{
		return;
	}
	// Hit-test the tree at the current cursor position.
	CPoint pt ;
	GetCursorPos(&pt) ;
	m_wndTree.ScreenToClient(&pt) ;
	UINT unFlags = 0 ;
	HTREEITEM hItem = m_wndTree.HitTest(pt, &unFlags) ;
	if((unFlags & TVHT_ONITEMLABEL) && hItem != NULL)
	{
		CString name = m_wndTree.GetItemText(hItem);
		// The parent node identifies which symbol table to search.
		HTREEITEM hParent = m_wndTree.GetParentItem(hItem);
		MainFrame* pMainWnd = (MainFrame*)AfxGetMainWnd();
		int line = 0;
		if(hParent == hRoot1)
		{
			// Category headers themselves are not navigable.
			SemanticParser::m_Mutex.Unlock();
			return;
		}
		else if(hParent == hErrors)
		{
			for(mySEVectorIterator = SemanticParser::m_vMessages.begin(); mySEVectorIterator != SemanticParser::m_vMessages.end(); mySEVectorIterator++)
			{
				if((*mySEVectorIterator)->message == name)
				{
					line = (*mySEVectorIterator)->line;
					break;
				}
			}
		}
		else if(hParent == hFunctions)
		{
			for(mySFVectorIterator = SemanticParser::m_vFunctions.begin(); mySFVectorIterator != SemanticParser::m_vFunctions.end(); mySFVectorIterator++)
			{
				if((*mySFVectorIterator)->definition == name)
				{
					line = (*mySFVectorIterator)->line;
					break;
				}
			}
		}
		else if(hParent == hTypes)
		{
			for(mySTVectorIterator = SemanticParser::m_vTypes.begin(); mySTVectorIterator != SemanticParser::m_vTypes.end(); mySTVectorIterator++)
			{
				if((*mySTVectorIterator)->type == name)
				{
					line = (*mySTVectorIterator)->line;
					break;
				}
			}
		}
		else if(hParent == hVariables)
		{
			for(mySVVectorIterator = SemanticParser::m_vVariables.begin(); mySVVectorIterator != SemanticParser::m_vVariables.end(); mySVVectorIterator++)
			{
				if((*mySVVectorIterator)->variable == name)
				{
					line = (*mySVVectorIterator)->line;
					break;
				}
			}
		}
		else if(hParent == hLabels)
		{
			for(mySLVectorIterator = SemanticParser::m_vLabels.begin(); mySLVectorIterator != SemanticParser::m_vLabels.end(); mySLVectorIterator++)
			{
				if((*mySLVectorIterator)->label == name)
				{
					line = (*mySLVectorIterator)->line;
					break;
				}
			}
		}
		else if(hParent == hConstants)
		{
			for(mySCVectorIterator = SemanticParser::m_vConstants.begin(); mySCVectorIterator != SemanticParser::m_vConstants.end(); mySCVectorIterator++)
			{
				if((*mySCVectorIterator)->variable == name)
				{
					line = (*mySCVectorIterator)->line;
					break;
				}
			}
		}
		// Jump the active MDI view to the resolved line (0 if not found).
		CMDIChildWnd *pChild = (CMDIChildWnd *) pMainWnd->GetActiveFrame();
		ASSERT_VALID(pChild);
		View* pView = DYNAMIC_DOWNCAST (View, pChild->GetActiveView());
		ASSERT_VALID(pView);
		if(pView != NULL)
		{
			pView->HighlightLine(line);
		}
	}
	SemanticParser::m_Mutex.Unlock();
}
// Creates the docking bar's child tree control and its image list, then
// builds the initial (empty) category skeleton.
int ClassView::OnCreate(LPCREATESTRUCT lpCreateStruct)
{
	if (CBCGPDockingControlBar::OnCreate(lpCreateStruct) == -1)
		return -1;
	// Initial 16px image list; OnChangeVisualStyle() below replaces it with
	// one matched to the current color depth.
	m_TreeImages.Create (IDB_CLASS_VIEW, 16, 0, RGB (255, 0, 0));
	CRect rectDummy;
	rectDummy.SetRectEmpty ();
	// Create tree window. Child id 1 matches the ON_NOTIFY entry in the
	// message map.
	const DWORD dwViewStyle = WS_CHILD | WS_VISIBLE | TVS_HASLINES | TVS_LINESATROOT | TVS_HASBUTTONS;
	if (!m_wndTree.Create (dwViewStyle, rectDummy, this, 1))
	{
		TRACE0("Failed to create workspace view\n");
		return -1;      // fail to create
	}
	m_wndTree.SetImageList(&m_TreeImages, TVSIL_NORMAL);
	OnChangeVisualStyle ();
	Clear();
	return 0;
}
// Keeps the tree control filling the client area, inset by the painted
// border (see OnPaint).
void ClassView::OnSize(UINT nType, int cx, int cy)
{
    CBCGPDockingControlBar::OnSize(nType, cx, cy);

    const int inset = nBorderSize;
    m_wndTree.SetWindowPos (NULL,
        inset, inset,
        cx - 2 * inset, cy - 2 * inset,
        SWP_NOACTIVATE | SWP_NOZORDER);
}
// Paints a 3D shadow border around the tree control's window rectangle.
void ClassView::OnPaint()
{
	CPaintDC dc(this); // device context for painting
	CRect rectTree;
	m_wndTree.GetWindowRect (rectTree);
	ScreenToClient (rectTree);
	// Inflate so the border sits just outside the tree (see OnSize inset).
	rectTree.InflateRect (nBorderSize, nBorderSize);
	dc.Draw3dRect (rectTree, ::GetSysColor (COLOR_3DSHADOW), ::GetSysColor (COLOR_3DSHADOW));
}
// Rebuilds the tree's image list, choosing the 24-bit bitmap when the
// display supports more than 8 bits per pixel, else the 4-bit one.
void ClassView::OnChangeVisualStyle ()
{
	m_TreeImages.DeleteImageList ();
	CClientDC dc (this);
	BOOL bIsHighColor = dc.GetDeviceCaps (BITSPIXEL) > 8;
	UINT uiBmpId = bIsHighColor ? IDB_CLASS_VIEW_24 : IDB_CLASS_VIEW;
	CBitmap bmp;
	if (!bmp.LoadBitmap (uiBmpId))
	{
		TRACE(_T ("Can't load bitmap: %x\n"), uiBmpId);
		return;
	}
	BITMAP bmpObj;
	bmp.GetBitmap (&bmpObj);
	// RGB(255,0,0) is the transparency mask color of the bitmap strips.
	UINT nFlags = ILC_MASK;
	nFlags |= (bIsHighColor) ? ILC_COLOR24 : ILC_COLOR4;
	m_TreeImages.Create (16, bmpObj.bmHeight, nFlags, 0, 0);
	m_TreeImages.Add (&bmp, RGB (255, 0, 0));
	m_wndTree.SetImageList (&m_TreeImages, TVSIL_NORMAL);
}
| {
"content_hash": "9e8be52777fd2c2b719c2fd3ab1e146a",
"timestamp": "",
"source": "github",
"line_count": 402,
"max_line_length": 145,
"avg_line_length": 27.33830845771144,
"alnum_prop": 0.6957233848953595,
"repo_name": "LeeBamberTGC/Dark-Basic-Pro",
"id": "e958b084381590dab9a0c3615741c6f70f29fe31",
"size": "11139",
"binary": false,
"copies": "1",
"ref": "refs/heads/Initial-Files",
"path": "Synergy Editor TGC/Synergy Editor/ClassView.cpp",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "3321"
},
{
"name": "C",
"bytes": "1939379"
},
{
"name": "C++",
"bytes": "17488290"
},
{
"name": "CMake",
"bytes": "40627"
},
{
"name": "CSS",
"bytes": "1341"
},
{
"name": "Dylan",
"bytes": "44"
},
{
"name": "FLUX",
"bytes": "1842"
},
{
"name": "GLSL",
"bytes": "1165"
},
{
"name": "HLSL",
"bytes": "71802"
},
{
"name": "HTML",
"bytes": "7022242"
},
{
"name": "Inno Setup",
"bytes": "346"
},
{
"name": "Logos",
"bytes": "9760642"
},
{
"name": "Lua",
"bytes": "2034"
},
{
"name": "Objective-C",
"bytes": "530684"
}
],
"symlink_target": ""
} |
/* Banner shown to users of outdated browsers. */
.browsehappy {
margin: 0.2em 0;
background: #ccc;
color: #000;
padding: 0.2em 0;
}

/* Space out content a bit */
body {
padding-top: 20px;
padding-bottom: 20px;
}

/* Everything but the jumbotron gets side spacing for mobile first views */
.header,
.marketing,
.footer {
padding-left: 15px;
padding-right: 15px;
}

/* Custom page header */
.header {
border-bottom: 1px solid #e5e5e5;
}

/* Make the masthead heading the same height as the navigation */
.header h3 {
margin-top: 0;
margin-bottom: 0;
line-height: 40px;
padding-bottom: 19px;
}

/* Custom page footer */
.footer {
padding-top: 19px;
color: #777;
border-top: 1px solid #e5e5e5;
}

.container-narrow > hr {
margin: 30px 0;
}

/* Main marketing message and sign up button */
.jumbotron {
text-align: center;
border-bottom: 1px solid #e5e5e5;
}
.jumbotron .btn {
font-size: 21px;
padding: 14px 24px;
}

/* Supporting marketing content */
.marketing {
margin: 40px 0;
}
.marketing p + h4 {
margin-top: 28px;
}

/* App-specific helpers */
.center {
text-align: center;
margin: auto;
}
.btn {
display: block;
margin: 5px auto;
}
.slider {
margin: 20px auto;
}

/* Responsive: Portrait tablets and up */
@media screen and (min-width: 768px) {
.container {
max-width: 940px;
}

/* Remove the padding we set earlier */
.header,
.marketing,
.footer {
padding-left: 0;
padding-right: 0;
}

/* Space out the masthead */
.header {
margin-bottom: 30px;
}

/* Remove the bottom border on the jumbotron for visual effect */
.jumbotron {
border-bottom: 0;
}
}
| {
"content_hash": "10b806112493245836733e822cc47e7e",
"timestamp": "",
"source": "github",
"line_count": 103,
"max_line_length": 75,
"avg_line_length": 16.42718446601942,
"alnum_prop": 0.5998817966903073,
"repo_name": "kurtbeheydt/arduino-rgbwifi",
"id": "99f18ebfb94522de30e23cedd0bdf107a7e48207",
"size": "1692",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "styles/main.css",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Arduino",
"bytes": "5092"
},
{
"name": "CSS",
"bytes": "1692"
},
{
"name": "HTML",
"bytes": "7329"
},
{
"name": "JavaScript",
"bytes": "1772"
}
],
"symlink_target": ""
} |
/*---------------------------------------------------------------------------/
/ FatFs - FAT file system module configuration file R0.08b (C)ChaN, 2011
/----------------------------------------------------------------------------/
/
/ CAUTION! Do not forget to make clean the project after any changes to
/ the configuration options.
/
/----------------------------------------------------------------------------*/
#ifndef _FFCONF
#define _FFCONF 8237 /* Revision ID */
/*---------------------------------------------------------------------------/
/ Function and Buffer Configurations
/----------------------------------------------------------------------------*/
#define _FS_TINY 0 /* 0:Normal or 1:Tiny */
/* When _FS_TINY is set to 1, FatFs uses the sector buffer in the file system
/ object instead of the sector buffer in the individual file object for file
/ data transfer. This reduces memory consumption 512 bytes each file object. */
#define _FS_READONLY 0 /* 0:Read/Write or 1:Read only */
/* Setting _FS_READONLY to 1 defines read only configuration. This removes
/ writing functions, f_write, f_sync, f_unlink, f_mkdir, f_chmod, f_rename,
/ f_truncate and useless f_getfree. */
#define _FS_MINIMIZE 0 /* 0 to 3 */
/* The _FS_MINIMIZE option defines minimization level to remove some functions.
/
/ 0: Full function.
/ 1: f_stat, f_getfree, f_unlink, f_mkdir, f_chmod, f_truncate and f_rename
/ are removed.
/ 2: f_opendir and f_readdir are removed in addition to 1.
/ 3: f_lseek is removed in addition to 2. */
#define _USE_STRFUNC 1 /* 0:Disable or 1/2:Enable */
/* To enable string functions, set _USE_STRFUNC to 1 or 2. */
#define _USE_MKFS 1 /* 0:Disable or 1:Enable */
/* To enable f_mkfs function, set _USE_MKFS to 1 and set _FS_READONLY to 0 */
#define _USE_FORWARD 1 /* 0:Disable or 1:Enable */
/* To enable f_forward function, set _USE_FORWARD to 1 and set _FS_TINY to 1. */
#define _USE_FASTSEEK 1 /* 0:Disable or 1:Enable */
/* To enable fast seek feature, set _USE_FASTSEEK to 1. */
/*---------------------------------------------------------------------------/
/ Locale and Namespace Configurations
/----------------------------------------------------------------------------*/
#define _CODE_PAGE 950
/* The _CODE_PAGE specifies the OEM code page to be used on the target system.
/ Incorrect setting of the code page can cause a file open failure.
/
/ 932 - Japanese Shift-JIS (DBCS, OEM, Windows)
/ 936 - Simplified Chinese GBK (DBCS, OEM, Windows)
/ 949 - Korean (DBCS, OEM, Windows)
/ 950 - Traditional Chinese Big5 (DBCS, OEM, Windows)
/ 1250 - Central Europe (Windows)
/ 1251 - Cyrillic (Windows)
/ 1252 - Latin 1 (Windows)
/ 1253 - Greek (Windows)
/ 1254 - Turkish (Windows)
/ 1255 - Hebrew (Windows)
/ 1256 - Arabic (Windows)
/ 1257 - Baltic (Windows)
/ 1258 - Vietnam (OEM, Windows)
/ 437 - U.S. (OEM)
/ 720 - Arabic (OEM)
/ 737 - Greek (OEM)
/ 775 - Baltic (OEM)
/ 850 - Multilingual Latin 1 (OEM)
/ 858 - Multilingual Latin 1 + Euro (OEM)
/ 852 - Latin 2 (OEM)
/ 855 - Cyrillic (OEM)
/ 866 - Russian (OEM)
/ 857 - Turkish (OEM)
/ 862 - Hebrew (OEM)
/ 874 - Thai (OEM, Windows)
/ 1 - ASCII only (Valid for non LFN cfg.)
*/
#define _USE_LFN 0 /* 0 to 3 */
#define _MAX_LFN 255 /* Maximum LFN length to handle (12 to 255) */
/* The _USE_LFN option switches the LFN support.
/
/ 0: Disable LFN feature. _MAX_LFN and _LFN_UNICODE have no effect.
/ 1: Enable LFN with static working buffer on the BSS. Always NOT reentrant.
/ 2: Enable LFN with dynamic working buffer on the STACK.
/ 3: Enable LFN with dynamic working buffer on the HEAP.
/
/ The LFN working buffer occupies (_MAX_LFN + 1) * 2 bytes. To enable LFN,
/ Unicode handling functions ff_convert() and ff_wtoupper() must be added
/ to the project. When enable to use heap, memory control functions
/ ff_memalloc() and ff_memfree() must be added to the project. */
#define _LFN_UNICODE 0 /* 0:ANSI/OEM or 1:Unicode */
/* To switch the character code set on FatFs API to Unicode,
/ enable LFN feature and set _LFN_UNICODE to 1. */
#define _FS_RPATH 2 /* 0 to 2 */
/* The _FS_RPATH option configures relative path feature.
/
/ 0: Disable relative path feature and remove related functions.
/ 1: Enable relative path. f_chdrive() and f_chdir() are available.
/ 2: f_getcwd() is available in addition to 1.
/
/ Note that output of the f_readdir function is affected by this option. */
/*---------------------------------------------------------------------------/
/ Physical Drive Configurations
/----------------------------------------------------------------------------*/
#define _VOLUMES 1
/* Number of volumes (logical drives) to be used. */
#define _MAX_SS 512 /* 512, 1024, 2048 or 4096 */
/* Maximum sector size to be handled.
/ Always set 512 for memory card and hard disk but a larger value may be
/ required for on-board flash memory, floppy disk and optical disk.
/ When _MAX_SS is larger than 512, it configures FatFs to variable sector size
/ and the GET_SECTOR_SIZE command must be implemented in the disk_ioctl function. */
#define _MULTI_PARTITION 0 /* 0:Single partition or 1:Multiple partition */
/* When set to 0, each volume is bound to the same physical drive number and
/ it can mount only the first primary partition. When it is set to 1, each volume
/ is tied to the partitions listed in VolToPart[]. */
#define _USE_ERASE 0 /* 0:Disable or 1:Enable */
/* To enable sector erase feature, set _USE_ERASE to 1. CTRL_ERASE_SECTOR command
/ should be added to the disk_ioctl function. */
/*---------------------------------------------------------------------------/
/ System Configurations
/----------------------------------------------------------------------------*/
#define _WORD_ACCESS 0 /* 0 or 1 */
/* Set 0 first and it is always compatible with all platforms. The _WORD_ACCESS
/ option defines which access method is used to the word data on the FAT volume.
/
/ 0: Byte-by-byte access.
/ 1: Word access. Do not choose this unless following condition is met.
/
/ When the byte order on the memory is big-endian or address miss-aligned word
/ access results incorrect behavior, the _WORD_ACCESS must be set to 0.
/ If it is not the case, the value can also be set to 1 to improve the
/ performance and code size. */
/* A header file that defines sync object types on the O/S, such as
/ windows.h, ucos_ii.h and semphr.h, must be included prior to ff.h. */
#define _FS_REENTRANT 0 /* 0:Disable or 1:Enable */
#define _FS_TIMEOUT 1000 /* Timeout period in unit of time ticks */
#define _SYNC_t HANDLE /* O/S dependent type of sync object. e.g. HANDLE, OS_EVENT*, ID and etc.. */
/* The _FS_REENTRANT option switches the reentrancy (thread safe) of the FatFs module.
/
/ 0: Disable reentrancy. _SYNC_t and _FS_TIMEOUT have no effect.
/ 1: Enable reentrancy. Also user provided synchronization handlers,
/ ff_req_grant, ff_rel_grant, ff_del_syncobj and ff_cre_syncobj
/ function must be added to the project. */
#define _FS_SHARE 0 /* 0:Disable or >=1:Enable */
/* To enable the file sharing feature, set _FS_SHARE to 1 or greater. The value
defines how many files can be opened simultaneously. */
#endif /* _FFCONFIG */
| {
"content_hash": "5f2e1024a8f72e6c4f54bdbbf627521f",
"timestamp": "",
"source": "github",
"line_count": 189,
"max_line_length": 102,
"avg_line_length": 39.735449735449734,
"alnum_prop": 0.5928095872170439,
"repo_name": "james54068/stm32f429_learning",
"id": "67a7dfc9cbf31bbbd810bc5de0bd8b0bf332e0aa",
"size": "7510",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "stm32f429_SDCard/sd/fatfs/ffconf.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "1781117"
},
{
"name": "C",
"bytes": "39049950"
},
{
"name": "C++",
"bytes": "2532831"
},
{
"name": "CSS",
"bytes": "270876"
},
{
"name": "JavaScript",
"bytes": "996886"
},
{
"name": "Makefile",
"bytes": "24127"
},
{
"name": "Objective-C",
"bytes": "11759"
},
{
"name": "Shell",
"bytes": "9891"
}
],
"symlink_target": ""
} |
# Base: balena Debian Bullseye build-variant image for the beaglebone-green-gateway (ARMv7).
FROM balenalib/beaglebone-green-gateway-debian:bullseye-build

# Pinned Node.js and Yarn versions installed below.
ENV NODE_VERSION 12.21.0
ENV YARN_VERSION 1.22.4

# Import the Node.js release-signing key (tried against several keyservers),
# download the armv7l Node tarball, verify its sha256, and unpack it into
# /usr/local; then download Yarn, verify its GPG signature, install it under
# /opt/yarn, and symlink the yarn/yarnpkg entry points.
RUN for key in \
	6A010C5166006599AA17F08146C2130DFD2497F5 \
	; do \
		gpg --batch --keyserver pgp.mit.edu --recv-keys "$key" || \
		gpg --batch --keyserver keyserver.pgp.com --recv-keys "$key" || \
		gpg --batch --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; \
	done \
	&& curl -SLO "http://nodejs.org/dist/v$NODE_VERSION/node-v$NODE_VERSION-linux-armv7l.tar.gz" \
	&& echo "6edc31a210e47eb72b0a2a150f7fe604539c1b2a45e8c81d378ac9315053a54f  node-v$NODE_VERSION-linux-armv7l.tar.gz" | sha256sum -c - \
	&& tar -xzf "node-v$NODE_VERSION-linux-armv7l.tar.gz" -C /usr/local --strip-components=1 \
	&& rm "node-v$NODE_VERSION-linux-armv7l.tar.gz" \
	&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz" \
	&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz.asc" \
	&& gpg --batch --verify yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
	&& mkdir -p /opt/yarn \
	&& tar -xzf yarn-v$YARN_VERSION.tar.gz -C /opt/yarn --strip-components=1 \
	&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarn \
	&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarnpkg \
	&& rm yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
	&& npm config set unsafe-perm true -g --unsafe-perm \
	&& rm -rf /tmp/*
CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"]
RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/test-stack@node.sh" \
&& echo "Running test-stack@node" \
&& chmod +x test-stack@node.sh \
&& bash test-stack@node.sh \
&& rm -rf test-stack@node.sh
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo 'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v7 \nOS: Debian Bullseye \nVariant: build variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nNode.js v12.21.0, Yarn v1.22.4 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info
RUN echo '#!/bin/sh.real\nbalena-info\nrm -f /bin/sh\ncp /bin/sh.real /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \
&& chmod +x /bin/sh-shim \
&& cp /bin/sh /bin/sh.real \
&& mv /bin/sh-shim /bin/sh | {
"content_hash": "e238f7bbac4b26eb47f5dd7dc83aba70",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 696,
"avg_line_length": 67.90243902439025,
"alnum_prop": 0.7108477011494253,
"repo_name": "nghiant2710/base-images",
"id": "e30b3ea2c8665acc88947a7febbe8f3d8cd760db",
"size": "2805",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "balena-base-images/node/beaglebone-green-gateway/debian/bullseye/12.21.0/build/Dockerfile",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "144558581"
},
{
"name": "JavaScript",
"bytes": "16316"
},
{
"name": "Shell",
"bytes": "368690"
}
],
"symlink_target": ""
} |
package azure
import (
"bytes"
"encoding/binary"
"fmt"
"net/url"
"regexp"
"sync"
"strconv"
"strings"
"sync/atomic"
"time"
storage "github.com/Azure/azure-sdk-for-go/arm/storage"
azstorage "github.com/Azure/azure-sdk-for-go/storage"
"github.com/Azure/go-autorest/autorest/to"
"github.com/golang/glog"
"github.com/rubiojr/go-vhd/vhd"
kwait "k8s.io/apimachinery/pkg/util/wait"
"k8s.io/kubernetes/pkg/volume"
)
// Constants for blob-backed (unmanaged) disks.
const (
	vhdContainerName = "vhds" // container used for VHD blobs in user-specified accounts
	useHTTPSForBlobBasedDisk = true // blob disks are always addressed over https
	blobServiceName = "blob" // Azure blob service DNS label: <account>.blob.<suffix>
)
// storageAccountState caches what the controller knows about one shared-PVC
// storage account.
type storageAccountState struct {
	name string // storage account name
	saType storage.SkuName // account SKU
	key string // cached access key; "" until fetched
	diskCount int32 // cached blob count in the default container; -1 means unknown
	isValidating int32 // 0/1 flag used as a CAS lock while account readiness is verified
	defaultContainerCreated bool // true once the default container is known to exist
}
//BlobDiskController : blob disk controller struct
// It manages blob-based (unmanaged) data disks and the shared storage accounts
// that hold them.
type BlobDiskController struct {
	common *controllerCommon // shared cloud-provider configuration and clients
	accounts map[string]*storageAccountState // cache of shared-PVC accounts, keyed by account name
}
var (
	defaultContainerName = "" // cluster-unique container name; set by setUniqueStrings
	storageAccountNamePrefix = "" // printf template for minting new account names; set by setUniqueStrings
	storageAccountNameMatch = "" // prefix used to recognize accounts owned by this cluster
	accountsLock = &sync.Mutex{} // guards membership changes of BlobDiskController.accounts
)
// newBlobDiskController builds a BlobDiskController, derives the
// cluster-unique naming strings, and seeds the account cache. A failure to
// list existing accounts is logged and tolerated: the controller starts with
// an empty cache instead of failing construction.
func newBlobDiskController(common *controllerCommon) (*BlobDiskController, error) {
	controller := &BlobDiskController{common: common}
	controller.setUniqueStrings()

	if existing, err := controller.getAllStorageAccounts(); err != nil {
		glog.Errorf("azureDisk - getAllStorageAccounts error: %v", err)
		controller.accounts = make(map[string]*storageAccountState)
	} else {
		controller.accounts = existing
	}
	return controller, nil
}
// CreateVolume creates a VHD blob in a storage account that has storageType and location using the given storage account.
// If no storage account is given, search all the storage accounts associated with the resource group and pick one that
// fits storage type and location.
// Returns (blob name, blob URI, size in GB, error).
func (c *BlobDiskController) CreateVolume(name, storageAccount, storageAccountType, location string, requestGB int) (string, string, int, error) {
	var err error
	accounts := []accountWithLocation{}
	if len(storageAccount) > 0 {
		// Caller pinned an account: use it as the single candidate.
		accounts = append(accounts, accountWithLocation{Name: storageAccount})
	} else {
		// find a storage account
		accounts, err = c.common.cloud.getStorageAccounts()
		if err != nil {
			// TODO: create a storage account and container
			return "", "", 0, err
		}
	}
	for _, account := range accounts {
		glog.V(4).Infof("account %s type %s location %s", account.Name, account.StorageType, account.Location)
		// Note precedence: the && filter binds tighter than the trailing ||,
		// so an explicitly named account always passes regardless of its
		// type/location (which are unset on the synthetic candidate above).
		if (storageAccountType == "" || account.StorageType == storageAccountType) && (location == "" || account.Location == location) || len(storageAccount) > 0 {
			// find the access key with this account
			key, err := c.common.cloud.getStorageAccesskey(account.Name)
			if err != nil {
				// No key: skip this candidate rather than failing the request.
				glog.V(2).Infof("no key found for storage account %s", account.Name)
				continue
			}
			client, err := azstorage.NewBasicClientOnSovereignCloud(account.Name, key, c.common.cloud.Environment)
			if err != nil {
				return "", "", 0, err
			}
			blobClient := client.GetBlobService()
			// create a page blob in this account's vhd container
			diskName, diskURI, err := c.createVHDBlobDisk(blobClient, account.Name, name, vhdContainerName, int64(requestGB))
			if err != nil {
				return "", "", 0, err
			}
			glog.V(4).Infof("azureDisk - created vhd blob uri: %s", diskURI)
			return diskName, diskURI, requestGB, err
		}
	}
	return "", "", 0, fmt.Errorf("failed to find a matching storage account")
}
// DeleteVolume deletes a VHD blob
// The URI is parsed into (account, blob name), the account key is looked up,
// and the blob is removed. A lease-id-missing response from the service means
// the disk is still attached somewhere and is surfaced as a
// DeletedVolumeInUse error so callers can retry later.
func (c *BlobDiskController) DeleteVolume(diskURI string) error {
	glog.V(4).Infof("azureDisk - begin to delete volume %s", diskURI)

	accountName, blob, err := c.common.cloud.getBlobNameAndAccountFromURI(diskURI)
	if err != nil {
		return fmt.Errorf("failed to parse vhd URI %v", err)
	}

	key, err := c.common.cloud.getStorageAccesskey(accountName)
	if err != nil {
		return fmt.Errorf("no key for storage account %s, err %v", accountName, err)
	}

	deleteErr := c.common.cloud.deleteVhdBlob(accountName, key, blob)
	if deleteErr == nil {
		glog.V(4).Infof("azureDisk - blob %s deleted", diskURI)
		return nil
	}

	glog.Warningf("azureDisk - failed to delete blob %s err: %v", diskURI, deleteErr)
	if strings.Contains(deleteErr.Error(), errLeaseIDMissing) {
		// disk is still being used
		// see https://msdn.microsoft.com/en-us/library/microsoft.windowsazure.storage.blob.protocol.bloberrorcodestrings.leaseidmissing.aspx
		return volume.NewDeletedVolumeInUseError(fmt.Sprintf("disk %q is still in use while being deleted", diskURI))
	}
	return fmt.Errorf("failed to delete vhd %v, account %s, blob %s, err: %v", diskURI, accountName, blob, deleteErr)
}
// get diskURI https://foo.blob.core.windows.net/vhds/bar.vhd and return foo (account) and bar.vhd (blob name)
func (c *BlobDiskController) getBlobNameAndAccountFromURI(diskURI string) (string, string, error) {
	scheme := "http"
	if useHTTPSForBlobBasedDisk {
		scheme = "https"
	}

	// Regex of the form <scheme>://(<account>).blob.<suffix>/vhds/(<blob>):
	// group 1 captures the account, group 2 the blob name.
	host := fmt.Sprintf("%s://(.*).%s.%s", scheme, blobServiceName, c.common.storageEndpointSuffix)
	reStr := fmt.Sprintf("%s/%s/(.*)", host, vhdContainerName)

	matches := regexp.MustCompile(reStr).FindStringSubmatch(diskURI)
	if len(matches) < 3 {
		return "", "", fmt.Errorf("invalid vhd URI for regex %s: %s", reStr, diskURI)
	}
	return matches[1], matches[2], nil
}
// createVHDBlobDisk creates a fixed-format VHD page blob of sizeGB gigabytes
// named vhdName(.vhd) in containerName of accountName, creating the container
// on demand, and returns the final blob name and its full https URI.
func (c *BlobDiskController) createVHDBlobDisk(blobClient azstorage.BlobStorageClient, accountName, vhdName, containerName string, sizeGB int64) (string, string, error) {
	container := blobClient.GetContainerReference(containerName)
	// Total blob length = data size plus room for the VHD footer at the end.
	size := 1024 * 1024 * 1024 * sizeGB
	vhdSize := size + vhd.VHD_HEADER_SIZE /* header size */
	// Blob name in URL must end with '.vhd' extension.
	vhdName = vhdName + ".vhd"

	tags := make(map[string]string)
	tags["createdby"] = "k8sAzureDataDisk"
	glog.V(4).Infof("azureDisk - creating page blob %s in container %s account %s", vhdName, containerName, accountName)

	blob := container.GetBlobReference(vhdName)
	blob.Properties.ContentLength = vhdSize
	blob.Metadata = tags
	err := blob.PutPageBlob(nil)
	if err != nil {
		// if container doesn't exist, create one and retry PutPageBlob
		detail := err.Error()
		if strings.Contains(detail, errContainerNotFound) {
			err = container.Create(&azstorage.CreateContainerOptions{Access: azstorage.ContainerAccessTypePrivate})
			if err == nil {
				err = blob.PutPageBlob(nil)
			}
		}
	}
	if err != nil {
		return "", "", fmt.Errorf("failed to put page blob %s in container %s: %v", vhdName, containerName, err)
	}

	// add VHD signature to the blob
	h, err := createVHDHeader(uint64(size))
	if err != nil {
		// Best-effort cleanup of the half-initialized blob.
		blob.DeleteIfExists(nil)
		return "", "", fmt.Errorf("failed to create vhd header, err: %v", err)
	}

	// The footer occupies the last VHD_HEADER_SIZE bytes of the blob,
	// i.e. the byte range [size, vhdSize).
	blobRange := azstorage.BlobRange{
		Start: uint64(size),
		End:   uint64(vhdSize - 1),
	}
	if err = blob.WriteRange(blobRange, bytes.NewBuffer(h[:vhd.VHD_HEADER_SIZE]), nil); err != nil {
		glog.Infof("azureDisk - failed to put header page for data disk %s in container %s account %s, error was %s\n",
			vhdName, containerName, accountName, err.Error())
		return "", "", err
	}

	scheme := "http"
	if useHTTPSForBlobBasedDisk {
		scheme = "https"
	}

	host := fmt.Sprintf("%s://%s.%s.%s", scheme, accountName, blobServiceName, c.common.storageEndpointSuffix)
	uri := fmt.Sprintf("%s/%s/%s", host, containerName, vhdName)
	return vhdName, uri, nil
}
// delete a vhd blob
// deleteVhdBlob removes blobName from the vhd container of the given account,
// authenticating with the supplied account key.
func (c *BlobDiskController) deleteVhdBlob(accountName, accountKey, blobName string) error {
	client, err := azstorage.NewBasicClientOnSovereignCloud(accountName, accountKey, c.common.cloud.Environment)
	if err != nil {
		return err
	}
	return client.GetBlobService().
		GetContainerReference(vhdContainerName).
		GetBlobReference(blobName).
		Delete(nil)
}
//CreateBlobDisk : create a blob disk in a node
// A suitable shared storage account for the requested SKU is chosen (or
// created), the VHD page blob is written into the cluster's default
// container, and the account's cached disk count is incremented.
func (c *BlobDiskController) CreateBlobDisk(dataDiskName string, storageAccountType storage.SkuName, sizeGB int) (string, error) {
	glog.V(4).Infof("azureDisk - creating blob data disk named:%s on StorageAccountType:%s", dataDiskName, storageAccountType)

	saName, err := c.findSANameForDisk(storageAccountType)
	if err != nil {
		return "", err
	}

	svc, err := c.getBlobSvcClient(saName)
	if err != nil {
		return "", err
	}

	_, uri, err := c.createVHDBlobDisk(svc, saName, dataDiskName, defaultContainerName, int64(sizeGB))
	if err != nil {
		return "", err
	}

	atomic.AddInt32(&c.accounts[saName].diskCount, 1)
	return uri, nil
}
//DeleteBlobDisk : delete a blob disk from a node
// Deletes the VHD blob behind diskURI. URIs pointing at accounts this
// controller does not manage (user-specified accounts) are delegated to
// DeleteVolume; for managed shared accounts the cached disk count is
// refreshed or decremented after the delete.
func (c *BlobDiskController) DeleteBlobDisk(diskURI string) error {
	storageAccountName, vhdName, err := diskNameandSANameFromURI(diskURI)
	if err != nil {
		return err
	}

	_, ok := c.accounts[storageAccountName]
	if !ok {
		// the storage account is specified by user
		glog.V(4).Infof("azureDisk - deleting volume %s", diskURI)
		return c.DeleteVolume(diskURI)
	}

	blobSvc, err := c.getBlobSvcClient(storageAccountName)
	if err != nil {
		return err
	}

	glog.V(4).Infof("azureDisk - About to delete vhd file %s on storage account %s container %s", vhdName, storageAccountName, defaultContainerName)

	container := blobSvc.GetContainerReference(defaultContainerName)
	blob := container.GetBlobReference(vhdName)
	_, err = blob.DeleteIfExists(nil)

	if c.accounts[storageAccountName].diskCount == -1 {
		// Cache miss: refresh the count from the service.
		// BUGFIX: the success/failure branches were previously inverted — the
		// (stale) count was stored when getDiskCount FAILED, while a success
		// logged the "failed to get disk count" warning and returned.
		diskCount, countErr := c.getDiskCount(storageAccountName)
		if countErr != nil {
			glog.Warningf("azureDisk - failed to get disk count for %s however the delete disk operation was ok", storageAccountName)
			return nil // we have failed to acquire a new count. not an error condition
		}
		// The listing above ran after the delete, so the refreshed count
		// already reflects the removed blob; skip the decrement below to
		// avoid double-counting the deletion.
		c.accounts[storageAccountName].diskCount = int32(diskCount)
		return err
	}

	atomic.AddInt32(&c.accounts[storageAccountName].diskCount, -1)
	return err
}
// setUniqueStrings derives the cluster-unique blob container name and the
// storage-account naming strings (match prefix and mint template) from a
// CRC32 hash of resource group + location + subscription id.
func (c *BlobDiskController) setUniqueStrings() {
	// The hash uniquely identifies this cluster's resource scope.
	hash := MakeCRC32(c.common.resourceGroup + c.common.location + c.common.subscriptionID)

	// Container name shared by all accounts created for this cluster's PVCs.
	defaultContainerName = hash

	// Accounts owned by this cluster are recognized by this prefix...
	storageAccountNameMatch = fmt.Sprintf(storageAccountNameTemplate, hash)

	// ...and new account names are minted from the prefix plus a numeric suffix.
	storageAccountNamePrefix = storageAccountNameMatch + "%s"
}
// getStorageAccountKey returns an access key for SAName, serving a cached key
// when available and otherwise fetching the ARM key named "key1" via ListKeys
// (caching it when the account is tracked).
func (c *BlobDiskController) getStorageAccountKey(SAName string) (string, error) {
	if account, exists := c.accounts[SAName]; exists && account.key != "" {
		return c.accounts[SAName].key, nil
	}

	listKeysResult, err := c.common.cloud.StorageAccountClient.ListKeys(c.common.resourceGroup, SAName)
	if err != nil {
		return "", err
	}
	if listKeysResult.Keys == nil {
		return "", fmt.Errorf("azureDisk - empty listKeysResult in storage account:%s keys", SAName)
	}
	for _, v := range *listKeysResult.Keys {
		// BUGFIX: match on the key *name* (ARM accounts expose "key1"/"key2").
		// The previous code compared the secret value itself against "key1"
		// (which never matches) and, because of misplaced braces, returned the
		// first listed key unconditionally — dereferencing v.Value without a
		// nil check.
		if v.KeyName == nil || *v.KeyName != "key1" || v.Value == nil {
			continue
		}
		if _, ok := c.accounts[SAName]; !ok {
			// Account not tracked by this controller: return without caching.
			glog.Warningf("azureDisk - account %s was not cached while getting keys", SAName)
			return *v.Value, nil
		}
		c.accounts[SAName].key = *v.Value
		return c.accounts[SAName].key, nil
	}

	return "", fmt.Errorf("couldn't find key named key1 in storage account:%s keys", SAName)
}
// getBlobSvcClient builds a blob-service client for SAName, resolving the
// account key (from cache or ARM) first.
func (c *BlobDiskController) getBlobSvcClient(SAName string) (azstorage.BlobStorageClient, error) {
	var blobSvc azstorage.BlobStorageClient

	key, err := c.getStorageAccountKey(SAName)
	if err != nil {
		return blobSvc, err
	}

	client, err := azstorage.NewBasicClientOnSovereignCloud(SAName, key, c.common.cloud.Environment)
	if err != nil {
		return blobSvc, err
	}

	return client.GetBlobService(), nil
}
// ensureDefaultContainer makes sure the cluster's default vhd container exists
// in storageAccountName, waiting (under a CAS lock) for the account to finish
// provisioning when necessary, and caches the result so repeat calls are cheap.
func (c *BlobDiskController) ensureDefaultContainer(storageAccountName string) error {
	var err error
	var blobSvc azstorage.BlobStorageClient

	// short circuit the check via local cache
	// we are forgiving the fact that account may not be in cache yet
	if v, ok := c.accounts[storageAccountName]; ok && v.defaultContainerCreated {
		return nil
	}

	// not cached, check existence and readiness
	bExist, provisionState, _ := c.getStorageAccountState(storageAccountName)

	// account does not exist
	if !bExist {
		return fmt.Errorf("azureDisk - account %s does not exist while trying to create/ensure default container", storageAccountName)
	}

	// account exists but not ready yet
	if provisionState != storage.Succeeded {
		// we don't want many attempts to validate the account readiness
		// here hence we are locking
		counter := 1
		// BUGFIX: the CAS must be retried every iteration. The previous form
		// evaluated CompareAndSwap once in the for-init clause, so when the
		// lock was held the loop could never acquire it and always timed out.
		for !atomic.CompareAndSwapInt32(&c.accounts[storageAccountName].isValidating, 0, 1) {
			time.Sleep(3 * time.Second)
			counter = counter + 1
			// check if we passed the max sleep
			if counter >= 20 {
				return fmt.Errorf("azureDisk - timeout waiting to aquire lock to validate account:%s readiness", storageAccountName)
			}
		}

		// lock acquired: release it on exit, atomically to pair with the CAS above
		defer func() {
			atomic.StoreInt32(&c.accounts[storageAccountName].isValidating, 0)
		}()

		// short circuit the check again.
		if v, ok := c.accounts[storageAccountName]; ok && v.defaultContainerCreated {
			return nil
		}

		err = kwait.ExponentialBackoff(defaultBackOff, func() (bool, error) {
			_, provisionState, err := c.getStorageAccountState(storageAccountName)

			if err != nil {
				glog.V(4).Infof("azureDisk - GetStorageAccount:%s err %s", storageAccountName, err.Error())
				return false, nil // error performing the query - retryable
			}

			if provisionState == storage.Succeeded {
				return true, nil
			}

			glog.V(4).Infof("azureDisk - GetStorageAccount:%s not ready yet (not flagged Succeeded by ARM)", storageAccountName)
			return false, nil // back off and see if the account becomes ready on next retry
		})
		// we have failed to ensure that account is ready for us to create
		// the default vhd container
		if err != nil {
			if err == kwait.ErrWaitTimeout {
				return fmt.Errorf("azureDisk - timed out waiting for storage account %s to become ready", storageAccountName)
			}
			return err
		}
	}

	if blobSvc, err = c.getBlobSvcClient(storageAccountName); err != nil {
		return err
	}

	container := blobSvc.GetContainerReference(defaultContainerName)
	bCreated, err := container.CreateIfNotExists(&azstorage.CreateContainerOptions{Access: azstorage.ContainerAccessTypePrivate})
	if err != nil {
		return err
	}
	if bCreated {
		glog.V(2).Infof("azureDisk - storage account:%s had no default container(%s) and it was created \n", storageAccountName, defaultContainerName)
	}

	// flag so we no longer have to check on ARM
	c.accounts[storageAccountName].defaultContainerCreated = true
	return nil
}
// Gets Disk counts per storage account
// getDiskCount returns the number of blobs in the account's default container,
// serving the cached value when available and otherwise refreshing the cache
// via a container listing (after ensuring the container exists).
// NOTE(review): the cached diskCount is read/written here without atomics,
// unlike the atomic updates elsewhere in this file — confirm callers tolerate
// the race.
func (c *BlobDiskController) getDiskCount(SAName string) (int, error) {
	// if we have it in cache
	if c.accounts[SAName].diskCount != -1 {
		return int(c.accounts[SAName].diskCount), nil
	}

	var err error
	var blobSvc azstorage.BlobStorageClient

	if err = c.ensureDefaultContainer(SAName); err != nil {
		return 0, err
	}

	if blobSvc, err = c.getBlobSvcClient(SAName); err != nil {
		return 0, err
	}
	params := azstorage.ListBlobsParameters{}

	container := blobSvc.GetContainerReference(defaultContainerName)
	// NOTE(review): only the blobs returned by this single listing call are
	// counted — confirm whether pagination can truncate the result.
	response, err := container.ListBlobs(params)
	if err != nil {
		return 0, err
	}
	glog.V(4).Infof("azure-Disk -  refreshed data count for account %s and found %v", SAName, len(response.Blobs))
	c.accounts[SAName].diskCount = int32(len(response.Blobs))

	return int(c.accounts[SAName].diskCount), nil
}
// getAllStorageAccounts lists the resource group's storage accounts and
// returns cache entries for those whose names carry this cluster's shared-PVC
// prefix. Disk counts start at -1 (unknown) and keys empty (lazily fetched).
func (c *BlobDiskController) getAllStorageAccounts() (map[string]*storageAccountState, error) {
	accountListResult, err := c.common.cloud.StorageAccountClient.ListByResourceGroup(c.common.resourceGroup)
	if err != nil {
		return nil, err
	}
	if accountListResult.Value == nil {
		return nil, fmt.Errorf("azureDisk - empty accountListResult")
	}

	accounts := make(map[string]*storageAccountState)
	for _, v := range *accountListResult.Value {
		// BUGFIX: the nil checks must run before *v.Name is dereferenced;
		// the previous order performed the prefix test on *v.Name first,
		// risking a nil-pointer dereference.
		if v.Name == nil || v.Sku == nil {
			glog.Infof("azureDisk - accountListResult Name or Sku is nil")
			continue
		}
		if !strings.HasPrefix(*v.Name, storageAccountNameMatch) {
			continue
		}
		glog.Infof("azureDisk - identified account %s as part of shared PVC accounts", *v.Name)

		sastate := &storageAccountState{
			name:      *v.Name,
			saType:    (*v.Sku).Name,
			diskCount: -1,
		}
		accounts[*v.Name] = sastate
	}

	return accounts, nil
}
// createStorageAccount creates storageAccountName with the given SKU and
// location unless it already exists (creation is refused when checkMaxAccounts
// is set and the account cap is reached), records it in the local cache, and
// ensures the cluster's default container exists in it.
func (c *BlobDiskController) createStorageAccount(storageAccountName string, storageAccountType storage.SkuName, location string, checkMaxAccounts bool) error {
	bExist, _, _ := c.getStorageAccountState(storageAccountName)
	if !bExist {
		// Account does not exist: create it, unless we are at the cap.
		if len(c.accounts) == maxStorageAccounts && checkMaxAccounts {
			return fmt.Errorf("azureDisk - can not create new storage account, current storage accounts count:%v Max is:%v", len(c.accounts), maxStorageAccounts)
		}

		glog.V(2).Infof("azureDisk - Creating storage account %s type %s", storageAccountName, string(storageAccountType))

		cp := storage.AccountCreateParameters{
			Sku:      &storage.Sku{Name: storageAccountType},
			Tags:     &map[string]*string{"created-by": to.StringPtr("azure-dd")},
			Location: &location}
		cancel := make(chan struct{})
		_, errChan := c.common.cloud.StorageAccountClient.Create(c.common.resourceGroup, storageAccountName, cp, cancel)
		// BUGFIX: was fmt.Errorf(fmt.Sprintf(...)) — any '%' in the wrapped
		// error text would be re-interpreted as a format verb by Errorf.
		if err := <-errChan; err != nil {
			return fmt.Errorf("Create Storage Account: %s, error: %v", storageAccountName, err)
		}
	}

	// Cache the account (whether pre-existing or just created); disk count
	// starts unknown (-1). addAccountState is a no-op for known accounts.
	c.addAccountState(storageAccountName, &storageAccountState{
		diskCount: -1,
		saType:    storageAccountType,
		name:      storageAccountName,
	})

	// finally, make sure that we default container is created
	// before handing it back over
	return c.ensureDefaultContainer(storageAccountName)
}
// finds a new suitable storageAccount for this disk
// findSANameForDisk picks (or creates) a shared storage account of the given
// SKU for a new disk. Preference order: an empty matching account, then the
// least-loaded matching account. A new account is created when none match, or
// when average utilization across matching accounts crosses the growth
// threshold and the account cap has not been reached.
func (c *BlobDiskController) findSANameForDisk(storageAccountType storage.SkuName) (string, error) {
	maxDiskCount := maxDisksPerStorageAccounts
	SAName := ""
	totalDiskCounts := 0
	countAccounts := 0 // account of this type.
	for _, v := range c.accounts {
		// filter out any stand-alone disks/accounts
		if strings.Index(v.name, storageAccountNameMatch) != 0 {
			continue
		}

		// note: we compute avg stratified by type.
		// this is to enable user to grow per SA type to avoid low
		// avg utilization on one account type skewing all data.
		if v.saType == storageAccountType {
			// compute average
			dCount, err := c.getDiskCount(v.name)
			if err != nil {
				return "", err
			}
			totalDiskCounts = totalDiskCounts + dCount
			countAccounts = countAccounts + 1
			// empty account
			if dCount == 0 {
				glog.V(2).Infof("azureDisk - account %s identified for a new disk  is because it has 0 allocated disks", v.name)
				return v.name, nil // short circuit, avg is good and no need to adjust
			}
			// if this account is less allocated
			if dCount < maxDiskCount {
				maxDiskCount = dCount
				SAName = v.name
			}
		}
	}

	// if we failed to find storageaccount
	if SAName == "" {
		glog.V(2).Infof("azureDisk - failed to identify a suitable account for new disk and will attempt to create new account")
		SAName = getAccountNameForNum(c.getNextAccountNum())
		err := c.createStorageAccount(SAName, storageAccountType, c.common.location, true)
		if err != nil {
			return "", err
		}
		return SAName, nil
	}

	disksAfter := totalDiskCounts + 1 // with the new one!

	avgUtilization := float64(disksAfter) / float64(countAccounts*maxDisksPerStorageAccounts)
	aboveAvg := (avgUtilization > storageAccountUtilizationBeforeGrowing)

	// utilization is above the growth threshold and we may still add accounts
	if aboveAvg && countAccounts < maxStorageAccounts {
		glog.V(2).Infof("azureDisk - shared storageAccounts utilzation(%v) >  grow-at-avg-utilization (%v). New storage account will be created", avgUtilization, storageAccountUtilizationBeforeGrowing)
		SAName = getAccountNameForNum(c.getNextAccountNum())
		err := c.createStorageAccount(SAName, storageAccountType, c.common.location, true)
		if err != nil {
			return "", err
		}
		return SAName, nil
	}

	// averages are not ok and we are at capacity (max storage accounts allowed)
	if aboveAvg && countAccounts == maxStorageAccounts {
		glog.Infof("azureDisk - shared storageAccounts utilzation(%v) > grow-at-avg-utilization (%v). But k8s maxed on SAs for PVC(%v). k8s will now exceed grow-at-avg-utilization without adding accounts",
			avgUtilization, storageAccountUtilizationBeforeGrowing, maxStorageAccounts)
	}

	// we found a storage accounts && [ avg are ok || we reached max sa count ]
	return SAName, nil
}
// getNextAccountNum returns one past the highest numeric suffix among the
// cached accounts owned by this cluster (stand-alone accounts are ignored).
func (c *BlobDiskController) getNextAccountNum() int {
	highest := 0

	for name := range c.accounts {
		// filter out accounts that are for standalone
		if strings.Index(name, storageAccountNameMatch) != 0 {
			continue
		}
		if n := getAccountNumFromName(name); n > highest {
			highest = n
		}
	}

	return highest + 1
}
// deleteStorageAccount removes the storage account from Azure and, on success,
// drops it from the local cache.
func (c *BlobDiskController) deleteStorageAccount(storageAccountName string) error {
	if resp, err := c.common.cloud.StorageAccountClient.Delete(c.common.resourceGroup, storageAccountName); err != nil {
		return fmt.Errorf("azureDisk - Delete of storage account '%s' failed with status %s...%v", storageAccountName, resp.Status, err)
	}

	c.removeAccountState(storageAccountName)

	glog.Infof("azureDisk - Storage Account %s was deleted", storageAccountName)
	return nil
}
//Gets storage account exist, provisionStatus, Error if any
// getStorageAccountState reports whether the account exists and, when it
// does, its ARM provisioning state.
func (c *BlobDiskController) getStorageAccountState(storageAccountName string) (bool, storage.ProvisioningState, error) {
	props, err := c.common.cloud.StorageAccountClient.GetProperties(c.common.resourceGroup, storageAccountName)
	if err != nil {
		return false, "", err
	}
	return true, props.AccountProperties.ProvisioningState, nil
}
// addAccountState records state for the given account name unless an entry
// already exists; existing entries are never overwritten.
func (c *BlobDiskController) addAccountState(key string, state *storageAccountState) {
	accountsLock.Lock()
	defer accountsLock.Unlock()

	if _, exists := c.accounts[key]; !exists {
		c.accounts[key] = state
	}
}
// removeAccountState drops the cached state for the given account name.
// Deleting a missing key is a no-op, so this is safe for untracked names.
func (c *BlobDiskController) removeAccountState(key string) {
	accountsLock.Lock()
	defer accountsLock.Unlock()
	delete(c.accounts, key)
}
// pads account num with zeros as needed
func getAccountNameForNum(num int) string {
sNum := strconv.Itoa(num)
missingZeros := 3 - len(sNum)
strZero := ""
for missingZeros > 0 {
strZero = strZero + "0"
missingZeros = missingZeros - 1
}
sNum = strZero + sNum
return fmt.Sprintf(storageAccountNamePrefix, sNum)
}
// getAccountNumFromName extracts the numeric suffix (last three characters) of
// a shared storage account name. Returns 0 when the name is shorter than
// three characters or the suffix is not numeric.
func getAccountNumFromName(accountName string) int {
	// BUGFIX: the previous version sliced accountName[len-3:] unconditionally,
	// panicking on names shorter than three characters, and discarded the
	// Atoi error (which already yielded 0 for non-numeric suffixes).
	if len(accountName) < 3 {
		return 0
	}
	num, err := strconv.Atoi(accountName[len(accountName)-3:])
	if err != nil {
		return 0
	}
	return num
}
// createVHDHeader serializes the fixed-disk VHD footer for a virtual disk of
// the given size, big-endian as the VHD format requires.
func createVHDHeader(size uint64) ([]byte, error) {
	header := vhd.CreateFixedHeader(size, &vhd.VHDOptions{})
	buf := new(bytes.Buffer)
	if err := binary.Write(buf, binary.BigEndian, header); err != nil {
		return nil, err
	}
	return buf.Bytes(), nil
}
// diskNameandSANameFromURI splits a vhd blob URI into the storage account name
// (first DNS label of the host) and the blob/disk name (last path segment).
func diskNameandSANameFromURI(diskURI string) (string, string, error) {
	parsed, err := url.Parse(diskURI)
	if err != nil {
		return "", "", err
	}

	account := strings.SplitN(parsed.Host, ".", 2)[0]

	segments := strings.Split(parsed.Path, "/")
	diskName := segments[len(segments)-1]

	return account, diskName, nil
}
| {
"content_hash": "3f3ef316ae70b39df400e4aded2474ac",
"timestamp": "",
"source": "github",
"line_count": 706,
"max_line_length": 199,
"avg_line_length": 33.36402266288952,
"alnum_prop": 0.7186160050944598,
"repo_name": "ravigadde/kubernetes",
"id": "e68c23b896c672ef015202f9da3b2c378f06ab30",
"size": "24124",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "pkg/cloudprovider/providers/azure/azure_blobDiskController.go",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "2840"
},
{
"name": "Go",
"bytes": "39431870"
},
{
"name": "HTML",
"bytes": "1199467"
},
{
"name": "Makefile",
"bytes": "73086"
},
{
"name": "Python",
"bytes": "2691185"
},
{
"name": "Ruby",
"bytes": "1782"
},
{
"name": "Shell",
"bytes": "1376067"
}
],
"symlink_target": ""
} |
// Copyright (C) Pash Contributors. License: GPL/BSD. See https://github.com/Pash-Project/Pash/
using System;
using System.Management.Automation.Host;
namespace System.Management.Automation
{
    /// <summary>
    /// Defines the host and pipeline services available to a command at run
    /// time: user confirmation (ShouldProcess/ShouldContinue), terminating and
    /// non-terminating error reporting, and writing objects, progress,
    /// verbose, debug and warning records to the output streams.
    /// </summary>
    public interface ICommandRuntime
    {
        /// <summary>Gets the host this command is executing under.</summary>
        PSHost Host { get; }
        /// <summary>Asks the user whether the command should continue processing.</summary>
        bool ShouldContinue(string query, string caption);
        /// <summary>Asks whether to continue, tracking "yes to all"/"no to all" responses across calls.</summary>
        bool ShouldContinue(string query, string caption, ref bool yesToAll, ref bool noToAll);
        /// <summary>Confirms an operation against the given target (honors -WhatIf/-Confirm semantics).</summary>
        bool ShouldProcess(string target);
        /// <summary>Confirms the named action against the given target.</summary>
        bool ShouldProcess(string target, string action);
        /// <summary>Confirms an operation using explicit verbose/warning descriptions.</summary>
        bool ShouldProcess(string verboseDescription, string verboseWarning, string caption);
        /// <summary>Confirms an operation and reports why processing should (or should not) occur.</summary>
        bool ShouldProcess(string verboseDescription, string verboseWarning, string caption, out ShouldProcessReason shouldProcessReason);
        /// <summary>Reports a terminating error and aborts the command.</summary>
        void ThrowTerminatingError(ErrorRecord errorRecord);
        /// <summary>Writes detail text about command execution.</summary>
        void WriteCommandDetail(string text);
        /// <summary>Writes a message to the debug stream.</summary>
        void WriteDebug(string text);
        /// <summary>Reports a non-terminating error; the command keeps running.</summary>
        void WriteError(ErrorRecord errorRecord);
        /// <summary>Writes a single object to the output pipeline.</summary>
        void WriteObject(object sendToPipeline);
        /// <summary>Writes an object, optionally enumerating collections into individual pipeline objects.</summary>
        void WriteObject(object sendToPipeline, bool enumerateCollection);
        /// <summary>Reports a progress update.</summary>
        void WriteProgress(ProgressRecord progressRecord);
        /// <summary>Reports a progress update correlated by source identifier.</summary>
        void WriteProgress(long sourceId, ProgressRecord progressRecord);
        /// <summary>Writes a message to the verbose stream.</summary>
        void WriteVerbose(string text);
        /// <summary>Writes a message to the warning stream.</summary>
        void WriteWarning(string text);
    }
}
| {
"content_hash": "73d1a92e395f12aa3b2243b4c7def168",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 138,
"avg_line_length": 46.285714285714285,
"alnum_prop": 0.7353395061728395,
"repo_name": "sburnicki/Pash",
"id": "12ab54361ffd5f1d7e39719a10b871187e7fe4e2",
"size": "1298",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "Source/System.Management/Automation/ICommandRuntime.cs",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "78"
},
{
"name": "C#",
"bytes": "3244932"
},
{
"name": "C++",
"bytes": "28426"
},
{
"name": "CSS",
"bytes": "6667"
},
{
"name": "HTML",
"bytes": "721068"
},
{
"name": "JavaScript",
"bytes": "1582"
},
{
"name": "Visual Basic",
"bytes": "50014"
},
{
"name": "XSLT",
"bytes": "1348"
}
],
"symlink_target": ""
} |
package org.andidev.applicationname.config.logging;
import java.io.IOException;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpSession;
/**
 * Servlet filter that populates the logging {@code MDC} with per-request data
 * (session id and username) before the request is processed and clears it
 * again afterwards, so log statements emitted while serving the request can
 * carry that context.
 */
public class MDCInsertingServletFilter implements Filter {

    @Override
    public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException {
        putDataInMDC(request);
        try {
            chain.doFilter(request, response);
        } finally {
            // Always clean up, even if the chain threw, so MDC values cannot
            // leak into unrelated requests served later by the same thread.
            removeDataFromMDC();
        }
    }

    /** Puts the session id and username of the given request into the MDC. */
    public static void putDataInMDC(ServletRequest request) {
        MDC.putSession(getSessionId(request));
        MDC.putUsername(getUsername(request));
    }

    /** Removes the session id and username from the MDC. */
    public static void removeDataFromMDC() {
        MDC.removeSession();
        MDC.removeUsername();
    }

    @Override
    public void init(FilterConfig fc) throws ServletException {
        // Do nothing
    }

    @Override
    public void destroy() {
        // Do nothing
    }

    /**
     * Returns the id of the request's HTTP session, or {@code null} if the
     * request is not an HTTP request or no session exists.
     */
    public static String getSessionId(ServletRequest request) {
        if (!(request instanceof HttpServletRequest)) {
            return null;
        }
        // Pass false so a session is never *created* as a side effect of
        // logging; the previous getSession() call eagerly created one for
        // every request that had none.
        HttpSession session = ((HttpServletRequest) request).getSession(false);
        if (session == null) {
            return null;
        }
        return session.getId();
    }

    /**
     * Returns the "username" request attribute, or {@code null} if the request
     * is not an HTTP request or the attribute is absent.
     */
    public static String getUsername(ServletRequest request) {
        if (!(request instanceof HttpServletRequest)) {
            return null;
        }
        return (String) ((HttpServletRequest) request).getAttribute("username");
    }
}
| {
"content_hash": "a501b3fb2b5e66d3ecf5cf72904b8ef5",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 132,
"avg_line_length": 27.415384615384614,
"alnum_prop": 0.67003367003367,
"repo_name": "andidev/spring-bootstrap-enterprise",
"id": "df1235125b1aec7c90a6d8e098bed6fb52f05334",
"size": "1782",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/main/java/org/andidev/applicationname/config/logging/MDCInsertingServletFilter.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "53862"
},
{
"name": "Java",
"bytes": "136748"
},
{
"name": "JavaScript",
"bytes": "976572"
},
{
"name": "Ruby",
"bytes": "1010"
}
],
"symlink_target": ""
} |
require 'spec_helper'

# Specs for the Project model's initial workflow state and contribution
# accounting.
describe Project do
  before(:each) do
    # A project still open for funding: the due date is two days away.
    @project = Project.create({name: 'The Big One',
                               funding_due: 2.days.from_now,
                               amount: 5,
                               user_id: 1,
                               gateway_id: 1})
  end

  context "new/empty project" do
    it "should start empty" do
      # A freshly created project has no contributions yet.
      @project.contributions.count.should == 0
    end

    it "should be editable" do
      @project.editable?.should be_true
    end

    it "should not be fundable" do
      @project.fundable?.should be_false
    end

    it "should not publish" do
      # Publishing from the initial state is expected to be halted by the
      # workflow engine -- presumably because the project is not yet fundable;
      # confirm against the Project workflow definition.
      expect { @project.publish! }.to raise_exception(Workflow::TransitionHalted)
    end
  end

  context "fundable project" do
    before(:each) do
      @project.contributions.create({amount: 10, gateway_id: 1})
    end

    it "should calculate the amount of authorized contributions" do
      # The newly created contribution does not count toward the authorized
      # amount, so the total is still zero.
      @project.authorized_amount.should == 0
    end
  end
end
| {
"content_hash": "1ac3a50bfd4ccb2eef2baaf0b5a558e4",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 81,
"avg_line_length": 25.256410256410255,
"alnum_prop": 0.5807106598984771,
"repo_name": "donpdonp/crowdstarter",
"id": "23b0686154c23cb0f420b4b39878dbf5f8709d99",
"size": "985",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spec/models/project_spec.rb",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "10445"
},
{
"name": "CoffeeScript",
"bytes": "2041"
},
{
"name": "HTML",
"bytes": "33329"
},
{
"name": "JavaScript",
"bytes": "43996"
},
{
"name": "Ruby",
"bytes": "74477"
}
],
"symlink_target": ""
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (1.8.0_151) on Wed Dec 04 18:43:10 MST 2019 -->
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>Uses of Class org.wildfly.swarm.config.undertow.configuration.mod_cluster.RankedAffinity (BOM: * : All 2.5.1.Final-SNAPSHOT API)</title>
<meta name="date" content="2019-12-04">
<link rel="stylesheet" type="text/css" href="../../../../../../../../stylesheet.css" title="Style">
<script type="text/javascript" src="../../../../../../../../script.js"></script>
</head>
<body>
<script type="text/javascript"><!--
try {
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class org.wildfly.swarm.config.undertow.configuration.mod_cluster.RankedAffinity (BOM: * : All 2.5.1.Final-SNAPSHOT API)";
}
}
catch(err) {
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar.top">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.top.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../../../../org/wildfly/swarm/config/undertow/configuration/mod_cluster/RankedAffinity.html" title="class in org.wildfly.swarm.config.undertow.configuration.mod_cluster">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../../../../../../../../overview-tree.html">Tree</a></li>
<li><a href="../../../../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../../../../help-doc.html">Help</a></li>
</ul>
<div class="aboutLanguage">Thorntail API, 2.5.1.Final-SNAPSHOT</div>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../../../../index.html?org/wildfly/swarm/config/undertow/configuration/mod_cluster/class-use/RankedAffinity.html" target="_top">Frames</a></li>
<li><a href="RankedAffinity.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="header">
<h2 title="Uses of Class org.wildfly.swarm.config.undertow.configuration.mod_cluster.RankedAffinity" class="title">Uses of Class<br>org.wildfly.swarm.config.undertow.configuration.mod_cluster.RankedAffinity</h2>
</div>
<div class="classUseContainer">
<ul class="blockList">
<li class="blockList">
<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing packages, and an explanation">
<caption><span>Packages that use <a href="../../../../../../../../org/wildfly/swarm/config/undertow/configuration/mod_cluster/RankedAffinity.html" title="class in org.wildfly.swarm.config.undertow.configuration.mod_cluster">RankedAffinity</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Package</th>
<th class="colLast" scope="col">Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><a href="#org.wildfly.swarm.config.undertow.configuration">org.wildfly.swarm.config.undertow.configuration</a></td>
<td class="colLast"> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><a href="#org.wildfly.swarm.config.undertow.configuration.mod_cluster">org.wildfly.swarm.config.undertow.configuration.mod_cluster</a></td>
<td class="colLast"> </td>
</tr>
</tbody>
</table>
</li>
<li class="blockList">
<ul class="blockList">
<li class="blockList"><a name="org.wildfly.swarm.config.undertow.configuration">
<!-- -->
</a>
<h3>Uses of <a href="../../../../../../../../org/wildfly/swarm/config/undertow/configuration/mod_cluster/RankedAffinity.html" title="class in org.wildfly.swarm.config.undertow.configuration.mod_cluster">RankedAffinity</a> in <a href="../../../../../../../../org/wildfly/swarm/config/undertow/configuration/package-summary.html">org.wildfly.swarm.config.undertow.configuration</a></h3>
<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
<caption><span>Methods in <a href="../../../../../../../../org/wildfly/swarm/config/undertow/configuration/package-summary.html">org.wildfly.swarm.config.undertow.configuration</a> that return <a href="../../../../../../../../org/wildfly/swarm/config/undertow/configuration/mod_cluster/RankedAffinity.html" title="class in org.wildfly.swarm.config.undertow.configuration.mod_cluster">RankedAffinity</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Method and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><code><a href="../../../../../../../../org/wildfly/swarm/config/undertow/configuration/mod_cluster/RankedAffinity.html" title="class in org.wildfly.swarm.config.undertow.configuration.mod_cluster">RankedAffinity</a></code></td>
<td class="colLast"><span class="typeNameLabel">ModCluster.ModClusterResources.</span><code><span class="memberNameLink"><a href="../../../../../../../../org/wildfly/swarm/config/undertow/configuration/ModCluster.ModClusterResources.html#rankedAffinity--">rankedAffinity</a></span>()</code>
<div class="block">Web requests will have an affinity for the first available node in a
list typically comprised of: primary owner, backup nodes, local node
(if not a primary nor backup owner).</div>
</td>
</tr>
</tbody>
</table>
<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
<caption><span>Methods in <a href="../../../../../../../../org/wildfly/swarm/config/undertow/configuration/package-summary.html">org.wildfly.swarm.config.undertow.configuration</a> with parameters of type <a href="../../../../../../../../org/wildfly/swarm/config/undertow/configuration/mod_cluster/RankedAffinity.html" title="class in org.wildfly.swarm.config.undertow.configuration.mod_cluster">RankedAffinity</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Method and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><code><a href="../../../../../../../../org/wildfly/swarm/config/undertow/configuration/ModCluster.html" title="type parameter in ModCluster">T</a></code></td>
<td class="colLast"><span class="typeNameLabel">ModCluster.</span><code><span class="memberNameLink"><a href="../../../../../../../../org/wildfly/swarm/config/undertow/configuration/ModCluster.html#rankedAffinity-org.wildfly.swarm.config.undertow.configuration.mod_cluster.RankedAffinity-">rankedAffinity</a></span>(<a href="../../../../../../../../org/wildfly/swarm/config/undertow/configuration/mod_cluster/RankedAffinity.html" title="class in org.wildfly.swarm.config.undertow.configuration.mod_cluster">RankedAffinity</a> value)</code>
<div class="block">Web requests will have an affinity for the first available node in a list
typically comprised of: primary owner, backup nodes, local node (if not a
primary nor backup owner).</div>
</td>
</tr>
</tbody>
</table>
</li>
<li class="blockList"><a name="org.wildfly.swarm.config.undertow.configuration.mod_cluster">
<!-- -->
</a>
<h3>Uses of <a href="../../../../../../../../org/wildfly/swarm/config/undertow/configuration/mod_cluster/RankedAffinity.html" title="class in org.wildfly.swarm.config.undertow.configuration.mod_cluster">RankedAffinity</a> in <a href="../../../../../../../../org/wildfly/swarm/config/undertow/configuration/mod_cluster/package-summary.html">org.wildfly.swarm.config.undertow.configuration.mod_cluster</a></h3>
<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing classes, and an explanation">
<caption><span>Classes in <a href="../../../../../../../../org/wildfly/swarm/config/undertow/configuration/mod_cluster/package-summary.html">org.wildfly.swarm.config.undertow.configuration.mod_cluster</a> with type parameters of type <a href="../../../../../../../../org/wildfly/swarm/config/undertow/configuration/mod_cluster/RankedAffinity.html" title="class in org.wildfly.swarm.config.undertow.configuration.mod_cluster">RankedAffinity</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Class and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><code>class </code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../../org/wildfly/swarm/config/undertow/configuration/mod_cluster/RankedAffinity.html" title="class in org.wildfly.swarm.config.undertow.configuration.mod_cluster">RankedAffinity</a><T extends <a href="../../../../../../../../org/wildfly/swarm/config/undertow/configuration/mod_cluster/RankedAffinity.html" title="class in org.wildfly.swarm.config.undertow.configuration.mod_cluster">RankedAffinity</a><T>></span></code>
<div class="block">Web requests will have an affinity for the first available node in a list
typically comprised of: primary owner, backup nodes, local node (if not a
primary nor backup owner).</div>
</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>interface </code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../../org/wildfly/swarm/config/undertow/configuration/mod_cluster/RankedAffinityConsumer.html" title="interface in org.wildfly.swarm.config.undertow.configuration.mod_cluster">RankedAffinityConsumer</a><T extends <a href="../../../../../../../../org/wildfly/swarm/config/undertow/configuration/mod_cluster/RankedAffinity.html" title="class in org.wildfly.swarm.config.undertow.configuration.mod_cluster">RankedAffinity</a><T>></span></code> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>interface </code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../../org/wildfly/swarm/config/undertow/configuration/mod_cluster/RankedAffinitySupplier.html" title="interface in org.wildfly.swarm.config.undertow.configuration.mod_cluster">RankedAffinitySupplier</a><T extends <a href="../../../../../../../../org/wildfly/swarm/config/undertow/configuration/mod_cluster/RankedAffinity.html" title="class in org.wildfly.swarm.config.undertow.configuration.mod_cluster">RankedAffinity</a>></span></code> </td>
</tr>
</tbody>
</table>
<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
<caption><span>Methods in <a href="../../../../../../../../org/wildfly/swarm/config/undertow/configuration/mod_cluster/package-summary.html">org.wildfly.swarm.config.undertow.configuration.mod_cluster</a> that return <a href="../../../../../../../../org/wildfly/swarm/config/undertow/configuration/mod_cluster/RankedAffinity.html" title="class in org.wildfly.swarm.config.undertow.configuration.mod_cluster">RankedAffinity</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Method and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><code><a href="../../../../../../../../org/wildfly/swarm/config/undertow/configuration/mod_cluster/RankedAffinity.html" title="class in org.wildfly.swarm.config.undertow.configuration.mod_cluster">RankedAffinity</a></code></td>
<td class="colLast"><span class="typeNameLabel">RankedAffinitySupplier.</span><code><span class="memberNameLink"><a href="../../../../../../../../org/wildfly/swarm/config/undertow/configuration/mod_cluster/RankedAffinitySupplier.html#get--">get</a></span>()</code>
<div class="block">Constructed instance of RankedAffinity resource</div>
</td>
</tr>
</tbody>
</table>
</li>
</ul>
</li>
</ul>
</div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar.bottom">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.bottom.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../../../../org/wildfly/swarm/config/undertow/configuration/mod_cluster/RankedAffinity.html" title="class in org.wildfly.swarm.config.undertow.configuration.mod_cluster">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../../../../../../../../overview-tree.html">Tree</a></li>
<li><a href="../../../../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../../../../help-doc.html">Help</a></li>
</ul>
<div class="aboutLanguage">Thorntail API, 2.5.1.Final-SNAPSHOT</div>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../../../../index.html?org/wildfly/swarm/config/undertow/configuration/mod_cluster/class-use/RankedAffinity.html" target="_top">Frames</a></li>
<li><a href="RankedAffinity.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<p class="legalCopy"><small>Copyright © 2019 <a href="http://www.jboss.org">JBoss by Red Hat</a>. All rights reserved.</small></p>
</body>
</html>
| {
"content_hash": "abe41b4f241962624291c5aff66d64f5",
"timestamp": "",
"source": "github",
"line_count": 238,
"max_line_length": 545,
"avg_line_length": 62.28151260504202,
"alnum_prop": 0.6809687647574715,
"repo_name": "wildfly-swarm/wildfly-swarm-javadocs",
"id": "e8adeec8b608ffdef15b94c1ca5fb7a21a0414bb",
"size": "14823",
"binary": false,
"copies": "1",
"ref": "refs/heads/gh-pages",
"path": "2.5.1.Final-SNAPSHOT/apidocs/org/wildfly/swarm/config/undertow/configuration/mod_cluster/class-use/RankedAffinity.html",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
title: Hugo support in Atom
repo: mattstratton/language-hugo
date: 2018-04-20 15:30:49 -0400
description: Hugo grammar, snippets and shortcodes
tools:
- Helper
license: Open Source
related_tools:
- tool/hugo.md
tags:
urls:
website: https://atom.io/packages/language-hugo
github: https://github.com/mattstratton/language-hugo
twitter: ''
other: ''
resources: []
cat_test: ''
---
| {
"content_hash": "50d85987a9826371e64049d23b49fc92",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 55,
"avg_line_length": 19.4,
"alnum_prop": 0.729381443298969,
"repo_name": "budparr/thenewdynamic",
"id": "9315571ce5697e62beb0d5780c70c95344d480ef",
"size": "392",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "content/tool/atom-language-hugo.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "79576"
},
{
"name": "HTML",
"bytes": "60524"
},
{
"name": "JavaScript",
"bytes": "2712"
},
{
"name": "Ruby",
"bytes": "3024"
}
],
"symlink_target": ""
} |
package core
import (
"k8s.io/autoscaler/cluster-autoscaler/cloudprovider/oci/oci-go-sdk/v43/common"
)
// CreateClusterNetworkInstancePoolDetails The data to create an instance pool in a cluster network.
//
// Optional fields are pointers / maps (with `mandatory:"false"` struct tags)
// so that an unset value can be distinguished from a zero value when the
// request is serialized.
type CreateClusterNetworkInstancePoolDetails struct {

	// The OCID (https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the instance configuration
	// associated with the instance pool.
	InstanceConfigurationId *string `mandatory:"true" json:"instanceConfigurationId"`

	// The number of instances that should be in the instance pool.
	Size *int `mandatory:"true" json:"size"`

	// Defined tags for this resource. Each key is predefined and scoped to a
	// namespace. For more information, see Resource Tags (https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm).
	// Example: `{"Operations": {"CostCenter": "42"}}`
	DefinedTags map[string]map[string]interface{} `mandatory:"false" json:"definedTags"`

	// A user-friendly name. Does not have to be unique. Avoid entering confidential information.
	DisplayName *string `mandatory:"false" json:"displayName"`

	// Free-form tags for this resource. Each tag is a simple key-value pair with no
	// predefined name, type, or namespace. For more information, see Resource Tags (https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm).
	// Example: `{"Department": "Finance"}`
	FreeformTags map[string]string `mandatory:"false" json:"freeformTags"`
}
// String returns a human-readable representation of the struct, delegating
// to the SDK's common.PointerString helper.
func (m CreateClusterNetworkInstancePoolDetails) String() string {
	formatted := common.PointerString(m)
	return formatted
}
| {
"content_hash": "43168756d2985eb97d1a2fa0605b0d67",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 160,
"avg_line_length": 47.84848484848485,
"alnum_prop": 0.7701076630778974,
"repo_name": "mmerrill3/autoscaler",
"id": "976fe37f922be4b0d4adaf97011f4f6a71f95470",
"size": "2402",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "cluster-autoscaler/cloudprovider/oci/oci-go-sdk/v43/core/create_cluster_network_instance_pool_details.go",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "5400"
},
{
"name": "Go",
"bytes": "14677138"
},
{
"name": "Makefile",
"bytes": "17449"
},
{
"name": "Mustache",
"bytes": "3660"
},
{
"name": "Python",
"bytes": "20835"
},
{
"name": "Ruby",
"bytes": "1255"
},
{
"name": "Shell",
"bytes": "50146"
}
],
"symlink_target": ""
} |
package org.elasticsearch.index.translog;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TwoPhaseCommit;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.RamUsageEstimator;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.bytes.ReleasablePagedBytesReference;
import org.elasticsearch.common.io.stream.ReleasableBytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.lucene.uid.Versions;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.common.util.concurrent.FutureUtils;
import org.elasticsearch.common.util.concurrent.ReleasableLock;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.shard.AbstractIndexShardComponent;
import org.elasticsearch.index.shard.IndexShardComponent;
import java.io.Closeable;
import java.io.EOFException;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.nio.file.StandardOpenOption;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* A Translog is a per index shard component that records all non-committed index operations in a durable manner.
* In Elasticsearch there is one Translog instance per {@link org.elasticsearch.index.engine.InternalEngine}. The engine
 * records the current translog generation {@link Translog#getGeneration()} in its commit metadata using {@link #TRANSLOG_GENERATION_KEY}
* to reference the generation that contains all operations that have not yet successfully been committed to the engines lucene index.
* Additionally, since Elasticsearch 2.0 the engine also records a {@link #TRANSLOG_UUID_KEY} with each commit to ensure a strong association
 * between the lucene index and the transaction log file. This UUID is used to prevent accidental recovery from a transaction log that belongs to a
* different engine.
* <p>
* Each Translog has only one translog file open at any time referenced by a translog generation ID. This ID is written to a <tt>translog.ckp</tt> file that is designed
* to fit in a single disk block such that a write of the file is atomic. The checkpoint file is written on each fsync operation of the translog and records the number of operations
 * written, the current translog's file generation and its fsynced offset in bytes.
* </p>
* <p>
 * When a translog is opened the checkpoint is used to retrieve the latest translog file generation and subsequently to open the last written file to recover operations.
* The {@link org.elasticsearch.index.translog.Translog.TranslogGeneration}, given when the translog is opened / constructed is compared against
 * the latest generation and all consecutive translog files since the given generation and the last generation in the checkpoint will be recovered and preserved until the next
* generation is committed using {@link Translog#commit()}. In the common case the translog file generation in the checkpoint and the generation passed to the translog on creation are
* the same. The only situation when they can be different is when an actual translog commit fails in between {@link Translog#prepareCommit()} and {@link Translog#commit()}. In such a case
* the currently being committed translog file will not be deleted since it's commit was not successful. Yet, a new/current translog file is already opened at that point such that there is more than
 * one translog file present. Such an uncommitted translog file always has a <tt>translog-${gen}.ckp</tt> associated with it which is an fsynced copy of its last <tt>translog.ckp</tt> such that in
 * disaster recovery last fsynced offsets, number of operations etc. are still preserved.
* </p>
*/
public class Translog extends AbstractIndexShardComponent implements IndexShardComponent, Closeable, TwoPhaseCommit {
/*
* TODO
* - we might need something like a deletion policy to hold on to more than one translog eventually (I think sequence IDs needs this) but we can refactor as we go
* - use a simple BufferedOutputStream to write stuff and fold BufferedTranslogWriter into it's super class... the tricky bit is we need to be able to do random access reads even from the buffer
* - we need random exception on the FileSystem API tests for all this.
* - we need to page align the last write before we sync, we can take advantage of ensureSynced for this since we might have already fsynced far enough
*/
// Commit user-data key under which the engine records the current translog generation.
public static final String TRANSLOG_GENERATION_KEY = "translog_generation";
// Commit user-data key under which the engine records the translog UUID,
// tying a Lucene index to exactly one translog.
public static final String TRANSLOG_UUID_KEY = "translog_uuid";
public static final String TRANSLOG_FILE_PREFIX = "translog-";
public static final String TRANSLOG_FILE_SUFFIX = ".tlog";
public static final String CHECKPOINT_SUFFIX = ".ckp";
public static final String CHECKPOINT_FILE_NAME = "translog" + CHECKPOINT_SUFFIX;

// Matches strict translog file names of the form "translog-<generation>.tlog",
// capturing the numeric generation.
static final Pattern PARSE_STRICT_ID_PATTERN = Pattern.compile("^" + TRANSLOG_FILE_PREFIX + "(\\d+)(\\.tlog)$");

// the list of translog readers is guaranteed to be in order of translog generation
private final List<TranslogReader> readers = new ArrayList<>();
private volatile ScheduledFuture<?> syncScheduler;
// this is a concurrent set and is not protected by any of the locks. The main reason
// is that is being accessed by two separate classes (additions & reading are done by Translog, remove by View when closed)
private final Set<View> outstandingViews = ConcurrentCollections.newConcurrentSet();
private BigArrays bigArrays;
protected final ReleasableLock readLock;
protected final ReleasableLock writeLock;
private final Path location;
// Writer for the generation currently being appended to.
private TranslogWriter current;

private final static long NOT_SET_GENERATION = -1; // -1 is safe as it will not cause a translog deletion.

private volatile long currentCommittingGeneration = NOT_SET_GENERATION;
private volatile long lastCommittedTranslogFileGeneration = NOT_SET_GENERATION;
private final AtomicBoolean closed = new AtomicBoolean();
private final TranslogConfig config;
// UUID binding this translog to its engine; recorded in commits under TRANSLOG_UUID_KEY.
private final String translogUUID;
/**
* Creates a new Translog instance. This method will create a new transaction log unless the given {@link TranslogConfig} has
* a non-null {@link org.elasticsearch.index.translog.Translog.TranslogGeneration}. If the generation is null this method
* us destructive and will delete all files in the translog path given.
*
* @param config the configuration of this translog
* @param translogGeneration the translog generation to open. If this is <code>null</code> a new translog is created. If non-null
* the translog tries to open the given translog generation. The generation is treated as the last generation referenced
* form already committed data. This means all operations that have not yet been committed should be in the translog
* file referenced by this generation. The translog creation will fail if this generation can't be opened.
*
* @see TranslogConfig#getTranslogPath()
*
*/
    public Translog(TranslogConfig config, TranslogGeneration translogGeneration) throws IOException {
        super(config.getShardId(), config.getIndexSettings());
        this.config = config;
        if (translogGeneration == null || translogGeneration.translogUUID == null) { // legacy case
            // no generation (fresh translog) or a pre-UUID translog: mint a new UUID
            translogUUID = UUIDs.randomBase64UUID();
        } else {
            translogUUID = translogGeneration.translogUUID;
        }
        bigArrays = config.getBigArrays();
        ReadWriteLock rwl = new ReentrantReadWriteLock();
        readLock = new ReleasableLock(rwl.readLock());
        writeLock = new ReleasableLock(rwl.writeLock());
        this.location = config.getTranslogPath();
        Files.createDirectories(this.location);
        try {
            if (translogGeneration != null) {
                // recovery path: reopen the generations referenced by the given translog generation
                final Checkpoint checkpoint = readCheckpoint();
                final Path nextTranslogFile = location.resolve(getFilename(checkpoint.generation + 1));
                final Path currentCheckpointFile = location.resolve(getCommitCheckpointFileName(checkpoint.generation));
                // this is special handling for error condition when we create a new writer but we fail to bake
                // the newly written file (generation+1) into the checkpoint. This is still a valid state
                // we just need to cleanup before we continue
                // we hit this before and then blindly deleted the new generation even though we managed to bake it in and then hit this:
                // https://discuss.elastic.co/t/cannot-recover-index-because-of-missing-tanslog-files/38336 as an example
                //
                // For this to happen we must have already copied the translog.ckp file into translog-gen.ckp so we first check if that file exists
                // if not we don't even try to clean it up and wait until we fail creating it
                assert Files.exists(nextTranslogFile) == false || Files.size(nextTranslogFile) <= TranslogWriter.getHeaderLength(translogUUID) : "unexpected translog file: [" + nextTranslogFile + "]";
                if (Files.exists(currentCheckpointFile) // current checkpoint is already copied
                        && Files.deleteIfExists(nextTranslogFile)) { // delete it and log a warning
                    logger.warn("deleted previously created, but not yet committed, next generation [{}]. This can happen due to a tragic exception when creating a new generation", nextTranslogFile.getFileName());
                }
                this.readers.addAll(recoverFromFiles(translogGeneration, checkpoint));
                if (readers.isEmpty()) {
                    throw new IllegalStateException("at least one reader must be recovered");
                }
                boolean success = false;
                try {
                    // start writing into a fresh generation after the recovered ones
                    current = createWriter(checkpoint.generation + 1);
                    this.lastCommittedTranslogFileGeneration = translogGeneration.translogFileGeneration;
                    success = true;
                } finally {
                    // we have to close all the recovered ones otherwise we leak file handles here
                    // for instance if we have a lot of tlog and we can't create the writer we keep on holding
                    // on to all the uncommitted tlog files if we don't close
                    if (success == false) {
                        IOUtils.closeWhileHandlingException(readers);
                    }
                }
            } else {
                // fresh-start path: wipe whatever is on disk and start at generation 1
                IOUtils.rm(location);
                logger.debug("wipe translog location - creating new translog");
                Files.createDirectories(location);
                final long generation = 1;
                Checkpoint checkpoint = new Checkpoint(0, 0, generation);
                // CREATE_NEW ensures we fail loudly if a checkpoint survived the wipe
                Checkpoint.write(location.resolve(CHECKPOINT_FILE_NAME), checkpoint, StandardOpenOption.WRITE, StandardOpenOption.CREATE_NEW);
                current = createWriter(generation);
                this.lastCommittedTranslogFileGeneration = NOT_SET_GENERATION;
            }
            // now that we know which files are there, create a new current one.
        } catch (Throwable t) {
            // close the opened translog files if we fail to create a new translog...
            IOUtils.closeWhileHandlingException(current);
            IOUtils.closeWhileHandlingException(readers);
            throw t;
        }
    }
    /** recover all translog files found on disk */
    private final ArrayList<TranslogReader> recoverFromFiles(TranslogGeneration translogGeneration, Checkpoint checkpoint) throws IOException {
        boolean success = false;
        ArrayList<TranslogReader> foundTranslogs = new ArrayList<>();
        final Path tempFile = Files.createTempFile(location, TRANSLOG_FILE_PREFIX, TRANSLOG_FILE_SUFFIX); // a temp file to copy checkpoint to - note it must be in on the same FS otherwise atomic move won't work
        boolean tempFileRenamed = false;
        try (ReleasableLock lock = writeLock.acquire()) {
            logger.debug("open uncommitted translog checkpoint {}", checkpoint);
            final String checkpointTranslogFile = getFilename(checkpoint.generation);
            // open every committed generation from the last committed one up to (but excluding)
            // the generation the uncommitted checkpoint points at; ids must be consecutive
            for (long i = translogGeneration.translogFileGeneration; i < checkpoint.generation; i++) {
                Path committedTranslogFile = location.resolve(getFilename(i));
                if (Files.exists(committedTranslogFile) == false) {
                    throw new IllegalStateException("translog file doesn't exist with generation: " + i + " lastCommitted: " + lastCommittedTranslogFileGeneration + " checkpoint: " + checkpoint.generation + " - translog ids must be consecutive");
                }
                final TranslogReader reader = openReader(committedTranslogFile, Checkpoint.read(location.resolve(getCommitCheckpointFileName(i))));
                foundTranslogs.add(reader);
                logger.debug("recovered local translog from checkpoint {}", checkpoint);
            }
            // the uncommitted generation is opened against the live checkpoint
            foundTranslogs.add(openReader(location.resolve(checkpointTranslogFile), checkpoint));
            Path commitCheckpoint = location.resolve(getCommitCheckpointFileName(checkpoint.generation));
            if (Files.exists(commitCheckpoint)) {
                // a per-generation checkpoint copy already exists; it must agree with the live one
                Checkpoint checkpointFromDisk = Checkpoint.read(commitCheckpoint);
                if (checkpoint.equals(checkpointFromDisk) == false) {
                    throw new IllegalStateException("Checkpoint file " + commitCheckpoint.getFileName() + " already exists but has corrupted content expected: " + checkpoint + " but got: " + checkpointFromDisk);
                }
            } else {
                // we first copy this into the temp-file and then fsync it followed by an atomic move into the target file
                // that way if we hit a disk-full here we are still in an consistent state.
                Files.copy(location.resolve(CHECKPOINT_FILE_NAME), tempFile, StandardCopyOption.REPLACE_EXISTING);
                IOUtils.fsync(tempFile, false);
                Files.move(tempFile, commitCheckpoint, StandardCopyOption.ATOMIC_MOVE);
                tempFileRenamed = true;
                // we only fsync the directory the tempFile was already fsynced
                IOUtils.fsync(commitCheckpoint.getParent(), true);
            }
            success = true;
        } finally {
            if (success == false) {
                // don't leak file handles for partially-recovered readers
                IOUtils.closeWhileHandlingException(foundTranslogs);
            }
            if (tempFileRenamed == false) {
                try {
                    Files.delete(tempFile);
                } catch (IOException ex) {
                    logger.warn("failed to delete temp file {}", ex, tempFile);
                }
            }
        }
        return foundTranslogs;
    }
TranslogReader openReader(Path path, Checkpoint checkpoint) throws IOException {
FileChannel channel = FileChannel.open(path, StandardOpenOption.READ);
try {
assert Translog.parseIdFromFileName(path) == checkpoint.generation : "expected generation: " + Translog.parseIdFromFileName(path) + " but got: " + checkpoint.generation;
TranslogReader reader = TranslogReader.open(channel, path, checkpoint, translogUUID);
channel = null;
return reader;
} finally {
IOUtils.close(channel);
}
}
/**
* Extracts the translog generation from a file name.
*
* @throws IllegalArgumentException if the path doesn't match the expected pattern.
*/
public static long parseIdFromFileName(Path translogFile) {
final String fileName = translogFile.getFileName().toString();
final Matcher matcher = PARSE_STRICT_ID_PATTERN.matcher(fileName);
if (matcher.matches()) {
try {
return Long.parseLong(matcher.group(1));
} catch (NumberFormatException e) {
throw new IllegalStateException("number formatting issue in a file that passed PARSE_STRICT_ID_PATTERN: " + fileName + "]", e);
}
}
throw new IllegalArgumentException("can't parse id from file: " + fileName);
}
    /** Returns {@code true} if this {@code Translog} is still open. */
    public boolean isOpen() {
        return closed.get() == false;
    }
    @Override
    public void close() throws IOException {
        // compareAndSet makes close idempotent: only the first caller performs the shutdown
        if (closed.compareAndSet(false, true)) {
            try (ReleasableLock lock = writeLock.acquire()) {
                try {
                    // flush outstanding writes before releasing files
                    current.sync();
                } finally {
                    // files stay open while views still reference them
                    closeFilesIfNoPendingViews();
                }
            } finally {
                FutureUtils.cancel(syncScheduler);
                logger.debug("translog closed");
            }
        }
    }
    /**
     * Returns all translog locations as absolute paths.
     * These paths don't contain actual translog files they are
     * directories holding the transaction logs.
     */
    public Path location() {
        return location;
    }
    /**
     * Returns the generation of the current transaction log.
     */
    public long currentFileGeneration() {
        // read lock guards against a concurrent generation roll
        try (ReleasableLock lock = readLock.acquire()) {
            return current.getGeneration();
        }
    }
    /**
     * Returns the number of operations in the transaction files that aren't committed to lucene.
     */
    public int totalOperations() {
        return totalOperations(lastCommittedTranslogFileGeneration);
    }
    /**
     * Returns the size in bytes of the translog files that aren't committed to lucene.
     */
    public long sizeInBytes() {
        return sizeInBytes(lastCommittedTranslogFileGeneration);
    }
/**
* Returns the number of operations in the transaction files that aren't committed to lucene..
*/
private int totalOperations(long minGeneration) {
try (ReleasableLock ignored = readLock.acquire()) {
ensureOpen();
return Stream.concat(readers.stream(), Stream.of(current))
.filter(r -> r.getGeneration() >= minGeneration)
.mapToInt(BaseTranslogReader::totalOperations)
.sum();
}
}
/**
* Returns the size in bytes of the translog files that aren't committed to lucene.
*/
private long sizeInBytes(long minGeneration) {
try (ReleasableLock ignored = readLock.acquire()) {
ensureOpen();
return Stream.concat(readers.stream(), Stream.of(current))
.filter(r -> r.getGeneration() >= minGeneration)
.mapToLong(BaseTranslogReader::sizeInBytes)
.sum();
}
}
TranslogWriter createWriter(long fileGeneration) throws IOException {
TranslogWriter newFile;
try {
newFile = TranslogWriter.create(shardId, translogUUID, fileGeneration, location.resolve(getFilename(fileGeneration)), getChannelFactory(), config.getBufferSize());
} catch (IOException e) {
throw new TranslogException(shardId, "failed to create new translog file", e);
}
return newFile;
}
    /**
     * Read the Operation object from the given location. This method will try to read the given location from
     * the current or from the currently committing translog file. If the location is in a file that has already
     * been closed or even removed the method will return <code>null</code> instead.
     */
    public Translog.Operation read(Location location) {
        try (ReleasableLock lock = readLock.acquire()) {
            final BaseTranslogReader reader;
            final long currentGeneration = current.getGeneration();
            if (currentGeneration == location.generation) {
                // location is in the file we are currently writing
                reader = current;
            } else if (readers.isEmpty() == false && readers.get(readers.size() - 1).getGeneration() == location.generation) {
                // location is in the most recent closed generation (the one currently being committed)
                reader = readers.get(readers.size() - 1);
            } else if (currentGeneration < location.generation) {
                // a location can never point into the future
                throw new IllegalStateException("location generation [" + location.generation + "] is greater than the current generation [" + currentGeneration + "]");
            } else {
                // generation was already committed and its file may be gone - signal with null
                return null;
            }
            return reader.read(location);
        } catch (IOException e) {
            throw new ElasticsearchException("failed to read source from translog location " + location, e);
        }
    }
    /**
     * Adds a delete / index operations to the transaction log.
     *
     * @see org.elasticsearch.index.translog.Translog.Operation
     * @see Index
     * @see org.elasticsearch.index.translog.Translog.Delete
     */
    public Location add(Operation operation) throws IOException {
        final ReleasableBytesStreamOutput out = new ReleasableBytesStreamOutput(bigArrays);
        try {
            final BufferedChecksumStreamOutput checksumStreamOutput = new BufferedChecksumStreamOutput(out);
            // frame layout: [int size][operation bytes incl. checksum]; reserve the size slot,
            // write the operation, then seek back and patch the real size in
            final long start = out.position();
            out.skip(Integer.BYTES);
            writeOperationNoSize(checksumStreamOutput, operation);
            final long end = out.position();
            final int operationSize = (int) (end - Integer.BYTES - start);
            out.seek(start);
            out.writeInt(operationSize);
            out.seek(end);
            final ReleasablePagedBytesReference bytes = out.bytes();
            try (ReleasableLock lock = readLock.acquire()) {
                ensureOpen();
                Location location = current.add(bytes);
                assert assertBytesAtLocation(location, bytes);
                return location;
            }
        } catch (AlreadyClosedException | IOException ex) {
            // a failed write may have corrupted the writer - close on tragic event
            closeOnTragicEvent(ex);
            throw ex;
        } catch (Throwable e) {
            closeOnTragicEvent(e);
            throw new TranslogException(shardId, "Failed to write operation [" + operation + "]", e);
        } finally {
            Releasables.close(out.bytes());
        }
    }
boolean assertBytesAtLocation(Translog.Location location, BytesReference expectedBytes) throws IOException {
// tests can override this
ByteBuffer buffer = ByteBuffer.allocate(location.size);
current.readBytes(buffer, location.translogLocation);
return new BytesArray(buffer.array()).equals(expectedBytes);
}
    /**
     * Snapshots the current transaction log allowing to safely iterate over the snapshot.
     * Snapshots are fixed in time and will not be updated with future operations.
     */
    public Snapshot newSnapshot() {
        // Long.MIN_VALUE matches every generation, i.e. snapshot everything we have
        return createSnapshot(Long.MIN_VALUE);
    }
private Snapshot createSnapshot(long minGeneration) {
try (ReleasableLock ignored = readLock.acquire()) {
ensureOpen();
Snapshot[] snapshots = Stream.concat(readers.stream(), Stream.of(current))
.filter(reader -> reader.getGeneration() >= minGeneration)
.map(BaseTranslogReader::newSnapshot).toArray(Snapshot[]::new);
return new MultiSnapshot(snapshots);
}
}
/**
* Returns a view into the current translog that is guaranteed to retain all current operations
* while receiving future ones as well
*/
public Translog.View newView() {
try (ReleasableLock lock = readLock.acquire()) {
ensureOpen();
View view = new View(lastCommittedTranslogFileGeneration);
outstandingViews.add(view);
return view;
}
}
/**
* Sync's the translog.
*/
public void sync() throws IOException {
try (ReleasableLock lock = readLock.acquire()) {
if (closed.get() == false) {
current.sync();
}
} catch (Throwable ex) {
closeOnTragicEvent(ex);
throw ex;
}
}
    /** Returns <code>true</code> if there are buffered operations that still need an fsync. */
    public boolean syncNeeded() {
        try (ReleasableLock lock = readLock.acquire()) {
            return current.syncNeeded();
        }
    }
    /** Builds the translog file name ({@code translog-<generation>.tlog}) for the given generation. */
    public static String getFilename(long generation) {
        return TRANSLOG_FILE_PREFIX + generation + TRANSLOG_FILE_SUFFIX;
    }
    /** Builds the per-generation checkpoint file name ({@code translog-<generation>.ckp}). */
    static String getCommitCheckpointFileName(long generation) {
        return TRANSLOG_FILE_PREFIX + generation + CHECKPOINT_SUFFIX;
    }
    /**
     * Ensures that the given location has be synced / written to the underlying storage.
     *
     * @return Returns <code>true</code> iff this call caused an actual sync operation otherwise <code>false</code>
     */
    public boolean ensureSynced(Location location) throws IOException {
        try (ReleasableLock lock = readLock.acquire()) {
            if (location.generation == current.getGeneration()) { // if we have a new one it's already synced
                ensureOpen();
                // sync up to the end of the given location; returns false if already synced that far
                return current.syncUpTo(location.translogLocation + location.size);
            }
        } catch (Throwable ex) {
            closeOnTragicEvent(ex);
            throw ex;
        }
        return false;
    }
    /**
     * Closes the translog if the current writer recorded a tragic (unrecoverable) exception.
     * The passed-in throwable is only used to collect suppressed exceptions from the close.
     */
    private void closeOnTragicEvent(Throwable ex) {
        if (current.getTragicException() != null) {
            try {
                close();
            } catch (AlreadyClosedException inner) {
                // don't do anything in this case. The AlreadyClosedException comes from TranslogWriter and we should not add it as suppressed because
                // will contain the Exception ex as cause. See also https://github.com/elastic/elasticsearch/issues/15941
            } catch (Exception inner) {
                assert (ex != inner.getCause());
                ex.addSuppressed(inner);
            }
        }
    }
/**
* return stats
*/
public TranslogStats stats() {
// acquire lock to make the two numbers roughly consistent (no file change half way)
try (ReleasableLock lock = readLock.acquire()) {
return new TranslogStats(totalOperations(), sizeInBytes());
}
}
    // used to make decisions if a file can be deleted: generations at or above the last
    // committed one are still referenced
    private boolean isReferencedGeneration(long generation) { // used to make decisions if a file can be deleted
        return generation >= lastCommittedTranslogFileGeneration;
    }
    /** Returns the configuration this translog was created with. */
    public TranslogConfig getConfig() {
        return config;
    }
    /**
     * A view into the translog, capturing all translog files at the moment of creation
     * and updated with any future translog operations.
     */
    public class View implements Closeable {
        // closed flag makes close() idempotent
        AtomicBoolean closed = new AtomicBoolean();
        // the oldest generation this view keeps alive
        final long minGeneration;
        View(long minGeneration) {
            this.minGeneration = minGeneration;
        }
        /** this smallest translog generation in this view */
        public long minTranslogGeneration() {
            return minGeneration;
        }
        /**
         * The total number of operations in the view.
         */
        public int totalOperations() {
            return Translog.this.totalOperations(minGeneration);
        }
        /**
         * Returns the size in bytes of the files behind the view.
         */
        public long sizeInBytes() {
            return Translog.this.sizeInBytes(minGeneration);
        }
        /** create a snapshot from this view */
        public Snapshot snapshot() {
            ensureOpen();
            return Translog.this.createSnapshot(minGeneration);
        }
        /** @throws AlreadyClosedException if this view has been closed */
        void ensureOpen() {
            if (closed.get()) {
                throw new AlreadyClosedException("View is already closed");
            }
        }
        @Override
        public void close() throws IOException {
            if (closed.getAndSet(true) == false) {
                logger.trace("closing view starting at translog [{}]", minTranslogGeneration());
                boolean removed = outstandingViews.remove(this);
                assert removed : "View was never set but was supposed to be removed";
                // this view may have been the last thing pinning old generations - clean up
                trimUnreferencedReaders();
                closeFilesIfNoPendingViews();
            }
        }
    }
public static class Location implements Accountable, Comparable<Location> {
public final long generation;
public final long translogLocation;
public final int size;
Location(long generation, long translogLocation, int size) {
this.generation = generation;
this.translogLocation = translogLocation;
this.size = size;
}
@Override
public long ramBytesUsed() {
return RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + 2 * Long.BYTES + Integer.BYTES;
}
@Override
public Collection<Accountable> getChildResources() {
return Collections.emptyList();
}
@Override
public String toString() {
return "[generation: " + generation + ", location: " + translogLocation + ", size: " + size + "]";
}
@Override
public int compareTo(Location o) {
if (generation == o.generation) {
return Long.compare(translogLocation, o.translogLocation);
}
return Long.compare(generation, o.generation);
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
Location location = (Location) o;
if (generation != location.generation) {
return false;
}
if (translogLocation != location.translogLocation) {
return false;
}
return size == location.size;
}
@Override
public int hashCode() {
int result = Long.hashCode(generation);
result = 31 * result + Long.hashCode(translogLocation);
result = 31 * result + size;
return result;
}
}
    /**
     * A snapshot of the transaction log, allows to iterate over all the transaction log operations.
     */
    public interface Snapshot {
        /**
         * The total number of operations in the translog.
         */
        int totalOperations();
        /**
         * Returns the next operation in the snapshot or <code>null</code> if we reached the end.
         */
        Translog.Operation next() throws IOException;
    }
    /**
     * A generic interface representing an operation performed on the transaction log.
     * Each is associated with a type.
     */
    public interface Operation extends Streamable {
        enum Type {
            // type ids are part of the on-disk/wire format - never reuse or renumber them
            @Deprecated
            CREATE((byte) 1),
            INDEX((byte) 2),
            DELETE((byte) 3);
            private final byte id;
            private Type(byte id) {
                this.id = id;
            }
            public byte id() {
                return this.id;
            }
            /** Maps a serialized type id back to its enum constant. */
            public static Type fromId(byte id) {
                switch (id) {
                    case 1:
                        return CREATE;
                    case 2:
                        return INDEX;
                    case 3:
                        return DELETE;
                    default:
                        throw new IllegalArgumentException("No type mapped for [" + id + "]");
                }
            }
        }
        // the operation's type tag, used when (de)serializing
        Type opType();
        // rough in-memory size estimate, used for accounting
        long estimateSize();
        // the document source carried by this operation, if any
        Source getSource();
    }
    /** Immutable holder for a document's source plus its routing/parent/timestamp/ttl metadata. */
    public static class Source {
        public final BytesReference source;
        public final String routing;
        public final String parent;
        public final long timestamp;
        public final long ttl;
        public Source(BytesReference source, String routing, String parent, long timestamp, long ttl) {
            this.source = source;
            this.routing = routing;
            this.parent = parent;
            this.timestamp = timestamp;
            this.ttl = ttl;
        }
    }
public static class Index implements Operation {
public static final int SERIALIZATION_FORMAT = 6;
private String id;
private String type;
private long version = Versions.MATCH_ANY;
private VersionType versionType = VersionType.INTERNAL;
private BytesReference source;
private String routing;
private String parent;
private long timestamp;
private long ttl;
public Index() {
}
public Index(Engine.Index index) {
this.id = index.id();
this.type = index.type();
this.source = index.source();
this.routing = index.routing();
this.parent = index.parent();
this.version = index.version();
this.timestamp = index.timestamp();
this.ttl = index.ttl();
this.versionType = index.versionType();
}
public Index(String type, String id, byte[] source) {
this.type = type;
this.id = id;
this.source = new BytesArray(source);
}
@Override
public Type opType() {
return Type.INDEX;
}
@Override
public long estimateSize() {
return ((id.length() + type.length()) * 2) + source.length() + 12;
}
public String type() {
return this.type;
}
public String id() {
return this.id;
}
public String routing() {
return this.routing;
}
public String parent() {
return this.parent;
}
public long timestamp() {
return this.timestamp;
}
public long ttl() {
return this.ttl;
}
public BytesReference source() {
return this.source;
}
public long version() {
return this.version;
}
public VersionType versionType() {
return versionType;
}
@Override
public Source getSource() {
return new Source(source, routing, parent, timestamp, ttl);
}
@Override
public void readFrom(StreamInput in) throws IOException {
int version = in.readVInt(); // version
id = in.readString();
type = in.readString();
source = in.readBytesReference();
try {
if (version >= 1) {
if (in.readBoolean()) {
routing = in.readString();
}
}
if (version >= 2) {
if (in.readBoolean()) {
parent = in.readString();
}
}
if (version >= 3) {
this.version = in.readLong();
}
if (version >= 4) {
this.timestamp = in.readLong();
}
if (version >= 5) {
this.ttl = in.readLong();
}
if (version >= 6) {
this.versionType = VersionType.fromValue(in.readByte());
}
} catch (Exception e) {
throw new ElasticsearchException("failed to read [" + type + "][" + id + "]", e);
}
assert versionType.validateVersionForWrites(version);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVInt(SERIALIZATION_FORMAT);
out.writeString(id);
out.writeString(type);
out.writeBytesReference(source);
if (routing == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
out.writeString(routing);
}
if (parent == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
out.writeString(parent);
}
out.writeLong(version);
out.writeLong(timestamp);
out.writeLong(ttl);
out.writeByte(versionType.getValue());
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
Index index = (Index) o;
if (version != index.version ||
timestamp != index.timestamp ||
ttl != index.ttl ||
id.equals(index.id) == false ||
type.equals(index.type) == false ||
versionType != index.versionType ||
source.equals(index.source) == false) {
return false;
}
if (routing != null ? !routing.equals(index.routing) : index.routing != null) {
return false;
}
return !(parent != null ? !parent.equals(index.parent) : index.parent != null);
}
@Override
public int hashCode() {
int result = id.hashCode();
result = 31 * result + type.hashCode();
result = 31 * result + Long.hashCode(version);
result = 31 * result + versionType.hashCode();
result = 31 * result + source.hashCode();
result = 31 * result + (routing != null ? routing.hashCode() : 0);
result = 31 * result + (parent != null ? parent.hashCode() : 0);
result = 31 * result + Long.hashCode(timestamp);
result = 31 * result + Long.hashCode(ttl);
return result;
}
@Override
public String toString() {
return "Index{" +
"id='" + id + '\'' +
", type='" + type + '\'' +
'}';
}
}
public static class Delete implements Operation {
public static final int SERIALIZATION_FORMAT = 2;
private Term uid;
private long version = Versions.MATCH_ANY;
private VersionType versionType = VersionType.INTERNAL;
public Delete() {
}
public Delete(Engine.Delete delete) {
this(delete.uid());
this.version = delete.version();
this.versionType = delete.versionType();
}
public Delete(Term uid) {
this.uid = uid;
}
public Delete(Term uid, long version, VersionType versionType) {
this.uid = uid;
this.version = version;
this.versionType = versionType;
}
@Override
public Type opType() {
return Type.DELETE;
}
@Override
public long estimateSize() {
return ((uid.field().length() + uid.text().length()) * 2) + 20;
}
public Term uid() {
return this.uid;
}
public long version() {
return this.version;
}
public VersionType versionType() {
return this.versionType;
}
@Override
public Source getSource() {
throw new IllegalStateException("trying to read doc source from delete operation");
}
@Override
public void readFrom(StreamInput in) throws IOException {
int version = in.readVInt(); // version
uid = new Term(in.readString(), in.readString());
if (version >= 1) {
this.version = in.readLong();
}
if (version >= 2) {
this.versionType = VersionType.fromValue(in.readByte());
}
assert versionType.validateVersionForWrites(version);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVInt(SERIALIZATION_FORMAT);
out.writeString(uid.field());
out.writeString(uid.text());
out.writeLong(version);
out.writeByte(versionType.getValue());
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
Delete delete = (Delete) o;
return version == delete.version &&
uid.equals(delete.uid) &&
versionType == delete.versionType;
}
@Override
public int hashCode() {
int result = uid.hashCode();
result = 31 * result + Long.hashCode(version);
result = 31 * result + versionType.hashCode();
return result;
}
@Override
public String toString() {
return "Delete{" +
"uid=" + uid +
'}';
}
}
public enum Durability {
/**
* Async durability - translogs are synced based on a time interval.
*/
ASYNC,
/**
* Request durability - translogs are synced for each high levle request (bulk, index, delete)
*/
REQUEST;
}
private static void verifyChecksum(BufferedChecksumStreamInput in) throws IOException {
// This absolutely must come first, or else reading the checksum becomes part of the checksum
long expectedChecksum = in.getChecksum();
long readChecksum = in.readInt() & 0xFFFF_FFFFL;
if (readChecksum != expectedChecksum) {
throw new TranslogCorruptedException("translog stream is corrupted, expected: 0x" +
Long.toHexString(expectedChecksum) + ", got: 0x" + Long.toHexString(readChecksum));
}
}
/**
* Reads a list of operations written with {@link #writeOperations(StreamOutput, List)}
*/
public static List<Operation> readOperations(StreamInput input) throws IOException {
ArrayList<Operation> operations = new ArrayList<>();
int numOps = input.readInt();
final BufferedChecksumStreamInput checksumStreamInput = new BufferedChecksumStreamInput(input);
for (int i = 0; i < numOps; i++) {
operations.add(readOperation(checksumStreamInput));
}
return operations;
}
    /**
     * Reads a single size-framed, checksummed operation from the stream, translating any
     * failure into {@link TruncatedTranslogException} or {@link TranslogCorruptedException}.
     */
    static Translog.Operation readOperation(BufferedChecksumStreamInput in) throws IOException {
        Translog.Operation operation;
        try {
            final int opSize = in.readInt();
            if (opSize < 4) { // 4byte for the checksum
                throw new AssertionError("operation size must be at least 4 but was: " + opSize);
            }
            in.resetDigest(); // size is not part of the checksum!
            if (in.markSupported()) { // if we can we validate the checksum first
                // we are sometimes called when mark is not supported this is the case when
                // we are sending translogs across the network with LZ4 compression enabled - currently there is no way s
                // to prevent this unfortunately.
                in.mark(opSize);
                // skip to the checksum (last 4 bytes of the frame), verify it, then rewind
                in.skip(opSize - 4);
                verifyChecksum(in);
                in.reset();
            }
            Translog.Operation.Type type = Translog.Operation.Type.fromId(in.readByte());
            operation = newOperationFromType(type);
            operation.readFrom(in);
            // second verification consumes the checksum bytes so the stream is positioned correctly
            verifyChecksum(in);
        } catch (EOFException e) {
            throw new TruncatedTranslogException("reached premature end of file, translog is truncated", e);
        } catch (AssertionError | Exception e) {
            throw new TranslogCorruptedException("translog corruption while reading from stream", e);
        }
        return operation;
    }
/**
* Writes all operations in the given iterable to the given output stream including the size of the array
* use {@link #readOperations(StreamInput)} to read it back.
*/
public static void writeOperations(StreamOutput outStream, List<Operation> toWrite) throws IOException {
final ReleasableBytesStreamOutput out = new ReleasableBytesStreamOutput(BigArrays.NON_RECYCLING_INSTANCE);
try {
outStream.writeInt(toWrite.size());
final BufferedChecksumStreamOutput checksumStreamOutput = new BufferedChecksumStreamOutput(out);
for (Operation op : toWrite) {
out.reset();
final long start = out.position();
out.skip(Integer.BYTES);
writeOperationNoSize(checksumStreamOutput, op);
long end = out.position();
int operationSize = (int) (out.position() - Integer.BYTES - start);
out.seek(start);
out.writeInt(operationSize);
out.seek(end);
ReleasablePagedBytesReference bytes = out.bytes();
bytes.writeTo(outStream);
}
} finally {
Releasables.close(out.bytes());
}
}
public static void writeOperationNoSize(BufferedChecksumStreamOutput out, Translog.Operation op) throws IOException {
// This BufferedChecksumStreamOutput remains unclosed on purpose,
// because closing it closes the underlying stream, which we don't
// want to do here.
out.resetDigest();
out.writeByte(op.opType().id());
op.writeTo(out);
long checksum = out.getChecksum();
out.writeInt((int) checksum);
}
/**
* Returns a new empty translog operation for the given {@link Translog.Operation.Type}
*/
static Translog.Operation newOperationFromType(Translog.Operation.Type type) throws IOException {
switch (type) {
case CREATE:
// the deserialization logic in Index was identical to that of Create when create was deprecated
return new Index();
case DELETE:
return new Translog.Delete();
case INDEX:
return new Index();
default:
throw new IOException("No type for [" + type + "]");
}
}
    @Override
    public void prepareCommit() throws IOException {
        try (ReleasableLock lock = writeLock.acquire()) {
            ensureOpen();
            if (currentCommittingGeneration != NOT_SET_GENERATION) {
                throw new IllegalStateException("already committing a translog with generation: " + currentCommittingGeneration);
            }
            currentCommittingGeneration = current.getGeneration();
            // freeze the current writer into a read-only reader; it stays available for reads
            TranslogReader currentCommittingTranslog = current.closeIntoReader();
            readers.add(currentCommittingTranslog);
            Path checkpoint = location.resolve(CHECKPOINT_FILE_NAME);
            assert Checkpoint.read(checkpoint).generation == currentCommittingTranslog.getGeneration();
            // preserve the live checkpoint as the per-generation checkpoint of the frozen file
            Path commitCheckpoint = location.resolve(getCommitCheckpointFileName(currentCommittingTranslog.getGeneration()));
            Files.copy(checkpoint, commitCheckpoint);
            IOUtils.fsync(commitCheckpoint, false);
            IOUtils.fsync(commitCheckpoint.getParent(), true);
            // create a new translog file - this will sync it and update the checkpoint data;
            current = createWriter(current.getGeneration() + 1);
            logger.trace("current translog set to [{}]", current.getGeneration());
        } catch (Throwable t) {
            IOUtils.closeWhileHandlingException(this); // tragic event
            throw t;
        }
    }
    @Override
    public void commit() throws IOException {
        try (ReleasableLock lock = writeLock.acquire()) {
            ensureOpen();
            // commit() without a prior prepareCommit() rolls the generation implicitly
            if (currentCommittingGeneration == NOT_SET_GENERATION) {
                prepareCommit();
            }
            assert currentCommittingGeneration != NOT_SET_GENERATION;
            assert readers.stream().filter(r -> r.getGeneration() == currentCommittingGeneration).findFirst().isPresent()
                    : "reader list doesn't contain committing generation [" + currentCommittingGeneration + "]";
            lastCommittedTranslogFileGeneration = current.getGeneration(); // this is important - otherwise old files will not be cleaned up
            currentCommittingGeneration = NOT_SET_GENERATION;
            // generations below the new lastCommitted bound may now be deletable
            trimUnreferencedReaders();
        }
    }
    /**
     * Deletes translog generations that are neither needed by an open view nor by the last
     * commit, closing their readers and removing their files and per-generation checkpoints.
     */
    void trimUnreferencedReaders() {
        try (ReleasableLock ignored = writeLock.acquire()) {
            if (closed.get()) {
                // we're shutdown potentially on some tragic event - don't delete anything
                return;
            }
            // a generation is referenced if any open view needs it or the last commit needs it
            long minReferencedGen = outstandingViews.stream().mapToLong(View::minTranslogGeneration).min().orElse(Long.MAX_VALUE);
            minReferencedGen = Math.min(lastCommittedTranslogFileGeneration, minReferencedGen);
            final long finalMinReferencedGen = minReferencedGen;
            List<TranslogReader> unreferenced = readers.stream().filter(r -> r.getGeneration() < finalMinReferencedGen).collect(Collectors.toList());
            for (final TranslogReader unreferencedReader : unreferenced) {
                Path translogPath = unreferencedReader.path();
                logger.trace("delete translog file - not referenced and not current anymore {}", translogPath);
                IOUtils.closeWhileHandlingException(unreferencedReader);
                IOUtils.deleteFilesIgnoringExceptions(translogPath,
                        translogPath.resolveSibling(getCommitCheckpointFileName(unreferencedReader.getGeneration())));
            }
            readers.removeAll(unreferenced);
        }
    }
void closeFilesIfNoPendingViews() throws IOException {
try (ReleasableLock ignored = writeLock.acquire()) {
if (closed.get() && outstandingViews.isEmpty()) {
logger.trace("closing files. translog is closed and there are no pending views");
ArrayList<Closeable> toClose = new ArrayList<>(readers);
toClose.add(current);
IOUtils.close(toClose);
}
}
}
@Override
public void rollback() throws IOException {
ensureOpen();
close();
}
/**
* References a transaction log generation
*/
public final static class TranslogGeneration {
public final String translogUUID;
public final long translogFileGeneration;
public TranslogGeneration(String translogUUID, long translogFileGeneration) {
this.translogUUID = translogUUID;
this.translogFileGeneration = translogFileGeneration;
}
}
/**
 * Returns the current generation of this translog. This corresponds to the latest uncommitted translog generation
 */
public TranslogGeneration getGeneration() {
    try (ReleasableLock lock = writeLock.acquire()) {
        // read under the write lock so the uuid / generation pair is consistent
        return new TranslogGeneration(translogUUID, currentFileGeneration());
    }
}
/**
 * Returns <code>true</code> iff the given generation is the current generation of this translog.
 * A <code>null</code> generation is never current; a generation from a different
 * translog (mismatching UUID) is rejected with an {@link IllegalArgumentException}.
 */
public boolean isCurrent(TranslogGeneration generation) {
    try (ReleasableLock lock = writeLock.acquire()) {
        if (generation == null) {
            return false;
        }
        if (generation.translogUUID.equals(translogUUID) == false) {
            throw new IllegalArgumentException("commit belongs to a different translog: " + generation.translogUUID + " vs. " + translogUUID);
        }
        return generation.translogFileGeneration == currentFileGeneration();
    }
}
/** Offset of the first operation in the current writer. */
long getFirstOperationPosition() { // for testing
    return current.getFirstOperationOffset();
}
/**
 * Throws an {@code AlreadyClosedException} if this translog has been closed,
 * attaching the current writer's tragic exception (if any) as the cause.
 */
private void ensureOpen() {
    if (closed.get()) {
        throw new AlreadyClosedException("translog is already closed", current.getTragicException());
    }
}
/**
 * The number of currently open views on this translog
 */
int getNumOpenViews() {
    return outstandingViews.size();
}
/** The {@code TranslogWriter.ChannelFactory} this translog uses; returns the default factory. */
// NOTE(review): package-private — presumably so tests can override it; confirm.
TranslogWriter.ChannelFactory getChannelFactory() {
    return TranslogWriter.ChannelFactory.DEFAULT;
}
/**
 * If this {@code Translog} was closed as a side-effect of a tragic exception,
 * e.g. disk full while flushing a new segment, this returns the root cause exception.
 * Otherwise (no tragic exception has occurred) it returns null.
 */
public Throwable getTragicException() {
    return current.getTragicException(); // delegated to the current writer
}
/** Reads and returns the current checkpoint from the checkpoint file in the translog location. */
final Checkpoint readCheckpoint() throws IOException {
    return Checkpoint.read(location.resolve(CHECKPOINT_FILE_NAME));
}
/**
 * Returns the translog UUID used to associate a Lucene index with a translog.
 */
public String getTranslogUUID() {
    return translogUUID;
}
}
| {
"content_hash": "f1195fd2c27ceeb849d6220a0b68598f",
"timestamp": "",
"source": "github",
"line_count": 1340,
"max_line_length": 246,
"avg_line_length": 40.65149253731343,
"alnum_prop": 0.6107612945863088,
"repo_name": "nomoa/elasticsearch",
"id": "93f9db67b357a56df94deefb7e37aa6f5642244b",
"size": "55261",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "core/src/main/java/org/elasticsearch/index/translog/Translog.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "7673"
},
{
"name": "Batchfile",
"bytes": "15860"
},
{
"name": "Emacs Lisp",
"bytes": "3341"
},
{
"name": "FreeMarker",
"bytes": "45"
},
{
"name": "Groovy",
"bytes": "249570"
},
{
"name": "HTML",
"bytes": "5595"
},
{
"name": "Java",
"bytes": "34926360"
},
{
"name": "Perl",
"bytes": "7116"
},
{
"name": "Python",
"bytes": "76111"
},
{
"name": "Shell",
"bytes": "101972"
}
],
"symlink_target": ""
} |
Vims::Application.configure do
  # Settings specified here will take precedence over those in config/application.rb.
  # Code is not reloaded between requests.
  config.cache_classes = true
  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true
  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true
  # Enable Rack::Cache to put a simple HTTP cache in front of your application
  # Add `rack-cache` to your Gemfile before enabling this.
  # For large-scale production use, consider using a caching reverse proxy like nginx, varnish or squid.
  # config.action_dispatch.rack_cache = true
  # Disable Rails's static asset server (Apache or nginx will already do this).
  config.serve_static_assets = false
  # Compress JavaScripts and CSS.
  config.assets.js_compressor = :uglifier
  # config.assets.css_compressor = :sass
  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false
  # Generate digests for assets URLs.
  config.assets.digest = true
  # Version of your assets, change this if you want to expire all your assets.
  config.assets.version = '1.0'
  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx
  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true
  # Set to :debug to see everything in the log.
  config.log_level = :info
  # Prepend all log lines with the following tags.
  # config.log_tags = [ :subdomain, :uuid ]
  # Use a different logger for distributed setups.
  # config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)
  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store
  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = "http://assets.example.com"
  # Precompile additional assets.
  # application.js, application.css, and all non-JS/CSS in app/assets folder are already added.
  # config.assets.precompile += %w( search.js )
  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false
  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = true
  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify
  # Disable automatic flushing of the log to improve performance.
  # config.autoflush_log = false
  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new
end
| {
"content_hash": "c4874ab8906514e54bd22a165f585603",
"timestamp": "",
"source": "github",
"line_count": 80,
"max_line_length": 104,
"avg_line_length": 40.6,
"alnum_prop": 0.7459975369458128,
"repo_name": "rli9/vims",
"id": "b782b68c8a01eb936744c1ca4e2ba0b3dab9c84d",
"size": "3248",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "config/environments/production.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "321"
},
{
"name": "CSS",
"bytes": "41362"
},
{
"name": "HTML",
"bytes": "154450"
},
{
"name": "JavaScript",
"bytes": "664"
},
{
"name": "Nginx",
"bytes": "1531"
},
{
"name": "Ruby",
"bytes": "260976"
}
],
"symlink_target": ""
} |
#region Header
// Copyright (c) 2012 Diego Frata
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software
// and associated documentation files (the "Software"), to deal in the Software without restriction,
// including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do
// so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all copies or substantial
// portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT
// NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
// IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
// SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#endregion
#region Using Statements
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
#endregion
namespace RdioNet.Models.Activities
{
	/// <summary>
	/// Represents an update activity involving one or more <seealso cref="RdioCollectionAlbum"/> objects.
	/// </summary>
	public class RdioCollectionActivityUpdate : RdioActivityUpdate
	{
		#region Public Properties

		/// <summary>
		/// The list of albums affected by this update.
		/// </summary>
		public IList<RdioAlbum> Albums { get; set; }

		#endregion
	}
}
"content_hash": "a125ce2b56db96c5469163d86503407a",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 106,
"avg_line_length": 33.22641509433962,
"alnum_prop": 0.7314026121521863,
"repo_name": "diegofrata/RdioNet",
"id": "702e81d2a181cc471a0660aa9de2ff03c50a8c56",
"size": "1763",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "RdioNet/Models/Activities/RdioCollectionActivityUpdate.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "290"
},
{
"name": "C#",
"bytes": "256010"
}
],
"symlink_target": ""
} |
package org.apache.druid.realtime.firehose;
import com.google.common.collect.Lists;
import org.apache.druid.data.input.Firehose;
import org.apache.druid.data.input.FirehoseFactory;
import org.apache.druid.data.input.InputRow;
import org.apache.druid.data.input.Row;
import org.apache.druid.data.input.impl.InputRowParser;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.parsers.ParseException;
import org.apache.druid.segment.realtime.firehose.CombiningFirehoseFactory;
import org.apache.druid.utils.Runnables;
import org.joda.time.DateTime;
import org.junit.Assert;
import org.junit.Test;
import javax.annotation.Nullable;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
public class CombiningFirehoseFactoryTest
{
@Test
public void testCombiningfirehose() throws IOException
{
  // Two delegate firehoses whose rows should be concatenated in order.
  List<InputRow> list1 = Arrays.asList(makeRow(1, 1), makeRow(2, 2));
  List<InputRow> list2 = Arrays.asList(makeRow(3, 3), makeRow(4, 4), makeRow(5, 5));
  FirehoseFactory combiningFactory = new CombiningFirehoseFactory(
      Arrays.asList(
          new ListFirehoseFactory(list1),
          new ListFirehoseFactory(list2)
      )
  );
  final Firehose firehose = combiningFactory.connect(null, null);
  try {
    // Rows 1..5 must come back as one continuous stream across both delegates.
    for (int i = 1; i < 6; i++) {
      Assert.assertTrue(firehose.hasMore());
      final InputRow inputRow = firehose.nextRow();
      Assert.assertEquals(i, inputRow.getTimestampFromEpoch());
      Assert.assertEquals(i, inputRow.getMetric("test").floatValue(), 0);
    }
    Assert.assertFalse(firehose.hasMore());
  }
  finally {
    // The firehose was previously never closed; release its resources.
    firehose.close();
  }
}
/**
 * Builds a stub InputRow carrying a fixed epoch timestamp, one dimension
 * ("testDim", always empty) and a single metric value returned for any metric name.
 */
private InputRow makeRow(final long timestampMillis, final float metricVal)
{
  return new InputRow()
  {
    @Override
    public long getTimestampFromEpoch()
    {
      return timestampMillis;
    }

    @Override
    public DateTime getTimestamp()
    {
      return DateTimes.utc(timestampMillis);
    }

    @Override
    public List<String> getDimensions()
    {
      return Collections.singletonList("testDim");
    }

    @Override
    public List<String> getDimension(String dimension)
    {
      return Lists.newArrayList();
    }

    @Override
    public Number getMetric(String metric)
    {
      return metricVal;
    }

    @Override
    public Object getRaw(String dimension)
    {
      return null;
    }

    @Override
    public int compareTo(Row o)
    {
      return 0;
    }
  };
}
/**
 * Trivial FirehoseFactory that replays a fixed, in-memory list of rows.
 */
public static class ListFirehoseFactory implements FirehoseFactory<InputRowParser>
{
  private final List<InputRow> rows;

  ListFirehoseFactory(List<InputRow> rows)
  {
    this.rows = rows;
  }

  @Override
  public Firehose connect(InputRowParser inputRowParser, File temporaryDirectory) throws ParseException
  {
    final Iterator<InputRow> rowIterator = rows.iterator();
    return new Firehose()
    {
      @Override
      public boolean hasMore()
      {
        return rowIterator.hasNext();
      }

      @Nullable
      @Override
      public InputRow nextRow()
      {
        return rowIterator.next();
      }

      @Override
      public Runnable commit()
      {
        return Runnables.getNoopRunnable();
      }

      @Override
      public void close()
      {
        // Nothing to release; rows live in memory.
      }
    };
  }
}
}
| {
"content_hash": "6bc9b8ddd4f935debf7c1a896c95fae3",
"timestamp": "",
"source": "github",
"line_count": 142,
"max_line_length": 105,
"avg_line_length": 24.387323943661972,
"alnum_prop": 0.6494369044181346,
"repo_name": "dkhwangbo/druid",
"id": "2f3729f30dc7b051d66a45308ddb42d907a4259e",
"size": "4270",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "server/src/test/java/org/apache/druid/realtime/firehose/CombiningFirehoseFactoryTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "3345"
},
{
"name": "CSS",
"bytes": "15658"
},
{
"name": "Dockerfile",
"bytes": "4856"
},
{
"name": "HTML",
"bytes": "19754"
},
{
"name": "Java",
"bytes": "21183046"
},
{
"name": "JavaScript",
"bytes": "304058"
},
{
"name": "Makefile",
"bytes": "659"
},
{
"name": "PostScript",
"bytes": "5"
},
{
"name": "R",
"bytes": "17002"
},
{
"name": "Roff",
"bytes": "3617"
},
{
"name": "Shell",
"bytes": "28297"
},
{
"name": "TeX",
"bytes": "399508"
},
{
"name": "Thrift",
"bytes": "207"
}
],
"symlink_target": ""
} |
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<!-- Read-only view of the Nuxeo repository connection parameters.
     Fix: the final row (Password) was missing its closing </tr> tag. -->
<table class="displaytable">
  <tr>
    <td class="description">
      <nobr>$Encoder.bodyEscape($ResourceBundle.getString('NuxeoRepositoryConnector.Protocol'))</nobr>
    </td>
    <td class="value">
      <nobr>$Encoder.bodyEscape($NUXEO_PROTOCOL)</nobr>
    </td>
  </tr>
  <tr>
    <td class="description">
      <nobr>$Encoder.bodyEscape($ResourceBundle.getString('NuxeoRepositoryConnector.Host'))</nobr>
    </td>
    <td class="value">
      <nobr>$Encoder.bodyEscape($NUXEO_HOST)</nobr>
    </td>
  </tr>
  <tr>
    <td class="description">
      <nobr>$Encoder.bodyEscape($ResourceBundle.getString('NuxeoRepositoryConnector.Port'))</nobr>
    </td>
    <td class="value">
      <nobr>$Encoder.bodyEscape($NUXEO_PORT)</nobr>
    </td>
  </tr>
  <tr>
    <td class="description">
      <nobr>$Encoder.bodyEscape($ResourceBundle.getString('NuxeoRepositoryConnector.Path'))</nobr>
    </td>
    <td class="value">
      <nobr>$Encoder.bodyEscape($NUXEO_PATH)</nobr>
    </td>
  </tr>
  <tr>
    <td class="description">
      <nobr>$Encoder.bodyEscape($ResourceBundle.getString('NuxeoRepositoryConnector.Username'))</nobr>
    </td>
    <td class="value">
      <nobr>$Encoder.bodyEscape($NUXEO_USERNAME)</nobr>
    </td>
  </tr>
  <tr>
    <td class="description">
      <nobr>$Encoder.bodyEscape($ResourceBundle.getString('NuxeoRepositoryConnector.Password'))</nobr>
    </td>
    <td class="value">
      <nobr>*****</nobr>
    </td>
  </tr>
</table>
"content_hash": "131147782e6dec74779bd37be8eba7b9",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 102,
"avg_line_length": 32.779411764705884,
"alnum_prop": 0.6792283535217586,
"repo_name": "kishorejangid/manifoldcf",
"id": "493ba2be56524f1a079cf8e5b4168bb91afe25e3",
"size": "2229",
"binary": false,
"copies": "3",
"ref": "refs/heads/trunk",
"path": "connectors/nuxeo/connector/src/main/resources/org/apache/manifoldcf/crawler/connectors/nuxeo/viewConfiguration_conf.html",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "33401"
},
{
"name": "C",
"bytes": "40584"
},
{
"name": "CSS",
"bytes": "73683"
},
{
"name": "HTML",
"bytes": "471700"
},
{
"name": "Java",
"bytes": "11901668"
},
{
"name": "JavaScript",
"bytes": "172709"
},
{
"name": "Makefile",
"bytes": "1712"
},
{
"name": "Python",
"bytes": "158817"
},
{
"name": "Shell",
"bytes": "37247"
},
{
"name": "XSLT",
"bytes": "31845"
}
],
"symlink_target": ""
} |
package pdp.web;
import org.junit.Test;
import pdp.domain.Validation;
import pdp.ip.IPInfo;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
public class ValidationControllerTest {
private ValidationController subject = new ValidationController();
@Test
public void validation() throws Exception {
assertTrue(subject.validation(new Validation("ip", "fe80:0:0:0:0:0:c0a8:11")));
assertTrue(subject.validation(new Validation("ip", "192.168.2.255")));
assertFalse(subject.validation(new Validation("ip", "nope")));
}
@Test
public void ipInfo() throws Exception {
assertIpInfo(
subject.ipInfo("fe80:0:0:0:0:0:c0a8:11", 32),
"fe80:0:ffff:ffff:ffff:ffff:ffff:ffff",
"fe80:0:0:0:0:0:0:0",
7.922816251426434E28D,
false);
assertIpInfo(
subject.ipInfo("192.168.6.56", 21),
"192.168.7.255",
"192.168.0.0",
2048D,
true);
}
private void assertIpInfo(IPInfo ipInfo, String broadCastAddress, String networkAddress, double capacity, boolean isIpv4) {
assertEquals(broadCastAddress, ipInfo.getBroadcastAddress());
assertEquals(networkAddress, ipInfo.getNetworkAddress());
assertEquals(capacity, ipInfo.getCapacity(), 1D);
assertEquals(isIpv4, ipInfo.isIpv4());
}
} | {
"content_hash": "f9f1f3d643eeee8e5493dbc236bc8e5b",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 127,
"avg_line_length": 31.23404255319149,
"alnum_prop": 0.6478201634877384,
"repo_name": "OpenConext/OpenConext-pdp",
"id": "d7834e248bd813ad39cd049de5bed3f49f07c371",
"size": "1468",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pdp-server/src/test/java/pdp/web/ValidationControllerTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "8041"
},
{
"name": "EJS",
"bytes": "251"
},
{
"name": "HTML",
"bytes": "3981"
},
{
"name": "Java",
"bytes": "336109"
},
{
"name": "JavaScript",
"bytes": "189641"
},
{
"name": "SCSS",
"bytes": "70930"
},
{
"name": "Sass",
"bytes": "35335"
},
{
"name": "Shell",
"bytes": "125"
}
],
"symlink_target": ""
} |
require_relative 'helper'
require 'rack/test'
require 'api'
# Edge-case coverage for the HTTP API: unknown routes, malformed or
# out-of-range dates, unsupported bases, and malformed query strings.
describe 'the API' do
  include Rack::Test::Methods

  # Rack::Test drives requests against this Sinatra application.
  let(:app) { Sinatra::Application }

  it 'handles unfound pages' do
    get '/foo'
    last_response.status.must_equal 404
  end

  # NOTE(review): `unprocessable?` is presumably the Rack::Response 422
  # predicate — confirm against the Rack version in the Gemfile.
  it 'will not process an invalid date' do
    get '/2010-31-01'
    last_response.must_be :unprocessable?
  end

  it 'will not process a date before 2000' do
    get '/1999-01-01'
    last_response.must_be :unprocessable?
  end

  it 'will not process an invalid base' do
    get '/latest?base=UAH'
    last_response.must_be :unprocessable?
  end

  it 'handles malformed queries' do
    get 'latest?base=USD?callback=?'
    last_response.must_be :unprocessable?
  end
end
| {
"content_hash": "12bccbf159225dd4fd214255f8e1d09b",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 45,
"avg_line_length": 21.235294117647058,
"alnum_prop": 0.682825484764543,
"repo_name": "pronebird/fixer-io",
"id": "3937c3566b39997ea6824d4b09a0ade8f665d8d7",
"size": "722",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spec/edge_cases_spec.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "10049"
},
{
"name": "Shell",
"bytes": "400"
}
],
"symlink_target": ""
} |
// Populates the #dataList container with nine sample BBUI image-list items
// ("Title 1".."Title 9"), each carrying the data-bb-* attributes the toolkit
// reads when styling the list.
// NOTE(review): `element` appears to be a document (getElementById is called
// on it) while the items are created via the global `document` — confirm.
function dataOnLoad_initialLoad(element) {
    var dataList = element.getElementById('dataList');
    var index;
    var row;
    for (index = 1; index < 10; index++) {
        row = document.createElement('div');
        row.setAttribute('data-bb-type', 'item');
        row.setAttribute('data-bb-img', 'images/icons/icon11.png');
        row.setAttribute('data-bb-title', 'Title ' + index);
        row.innerHTML = 'My description ' + index;
        dataList.appendChild(row);
    }
}
"content_hash": "05a33a830b6432960196a2cf2cb2955d",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 66,
"avg_line_length": 33.76923076923077,
"alnum_prop": 0.6970387243735763,
"repo_name": "sixertoy/camera_ionic",
"id": "3a18bb1d2b9c2233f902dfedd5b02795a609bcfb",
"size": "1036",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "www/lib/bbui/samples/js/dataOnLoad.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1148985"
},
{
"name": "HTML",
"bytes": "119603"
},
{
"name": "JavaScript",
"bytes": "3422157"
}
],
"symlink_target": ""
} |
<?php
namespace Cake\Shell;
use Cake\Console\Shell;
use Cake\Core\Configure;
/**
 * built-in Server Shell
 */
class ServerShell extends Shell
{
    /**
     * Default ServerHost
     *
     * @var string
     */
    const DEFAULT_HOST = 'localhost';

    /**
     * Default ListenPort
     *
     * @var int
     */
    const DEFAULT_PORT = 8765;

    /**
     * server host
     *
     * @var string
     */
    protected $_host = self::DEFAULT_HOST;

    /**
     * listen port
     *
     * @var int
     */
    protected $_port = self::DEFAULT_PORT;

    /**
     * document root
     *
     * @var string
     */
    protected $_documentRoot = WWW_ROOT;

    /**
     * ini path
     *
     * @var string
     */
    protected $_iniPath = '';

    /**
     * Starts up the Shell and displays the welcome message.
     * Allows for checking and configuring prior to command or main execution
     *
     * Override this method if you want to remove the welcome information,
     * or otherwise modify the pre-command flow.
     *
     * @return void
     * @link https://book.cakephp.org/3.0/en/console-and-shells.html#hook-methods
     */
    public function startup()
    {
        // Command-line options override the defaults above.
        if ($this->param('host')) {
            $this->_host = $this->param('host');
        }
        if ($this->param('port')) {
            $this->_port = (int)$this->param('port');
        }
        if ($this->param('document_root')) {
            $this->_documentRoot = $this->param('document_root');
        }
        if ($this->param('ini_path')) {
            $this->_iniPath = $this->param('ini_path');
        }
        // For Windows
        // Drop a trailing directory separator so later concatenation is clean.
        if (substr($this->_documentRoot, -1, 1) === DIRECTORY_SEPARATOR) {
            $this->_documentRoot = substr($this->_documentRoot, 0, strlen($this->_documentRoot) - 1);
        }
        // Collapse repeated backslashes after a drive letter, e.g. "C:\\\\app" -> "C:\app".
        if (preg_match("/^([a-z]:)[\\\]+(.+)$/i", $this->_documentRoot, $m)) {
            $this->_documentRoot = $m[1] . '\\' . $m[2];
        }
        // Apply the same normalization to the php.ini path.
        $this->_iniPath = rtrim($this->_iniPath, DIRECTORY_SEPARATOR);
        if (preg_match("/^([a-z]:)[\\\]+(.+)$/i", $this->_iniPath, $m)) {
            $this->_iniPath = $m[1] . '\\' . $m[2];
        }
        parent::startup();
    }

    /**
     * Displays a header for the shell
     *
     * @return void
     */
    protected function _welcome()
    {
        $this->out();
        $this->out(sprintf('<info>Welcome to CakePHP %s Console</info>', 'v' . Configure::version()));
        $this->hr();
        $this->out(sprintf('App : %s', APP_DIR));
        $this->out(sprintf('Path: %s', APP));
        $this->out(sprintf('DocumentRoot: %s', $this->_documentRoot));
        $this->out(sprintf('Ini Path: %s', $this->_iniPath));
        $this->hr();
    }

    /**
     * Override main() to handle action
     *
     * @return void
     */
    public function main()
    {
        // Assemble: php -S <host>:<port> -t <docroot> [-c <ini>] <router-script>
        $command = sprintf(
            'php -S %s:%d -t %s',
            $this->_host,
            $this->_port,
            escapeshellarg($this->_documentRoot)
        );
        if (!empty($this->_iniPath)) {
            $command = sprintf('%s -c %s', $command, $this->_iniPath);
        }
        // index.php under the document root serves as the router script.
        $command = sprintf('%s %s', $command, escapeshellarg($this->_documentRoot . '/index.php'));
        $port = ':' . $this->_port;
        $this->out(sprintf('built-in server is running in http://%s%s/', $this->_host, $port));
        $this->out(sprintf('You can exit with <info>`CTRL-C`</info>'));
        // Blocks until the PHP built-in server process exits.
        system($command);
    }

    /**
     * Gets the option parser instance and configures it.
     *
     * @return \Cake\Console\ConsoleOptionParser
     */
    public function getOptionParser()
    {
        $parser = parent::getOptionParser();
        $parser->setDescription([
            'PHP Built-in Server for CakePHP',
            '<warning>[WARN] Don\'t use this in a production environment</warning>',
        ])->addOption('host', [
            'short' => 'H',
            'help' => 'ServerHost'
        ])->addOption('port', [
            'short' => 'p',
            'help' => 'ListenPort'
        ])->addOption('ini_path', [
            'short' => 'I',
            'help' => 'php.ini path'
        ])->addOption('document_root', [
            'short' => 'd',
            'help' => 'DocumentRoot'
        ]);
        return $parser;
    }
}
| {
"content_hash": "35bfe9460a6af92cc78ac4a733e2aaa4",
"timestamp": "",
"source": "github",
"line_count": 169,
"max_line_length": 102,
"avg_line_length": 25.502958579881657,
"alnum_prop": 0.4902552204176334,
"repo_name": "cloudfoundry/php-buildpack",
"id": "67b7ac3f23ad073022c94a8bb6de3caf8077e8dd",
"size": "4895",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "fixtures/cake_local_deps/vendor/cakephp/cakephp/src/Shell/ServerShell.php",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1724"
},
{
"name": "CSS",
"bytes": "27696"
},
{
"name": "Go",
"bytes": "73324"
},
{
"name": "HTML",
"bytes": "24121"
},
{
"name": "JavaScript",
"bytes": "20500"
},
{
"name": "Makefile",
"bytes": "295"
},
{
"name": "PHP",
"bytes": "541348"
},
{
"name": "Python",
"bytes": "584757"
},
{
"name": "Ruby",
"bytes": "1102"
},
{
"name": "SCSS",
"bytes": "15480"
},
{
"name": "Shell",
"bytes": "20264"
},
{
"name": "Smalltalk",
"bytes": "8"
},
{
"name": "Twig",
"bytes": "86464"
}
],
"symlink_target": ""
} |
'use strict';
// # S3 storage module for Ghost blog http://ghost.org/
var fs = require('fs');
var path = require('path');
var nodefn = require('when/node/function');
var when = require('when');
var readFile = nodefn.lift(fs.readFile);
var unlink = nodefn.lift(fs.unlink);
var AWS = require('aws-sdk');
var config = require('../config')._config.aws;
module.exports = function(options) {
options = options || {};
if (options.config) {
config = options.config;
AWS.config.update(config);
}
if (options.errors) errors = options.errors;
return module.exports;
};
// ### Save
// Saves the image to S3
// - image is the express image object
// - returns a promise which ultimately returns the full url to the uploaded image
module.exports.save = function(image) {
    if (!config) return when.reject('ghost-s3 is not configured');
    // Key is "<year>/<Mon>/<name>-<timestamp><ext>" under the bucket root.
    var targetDir = getTargetDir();
    var targetFilename = getTargetName(image, targetDir);
    var awsPath = 'https://' + config.bucket + '.s3.amazonaws.com/';
    return readFile(image.path)
    .then(function(buffer) {
        var s3 = new AWS.S3({
            accessKeyId: config.accessKeyId,
            secretAccessKey: config.secretAccessKey,
            bucket: config.bucket,
            region: config.region
        });
        // Lift the callback-style putObject into a promise.
        return nodefn.call(s3.putObject.bind(s3), {
            Bucket: config.bucket,
            Key: targetFilename,
            Body: buffer,
            ContentType: image.type,
            CacheControl: 'max-age=' + (30 * 24 * 60 * 60) // 30 days
        });
    })
    .then(function() {
        return when.resolve(awsPath + targetFilename);
    })
    .catch(function(err) {
        // Best-effort cleanup of the local temp file, then log and re-reject.
        unlink(image.path);
        errors.logError(err);
        throw err;
    });
};
// middleware for serving the files
// `save` returns absolute S3 URLs, so there is nothing to serve locally;
// this hook simply passes every request through.
module.exports.serve = function() {
    // a no-op, these are absolute URLs
    return function (req, res, next) {
        next();
    };
};
// Three-letter month labels used when building upload folder names.
var MONTHS = [
    'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
    'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'
];

// Folder of the form "<year>/<Mon>/" derived from the current date.
var getTargetDir = function() {
    var today = new Date();
    var folder = path.join(today.getFullYear() + '', MONTHS[today.getMonth()]);
    return folder + '/';
};

// Builds "<dir><basename>-<epoch-ms><ext>", replacing every non-word
// character in the basename with an underscore.
var getTargetName = function(image, targetDir) {
    var ext = path.extname(image.name);
    var base = path.basename(image.name, ext).replace(/\W/g, '_');
    return targetDir + base + '-' + Date.now() + ext;
};
// default error handler
// Used unless the caller injects its own `errors` object through the
// configuration function exported above.
var errors = {
    logError: function(error) {
        console.log('error in ghost-s3', error);
    }
};
"content_hash": "ded2c8a31f3c25d19f5133f6029d287f",
"timestamp": "",
"source": "github",
"line_count": 98,
"max_line_length": 82,
"avg_line_length": 25.908163265306122,
"alnum_prop": 0.5978731784166995,
"repo_name": "abbyoung/abigailyoung.com",
"id": "8d904db61934fb1353816cefbf880c654fab5c92",
"size": "2539",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "core/server/storage/s3.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "90396"
},
{
"name": "HTML",
"bytes": "76111"
},
{
"name": "JavaScript",
"bytes": "4915071"
}
],
"symlink_target": ""
} |
# Homebrew formula for Elastic's auditbeat (OSS build only — see install).
class Auditbeat < Formula
  desc "Lightweight Shipper for Audit Data"
  homepage "https://www.elastic.co/products/beats/auditbeat"
  url "https://github.com/elastic/beats.git",
      tag: "v8.5.1",
      revision: "f81376bad511929eb90d584d2059c4c8a41fc691"
  license "Apache-2.0"
  head "https://github.com/elastic/beats.git", branch: "main"

  bottle do
    sha256 cellar: :any_skip_relocation, arm64_ventura: "b5a47a5a8be86676db31130c22c3cea007421a7d7c234b43b017a2fe8d8d3c9a"
    sha256 cellar: :any_skip_relocation, arm64_monterey: "95edebb3721331b411ceeaadfd95c8c258da1228c23bd199df01898ad8bcffc3"
    sha256 cellar: :any_skip_relocation, arm64_big_sur: "f38006ad9934cdabe061e095582f9b2a6a79d4f46f721be747ad4da5690b1a12"
    sha256 cellar: :any_skip_relocation, monterey: "90d2d60e1dc2031e6da3787b49d523c7d26679a4e8594bfb8762ed32132249ef"
    sha256 cellar: :any_skip_relocation, big_sur: "a64e3f7d2655bb89f458f2ef6174e46bb2ca2962a91f91f0a7fe16d8ebb3b27d"
    sha256 cellar: :any_skip_relocation, catalina: "391cc69f401d07e3050a73c2facb873f690db2cef380d1022efc34aa19f7afee"
    sha256 cellar: :any_skip_relocation, x86_64_linux: "01534fe273bf679ae5a1205f2ae8c2274d552e1e403f9a1cd3f024f94f7e0235"
  end

  depends_on "go" => :build
  depends_on "mage" => :build
  depends_on "python@3.11" => :build

  def install
    # remove non open source files
    rm_rf "x-pack"
    cd "auditbeat" do
      # don't build docs because it would fail creating the combined OSS/x-pack
      # docs and we aren't installing them anyway
      inreplace "magefile.go", "devtools.GenerateModuleIncludeListGo, Docs)",
                               "devtools.GenerateModuleIncludeListGo)"
      # prevent downloading binary wheels during python setup
      system "make", "PIP_INSTALL_PARAMS=--no-binary :all", "python-env"
      system "mage", "-v", "build"
      system "mage", "-v", "update"
      (etc/"auditbeat").install Dir["auditbeat.*", "fields.yml"]
      (libexec/"bin").install "auditbeat"
      prefix.install "build/kibana"
    end
    # Wrapper script pins the config/data/log paths to Homebrew locations.
    (bin/"auditbeat").write <<~EOS
      #!/bin/sh
      exec #{libexec}/bin/auditbeat \
        --path.config #{etc}/auditbeat \
        --path.data #{var}/lib/auditbeat \
        --path.home #{prefix} \
        --path.logs #{var}/log/auditbeat \
        "$@"
    EOS
    chmod 0555, bin/"auditbeat"
    generate_completions_from_executable(bin/"auditbeat", "completion", shells: [:bash, :zsh])
  end

  def post_install
    # Create the data and log directories referenced by the wrapper script.
    (var/"lib/auditbeat").mkpath
    (var/"log/auditbeat").mkpath
  end

  service do
    run opt_bin/"auditbeat"
  end

  test do
    (testpath/"files").mkpath
    # Watch a scratch directory and write events to a local file output.
    (testpath/"config/auditbeat.yml").write <<~EOS
      auditbeat.modules:
      - module: file_integrity
        paths:
          - #{testpath}/files
      output.file:
        path: "#{testpath}/auditbeat"
        filename: auditbeat
    EOS
    fork do
      exec "#{bin}/auditbeat", "-path.config", testpath/"config", "-path.data", testpath/"data"
    end
    sleep 5
    touch testpath/"files/touch"
    sleep 30
    assert_predicate testpath/"data/beat.db", :exist?
    output = JSON.parse((testpath/"data/meta.json").read)
    assert_includes output, "first_start"
  end
end
| {
"content_hash": "9ac04691e091c43c798f4116f61ce7f3",
"timestamp": "",
"source": "github",
"line_count": 91,
"max_line_length": 123,
"avg_line_length": 35.505494505494504,
"alnum_prop": 0.6774992262457443,
"repo_name": "makigumo/homebrew-core",
"id": "b2876721027d590140f680e64abcb20d12eabb12",
"size": "3231",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "Formula/auditbeat.rb",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Perl",
"bytes": "740"
},
{
"name": "Ruby",
"bytes": "15824968"
}
],
"symlink_target": ""
} |
using System;
namespace PixelFarm.CpuBlit.Rasterization
{
    //=============================================================scanline_p8
    //
    // This is a general purpose scanline container which supports the interface
    // used in the rasterizer::render(). See description of scanline_u8
    // for details.
    //
    //------------------------------------------------------------------------
    public sealed class ScanlinePacked8 : Scanline
    {
        public ScanlinePacked8()
        {
        }

        /// <summary>
        /// Prepares this scanline for the x-range [min_x, max_x], growing the
        /// span/cover buffers if needed and resetting all bookkeeping state.
        /// </summary>
        public override void ResetSpans(int min_x, int max_x)
        {
            int max_len = max_x - min_x + 3;
            if (max_len > _spans.Length)
            {
                _spans = new ScanlineSpan[max_len];
                _covers = new byte[max_len];
            }
            _last_x = 0x7FFFFFF0;
            _cover_index = 0; //make it ready for next add
            _last_span_index = 0;
            _spans[_last_span_index].len = 0;
        }

        /// <summary>
        /// Adds a single cell; consecutive cells are packed into one span with
        /// one cover byte per cell (positive length).
        /// </summary>
        public override void AddCell(int x, int cover)
        {
            _covers[_cover_index] = (byte)cover;
            if (x == _last_x + 1 && _spans[_last_span_index].len > 0)
            {
                //append to last cell
                _spans[_last_span_index].len++;
            }
            else
            {
                //start new
                _last_span_index++;
                _spans[_last_span_index] = new ScanlineSpan((short)x, _cover_index);
            }
            _last_x = x;
            _cover_index++; //make it ready for next add
        }

        /// <summary>
        /// Adds a solid run of <paramref name="len"/> cells sharing a single
        /// cover value (stored with negative length, as in AGG's scanline_p8).
        /// </summary>
        public override void AddSpan(int x, int len, int cover)
        {
#if DEBUG
            int backupCover = cover;
#endif
            // Extend the previous solid span only when this run starts right after
            // it AND carries the same cover VALUE. BUGFIX: the old condition
            // compared 'cover' with the stored cover INDEX (cover_index) rather
            // than the cover byte it points at, so spans merged incorrectly;
            // AGG's scanline_p8::add_span compares against *covers.
            if (x == _last_x + 1
                && _spans[_last_span_index].len < 0
                && cover == _covers[_spans[_last_span_index].cover_index])
            {
                //just append data to latest span ***
                _spans[_last_span_index].len -= (short)len;
            }
            else
            {
                _covers[_cover_index] = (byte)cover;
                _last_span_index++;
                //---------------------------------------------------
                //start new
                _spans[_last_span_index] = new ScanlineSpan((short)x, (short)(-len), _cover_index);
                _cover_index++; //make it ready for next add
            }
            _last_x = x + len - 1;
        }

        /// <summary>
        /// Resets the bookkeeping state without shrinking the buffers.
        /// </summary>
        public override void ResetSpans()
        {
            _last_x = 0x7FFFFFF0;
            _last_span_index = 0;
            _cover_index = 0; //make it ready for next add
            _spans[_last_span_index].len = 0;
        }
    }
}
| {
"content_hash": "85c42b23cd29a260f36bcb9089d11bc6",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 99,
"avg_line_length": 33.03703703703704,
"alnum_prop": 0.42750373692077726,
"repo_name": "LayoutFarm/PixelFarm",
"id": "5b4130a9e90539af5128bbc6677643be579d0fd8",
"size": "4047",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/PixelFarm/PixelFarm.CpuBlit_Layer1/02_Rasterization/2_ScanlinePacked8.cs",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C#",
"bytes": "16663734"
},
{
"name": "GLSL",
"bytes": "15555"
},
{
"name": "Smalltalk",
"bytes": "239"
}
],
"symlink_target": ""
} |
===============================
Server Tools for Vauxoo
===============================
.. image:: https://badge.fury.io/py/vxtools-server.png
:target: http://badge.fury.io/py/vxtools-server
.. image:: https://travis-ci.org/ruiztulio/vxtools-server.png?branch=master
:target: https://travis-ci.org/ruiztulio/vxtools-server
.. image:: https://pypip.in/d/vxtools-server/badge.png
:target: https://pypi.python.org/pypi/vxtools-server
Server tools, part of the VauxooTools suite.
* Free software: BSD license
* Documentation: https://vxtools-server.readthedocs.org.
Features
--------
* TODO | {
"content_hash": "1c0251ba353c9d8af9abf30cc1386fa4",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 75,
"avg_line_length": 26.217391304347824,
"alnum_prop": 0.6368159203980099,
"repo_name": "vauxoo-dev/vxtools-server",
"id": "4aec9075d794ef174b6bfa42f175a6aa9d3ea849",
"size": "603",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.rst",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "10532"
},
{
"name": "Shell",
"bytes": "6466"
}
],
"symlink_target": ""
} |
// +build !providerless
package azure
import (
"context"
"fmt"
"math"
"reflect"
"strconv"
"strings"
"github.com/Azure/azure-sdk-for-go/services/network/mgmt/2019-06-01/network"
"github.com/Azure/go-autorest/autorest/to"
v1 "k8s.io/api/core/v1"
utilerrors "k8s.io/apimachinery/pkg/util/errors"
"k8s.io/apimachinery/pkg/util/sets"
cloudprovider "k8s.io/cloud-provider"
servicehelpers "k8s.io/cloud-provider/service/helpers"
"k8s.io/klog"
utilnet "k8s.io/utils/net"
)
const (
	// ServiceAnnotationLoadBalancerInternal is the annotation used on the service
	// to request an internal (VNet-only) load balancer instead of a public one.
	ServiceAnnotationLoadBalancerInternal = "service.beta.kubernetes.io/azure-load-balancer-internal"

	// ServiceAnnotationLoadBalancerInternalSubnet is the annotation used on the service
	// to specify what subnet it is exposed on
	ServiceAnnotationLoadBalancerInternalSubnet = "service.beta.kubernetes.io/azure-load-balancer-internal-subnet"

	// ServiceAnnotationLoadBalancerMode is the annotation used on the service to specify the
	// Azure load balancer selection based on availability sets
	// There are currently three possible load balancer selection modes:
	// 1. Default mode - service has no annotation ("service.beta.kubernetes.io/azure-load-balancer-mode")
	//    In this case the Loadbalancer of the primary Availability set is selected
	// 2. "__auto__" mode - service is annotated with __auto__ value, this when loadbalancer from any availability set
	//    is selected which has the minimum rules associated with it.
	// 3. "as1,as2" mode - this is when the load balancer from the specified availability sets is selected that has the
	//    minimum rules associated with it.
	ServiceAnnotationLoadBalancerMode = "service.beta.kubernetes.io/azure-load-balancer-mode"

	// ServiceAnnotationLoadBalancerAutoModeValue is the annotation used on the service to specify the
	// Azure load balancer auto selection from the availability sets
	ServiceAnnotationLoadBalancerAutoModeValue = "__auto__"

	// ServiceAnnotationDNSLabelName is the annotation used on the service
	// to specify the DNS label name for the service's public IP.
	ServiceAnnotationDNSLabelName = "service.beta.kubernetes.io/azure-dns-label-name"

	// ServiceAnnotationSharedSecurityRule is the annotation used on the service
	// to specify that the service should be exposed using an Azure security rule
	// that may be shared with other services, trading specificity of rules for an
	// increase in the number of services that can be exposed. This relies on the
	// Azure "augmented security rules" feature.
	ServiceAnnotationSharedSecurityRule = "service.beta.kubernetes.io/azure-shared-securityrule"

	// ServiceAnnotationLoadBalancerResourceGroup is the annotation used on the service
	// to specify the resource group of load balancer objects that are not in the same resource group as the cluster.
	ServiceAnnotationLoadBalancerResourceGroup = "service.beta.kubernetes.io/azure-load-balancer-resource-group"

	// ServiceAnnotationPIPName specifies the pip that will be applied to load balancer
	ServiceAnnotationPIPName = "service.beta.kubernetes.io/azure-pip-name"

	// ServiceAnnotationAllowedServiceTag is the annotation used on the service
	// to specify a list of allowed service tags separated by comma
	// Refer https://docs.microsoft.com/en-us/azure/virtual-network/security-overview#service-tags for all supported service tags.
	ServiceAnnotationAllowedServiceTag = "service.beta.kubernetes.io/azure-allowed-service-tags"

	// ServiceAnnotationLoadBalancerIdleTimeout is the annotation used on the service
	// to specify the idle timeout for connections on the load balancer in minutes.
	ServiceAnnotationLoadBalancerIdleTimeout = "service.beta.kubernetes.io/azure-load-balancer-tcp-idle-timeout"

	// ServiceAnnotationLoadBalancerMixedProtocols is the annotation used on the service
	// to create both TCP and UDP protocols when creating load balancer rules.
	ServiceAnnotationLoadBalancerMixedProtocols = "service.beta.kubernetes.io/azure-load-balancer-mixed-protocols"

	// ServiceAnnotationLoadBalancerDisableTCPReset is the annotation used on the service
	// to set enableTcpReset to false in load balancer rule. This only works for Azure standard load balancer backed service.
	ServiceAnnotationLoadBalancerDisableTCPReset = "service.beta.kubernetes.io/azure-load-balancer-disable-tcp-reset"

	// serviceTagKey is the service key applied for public IP tags.
	serviceTagKey = "service"

	// clusterNameKey is the cluster name key applied for public IP tags.
	clusterNameKey = "kubernetes-cluster-name"
)
// GetLoadBalancer returns whether the specified load balancer and its components exist, and
// if so, what its status is.
func (az *Cloud) GetLoadBalancer(ctx context.Context, clusterName string, service *v1.Service) (status *v1.LoadBalancerStatus, exists bool, err error) {
	// Public IPs are separate Azure resources from the load balancer itself, so a
	// service can still own an orphaned PIP after the LB is deleted
	// (kubernetes/kubernetes#80571). Existence therefore depends on BOTH the LB
	// resource and the PIP resource; lookup failures are treated as "not found".
	pipExists := false
	if pipName, _, pipErr := az.determinePublicIPName(clusterName, service); pipErr == nil {
		pipResourceGroup := az.getPublicIPAddressResourceGroup(service)
		if _, found, getErr := az.getPublicIPAddress(pipResourceGroup, pipName); getErr == nil {
			pipExists = found
		}
	}

	_, status, lbExists, err := az.getServiceLoadBalancer(service, clusterName, nil, false)
	if err != nil {
		return nil, pipExists, err
	}

	// Report exists = false only when neither the LB nor the PIP is present.
	if !lbExists && !pipExists {
		klog.V(5).Infof("getloadbalancer (cluster:%s) (service:%s) - doesn't exist", clusterName, getServiceName(service))
		return nil, false, nil
	}

	// Either resource (or both) still exists.
	return status, true, nil
}
// getPublicIPDomainNameLabel returns the DNS label requested via the
// azure-dns-label-name annotation, or "" when the annotation is absent.
func getPublicIPDomainNameLabel(service *v1.Service) string {
	// Indexing a (possibly nil) map yields the zero value "" for missing keys,
	// which is exactly the fallback the caller expects.
	return service.Annotations[ServiceAnnotationDNSLabelName]
}
// EnsureLoadBalancer creates a new load balancer 'name', or updates the existing one. Returns the status of the balancer
func (az *Cloud) EnsureLoadBalancer(ctx context.Context, clusterName string, service *v1.Service, nodes []*v1.Node) (*v1.LoadBalancerStatus, error) {
	// When a client updates the internal load balancer annotation,
	// the service may be switched from an internal LB to a public one, or vice versa.
	// Here we'll firstly ensure service does not lie in the opposite LB.
	serviceName := getServiceName(service)
	klog.V(5).Infof("ensureloadbalancer(%s): START clusterName=%q", serviceName, clusterName)

	// Step 1: reconcile the desired LB (frontend configs, backend pools, rules).
	lb, err := az.reconcileLoadBalancer(clusterName, service, nodes, true /* wantLb */)
	if err != nil {
		klog.Errorf("reconcileLoadBalancer(%s) failed: %v", serviceName, err)
		return nil, err
	}

	// Step 2: read back the ingress IP from the reconciled LB.
	lbStatus, err := az.getServiceLoadBalancerStatus(service, lb)
	if err != nil {
		klog.Errorf("getServiceLoadBalancerStatus(%s) failed: %v", serviceName, err)
		return nil, err
	}

	var serviceIP *string
	if lbStatus != nil && len(lbStatus.Ingress) > 0 {
		serviceIP = &lbStatus.Ingress[0].IP
	}

	// Step 3: open/refresh NSG rules for the allocated IP.
	klog.V(2).Infof("EnsureLoadBalancer: reconciling security group for service %q with IP %q, wantLb = true", serviceName, logSafe(serviceIP))
	if _, err := az.reconcileSecurityGroup(clusterName, service, serviceIP, true /* wantLb */); err != nil {
		klog.Errorf("reconcileSecurityGroup(%s) failed: %#v", serviceName, err)
		return nil, err
	}

	// Step 4: clean the service out of the OPPOSITE (internal vs public) LB by
	// reconciling a copy with the internal annotation flipped and wantLb=false.
	updateService := updateServiceLoadBalancerIP(service, to.String(serviceIP))
	flippedService := flipServiceInternalAnnotation(updateService)
	if _, err := az.reconcileLoadBalancer(clusterName, flippedService, nil, false /* wantLb */); err != nil {
		klog.Errorf("reconcileLoadBalancer(%s) failed: %#v", serviceName, err)
		return nil, err
	}

	// lb is not reused here because the ETAG may be changed in above operations, hence reconcilePublicIP() would get lb again from cache.
	if _, err := az.reconcilePublicIP(clusterName, updateService, to.String(lb.Name), true /* wantLb */); err != nil {
		klog.Errorf("reconcilePublicIP(%s) failed: %#v", serviceName, err)
		return nil, err
	}

	return lbStatus, nil
}
// UpdateLoadBalancer updates hosts under the specified load balancer.
func (az *Cloud) UpdateLoadBalancer(ctx context.Context, clusterName string, service *v1.Service, nodes []*v1.Node) error {
	// An update is just a full ensure pass; skip it entirely for services that
	// are being deleted or no longer exist.
	if az.shouldUpdateLoadBalancer(clusterName, service) {
		_, err := az.EnsureLoadBalancer(ctx, clusterName, service, nodes)
		return err
	}
	klog.V(2).Infof("UpdateLoadBalancer: skipping service %s because it is either being deleted or does not exist anymore", service.Name)
	return nil
}
// EnsureLoadBalancerDeleted deletes the specified load balancer if it
// exists, returning nil if the load balancer specified either didn't exist or
// was successfully deleted.
// This construction is useful because many cloud providers' load balancers
// have multiple underlying components, meaning a Get could say that the LB
// doesn't exist even if some part of it is still laying around.
func (az *Cloud) EnsureLoadBalancerDeleted(ctx context.Context, clusterName string, service *v1.Service) error {
	isInternal := requiresInternalLoadBalancer(service)
	serviceName := getServiceName(service)
	klog.V(5).Infof("Delete service (%s): START clusterName=%q", serviceName, clusterName)

	// Resolve the service's current IP first, since the NSG rules to remove are
	// keyed on it.
	serviceIPToCleanup, err := az.findServiceIPAddress(ctx, clusterName, service, isInternal)
	if err != nil {
		return err
	}

	// Teardown order: security group rules, then LB config, then the public IP.
	klog.V(2).Infof("EnsureLoadBalancerDeleted: reconciling security group for service %q with IP %q, wantLb = false", serviceName, serviceIPToCleanup)
	if _, err := az.reconcileSecurityGroup(clusterName, service, &serviceIPToCleanup, false /* wantLb */); err != nil {
		return err
	}

	if _, err := az.reconcileLoadBalancer(clusterName, service, nil, false /* wantLb */); err != nil {
		return err
	}

	if _, err := az.reconcilePublicIP(clusterName, service, "", false /* wantLb */); err != nil {
		return err
	}

	klog.V(2).Infof("Delete service (%s): FINISH", serviceName)
	return nil
}
// GetLoadBalancerName returns the LoadBalancer name.
func (az *Cloud) GetLoadBalancerName(ctx context.Context, clusterName string, service *v1.Service) string {
	// Azure uses the generic cloud-provider naming scheme; clusterName is unused.
	name := cloudprovider.DefaultLoadBalancerName(service)
	return name
}
// getLoadBalancerResourceGroup returns the resource group holding LB objects:
// the dedicated LoadBalancerResourceGroup when configured, otherwise the
// cluster's resource group.
func (az *Cloud) getLoadBalancerResourceGroup() string {
	if rg := az.LoadBalancerResourceGroup; rg != "" {
		return rg
	}
	return az.ResourceGroup
}
// getServiceLoadBalancer gets the loadbalancer for the service if it already exists.
// If wantLb is TRUE then it selects a new load balancer.
// In case the selected load balancer does not exist it returns network.LoadBalancer struct
// with added metadata (such as name, location) and existsLB set to FALSE.
// By default - cluster default LB is returned.
func (az *Cloud) getServiceLoadBalancer(service *v1.Service, clusterName string, nodes []*v1.Node, wantLb bool) (lb *network.LoadBalancer, status *v1.LoadBalancerStatus, exists bool, err error) {
	isInternal := requiresInternalLoadBalancer(service)
	var defaultLB *network.LoadBalancer
	primaryVMSetName := az.vmSet.GetPrimaryVMSetName()
	defaultLBName := az.getAzureLoadBalancerName(clusterName, primaryVMSetName, isInternal)

	existingLBs, err := az.ListLB(service)
	if err != nil {
		return nil, nil, false, err
	}

	// check if the service already has a load balancer
	if existingLBs != nil {
		for i := range existingLBs {
			existingLB := existingLBs[i]
			// Remember the primary-VMSet LB as the fallback default.
			if strings.EqualFold(*existingLB.Name, defaultLBName) {
				defaultLB = &existingLB
			}
			// Only LBs of the matching kind (internal vs public) can own the service.
			if isInternalLoadBalancer(&existingLB) != isInternal {
				continue
			}
			status, err = az.getServiceLoadBalancerStatus(service, &existingLB)
			if err != nil {
				return nil, nil, false, err
			}
			if status == nil {
				// service is not on this load balancer
				continue
			}

			return &existingLB, status, true, nil
		}
	}

	hasMode, _, _ := getServiceLoadBalancerMode(service)
	// The mode annotation only makes sense for basic SKU, where nodes are spread
	// across multiple LBs; standard SKU puts all nodes behind one LB.
	if az.useStandardLoadBalancer() && hasMode {
		return nil, nil, false, fmt.Errorf("standard load balancer doesn't work with annotation %q", ServiceAnnotationLoadBalancerMode)
	}

	// service does not have a basic load balancer, select one.
	// Standard load balancer doesn't need this because all backends nodes should be added to same LB.
	if wantLb && !az.useStandardLoadBalancer() {
		// select new load balancer for service
		selectedLB, exists, err := az.selectLoadBalancer(clusterName, service, &existingLBs, nodes)
		if err != nil {
			return nil, nil, false, err
		}

		return selectedLB, nil, exists, err
	}

	// create a default LB with meta data if not present
	if defaultLB == nil {
		defaultLB = &network.LoadBalancer{
			Name:                         &defaultLBName,
			Location:                     &az.Location,
			LoadBalancerPropertiesFormat: &network.LoadBalancerPropertiesFormat{},
		}
		if az.useStandardLoadBalancer() {
			defaultLB.Sku = &network.LoadBalancerSku{
				Name: network.LoadBalancerSkuNameStandard,
			}
		}
	}

	return defaultLB, nil, false, nil
}
// selectLoadBalancer selects load balancer for the service in the cluster.
// The selection algorithm selects the load balancer which currently has
// the minimum lb rules. If there are multiple LBs with same number of rules,
// then selects the first one (sorted based on name).
//
// NOTE(review): the named return existsLb is never assigned, so it is always
// false — even when an existing LB is selected from the map. Callers currently
// receive exists=false in that case; confirm before changing that behavior.
func (az *Cloud) selectLoadBalancer(clusterName string, service *v1.Service, existingLBs *[]network.LoadBalancer, nodes []*v1.Node) (selectedLB *network.LoadBalancer, existsLb bool, err error) {
	isInternal := requiresInternalLoadBalancer(service)
	serviceName := getServiceName(service)
	klog.V(2).Infof("selectLoadBalancer for service (%s): isInternal(%v) - start", serviceName, isInternal)
	vmSetNames, err := az.vmSet.GetVMSetNames(service, nodes)
	if err != nil {
		klog.Errorf("az.selectLoadBalancer: cluster(%s) service(%s) isInternal(%t) - az.GetVMSetNames failed, err=(%v)", clusterName, serviceName, isInternal, err)
		return nil, false, err
	}
	klog.Infof("selectLoadBalancer: cluster(%s) service(%s) isInternal(%t) - vmSetNames %v", clusterName, serviceName, isInternal, *vmSetNames)

	// Index existing LBs by name for O(1) lookup per candidate VM set.
	mapExistingLBs := map[string]network.LoadBalancer{}
	for _, lb := range *existingLBs {
		mapExistingLBs[*lb.Name] = lb
	}
	selectedLBRuleCount := math.MaxInt32
	for _, currASName := range *vmSetNames {
		currLBName := az.getAzureLoadBalancerName(clusterName, currASName, isInternal)
		lb, exists := mapExistingLBs[currLBName]
		if !exists {
			// select this LB as this is a new LB and will have minimum rules
			// create tmp lb struct to hold metadata for the new load-balancer
			selectedLB = &network.LoadBalancer{
				Name:                         &currLBName,
				Location:                     &az.Location,
				LoadBalancerPropertiesFormat: &network.LoadBalancerPropertiesFormat{},
			}

			return selectedLB, false, nil
		}

		// BUGFIX: guard the pointer before dereferencing. The original did
		// `lbRules := *lb.LoadBalancingRules` and only then nil-checked the
		// resulting slice, so an LB with a nil LoadBalancingRules pointer
		// (no rules yet) caused a nil-pointer panic before the check ran.
		currLBRuleCount := 0
		if lb.LoadBalancingRules != nil {
			currLBRuleCount = len(*lb.LoadBalancingRules)
		}
		if currLBRuleCount < selectedLBRuleCount {
			selectedLBRuleCount = currLBRuleCount
			selectedLB = &lb
		}
	}

	if selectedLB == nil {
		err = fmt.Errorf("selectLoadBalancer: cluster(%s) service(%s) isInternal(%t) - unable to find load balancer for selected VM sets %v", clusterName, serviceName, isInternal, *vmSetNames)
		klog.Error(err)
		return nil, false, err
	}
	// validate if the selected LB has not exceeded the MaximumLoadBalancerRuleCount
	if az.Config.MaximumLoadBalancerRuleCount != 0 && selectedLBRuleCount >= az.Config.MaximumLoadBalancerRuleCount {
		err = fmt.Errorf("selectLoadBalancer: cluster(%s) service(%s) isInternal(%t) - all available load balancers have exceeded maximum rule limit %d, vmSetNames (%v)", clusterName, serviceName, isInternal, selectedLBRuleCount, *vmSetNames)
		klog.Error(err)
		return selectedLB, existsLb, err
	}

	return selectedLB, existsLb, nil
}
// getServiceLoadBalancerStatus extracts the service's ingress IP from the
// frontend IP configuration that matches the service's expected config name.
// Returns (nil, nil) when the LB, its frontend configs, or a matching config
// is absent — callers use a nil status to mean "service not on this LB".
func (az *Cloud) getServiceLoadBalancerStatus(service *v1.Service, lb *network.LoadBalancer) (status *v1.LoadBalancerStatus, err error) {
	if lb == nil {
		klog.V(10).Info("getServiceLoadBalancerStatus: lb is nil")
		return nil, nil
	}
	if lb.FrontendIPConfigurations == nil || *lb.FrontendIPConfigurations == nil {
		klog.V(10).Info("getServiceLoadBalancerStatus: lb.FrontendIPConfigurations is nil")
		return nil, nil
	}
	isInternal := requiresInternalLoadBalancer(service)
	lbFrontendIPConfigName := az.getFrontendIPConfigName(service)
	serviceName := getServiceName(service)
	for _, ipConfiguration := range *lb.FrontendIPConfigurations {
		if lbFrontendIPConfigName == *ipConfiguration.Name {
			var lbIP *string
			if isInternal {
				// Internal LBs carry the IP directly on the frontend config.
				lbIP = ipConfiguration.PrivateIPAddress
			} else {
				// Public LBs only reference a PIP resource; resolve its ID to a
				// name, then fetch the PIP to read the actual address.
				if ipConfiguration.PublicIPAddress == nil {
					return nil, fmt.Errorf("get(%s): lb(%s) - failed to get LB PublicIPAddress is Nil", serviceName, *lb.Name)
				}
				pipID := ipConfiguration.PublicIPAddress.ID
				if pipID == nil {
					return nil, fmt.Errorf("get(%s): lb(%s) - failed to get LB PublicIPAddress ID is Nil", serviceName, *lb.Name)
				}
				pipName, err := getLastSegment(*pipID)
				if err != nil {
					return nil, fmt.Errorf("get(%s): lb(%s) - failed to get LB PublicIPAddress Name from ID(%s)", serviceName, *lb.Name, *pipID)
				}
				pip, existsPip, err := az.getPublicIPAddress(az.getPublicIPAddressResourceGroup(service), pipName)
				if err != nil {
					return nil, err
				}
				if existsPip {
					lbIP = pip.IPAddress
				}
			}

			klog.V(2).Infof("getServiceLoadBalancerStatus gets ingress IP %q from frontendIPConfiguration %q for service %q", to.String(lbIP), lbFrontendIPConfigName, serviceName)
			return &v1.LoadBalancerStatus{Ingress: []v1.LoadBalancerIngress{{IP: to.String(lbIP)}}}, nil
		}
	}

	return nil, nil
}
// determinePublicIPName resolves the public IP resource name for the service.
// The second return value reports whether the PIP is required to pre-exist
// (true only when the azure-pip-name annotation pins an explicit PIP).
func (az *Cloud) determinePublicIPName(clusterName string, service *v1.Service) (string, bool, error) {
	// An explicit PIP-name annotation wins; that PIP must already exist.
	if name, found := service.Annotations[ServiceAnnotationPIPName]; found && name != "" {
		return name, true, nil
	}

	loadBalancerIP := service.Spec.LoadBalancerIP
	if loadBalancerIP == "" {
		// No user-supplied IP: derive a deterministic PIP name for the service.
		return az.getPublicIPName(clusterName, service), false, nil
	}

	// The user pinned a specific IP; find which PIP resource owns it.
	pipResourceGroup := az.getPublicIPAddressResourceGroup(service)
	pips, err := az.ListPIP(service, pipResourceGroup)
	if err != nil {
		return "", false, err
	}
	for i := range pips {
		props := pips[i].PublicIPAddressPropertiesFormat
		if props.IPAddress != nil && *props.IPAddress == loadBalancerIP {
			return *pips[i].Name, false, nil
		}
	}

	return "", false, fmt.Errorf("user supplied IP Address %s was not found in resource group %s", loadBalancerIP, pipResourceGroup)
}
// flipServiceInternalAnnotation returns a deep copy of the service with the
// internal-LB annotation toggled (internal -> external removes it;
// external -> internal sets it to "true").
func flipServiceInternalAnnotation(service *v1.Service) *v1.Service {
	// Work on a deep copy so the caller's (possibly cache-shared) object is
	// never mutated.
	flipped := service.DeepCopy()
	if flipped.Annotations == nil {
		flipped.Annotations = map[string]string{}
	}

	if flipped.Annotations[ServiceAnnotationLoadBalancerInternal] == "true" {
		// Currently internal: make it external by dropping the annotation.
		delete(flipped.Annotations, ServiceAnnotationLoadBalancerInternal)
		return flipped
	}

	// Currently external: make it internal.
	flipped.Annotations[ServiceAnnotationLoadBalancerInternal] = "true"
	return flipped
}
// updateServiceLoadBalancerIP returns a deep copy of the service with
// Spec.LoadBalancerIP set to serviceIP (when non-empty).
func updateServiceLoadBalancerIP(service *v1.Service, serviceIP string) *v1.Service {
	result := service.DeepCopy()
	// DeepCopy of a nil service yields nil, hence the extra nil guard.
	if serviceIP != "" && result != nil {
		result.Spec.LoadBalancerIP = serviceIP
	}
	return result
}
// findServiceIPAddress returns the IP the service is currently exposed on:
// the user-specified Spec.LoadBalancerIP when set, otherwise the first ingress
// IP of the existing load balancer. Returns "" (no error) when the LB or its
// ingress is gone, treating that as "already removed".
func (az *Cloud) findServiceIPAddress(ctx context.Context, clusterName string, service *v1.Service, isInternalLb bool) (string, error) {
	if ip := service.Spec.LoadBalancerIP; len(ip) > 0 {
		return ip, nil
	}

	_, lbStatus, existsLb, err := az.getServiceLoadBalancer(service, clusterName, nil, false)
	if err != nil {
		return "", err
	}
	switch {
	case !existsLb:
		klog.V(2).Infof("Expected to find an IP address for service %s but did not. Assuming it has been removed", service.Name)
		return "", nil
	case len(lbStatus.Ingress) < 1:
		klog.V(2).Infof("Expected to find an IP address for service %s but it had no ingresses. Assuming it has been removed", service.Name)
		return "", nil
	}

	return lbStatus.Ingress[0].IP, nil
}
// ensurePublicIPExists creates or updates the service's public IP resource and
// returns it re-fetched from Azure (so computed fields like the allocated
// address are populated). When shouldPIPExisted is true (PIP pinned via
// annotation) a missing PIP is an error rather than something to create.
func (az *Cloud) ensurePublicIPExists(service *v1.Service, pipName string, domainNameLabel, clusterName string, shouldPIPExisted bool) (*network.PublicIPAddress, error) {
	pipResourceGroup := az.getPublicIPAddressResourceGroup(service)
	pip, existsPip, err := az.getPublicIPAddress(pipResourceGroup, pipName)
	if err != nil {
		return nil, err
	}

	serviceName := getServiceName(service)
	if existsPip {
		// return if pip exist and dns label is the same
		if getDomainNameLabel(&pip) == domainNameLabel {
			return &pip, nil
		}
		klog.V(2).Infof("ensurePublicIPExists for service(%s): pip(%s) - updating", serviceName, *pip.Name)
		if pip.PublicIPAddressPropertiesFormat == nil {
			pip.PublicIPAddressPropertiesFormat = &network.PublicIPAddressPropertiesFormat{
				PublicIPAllocationMethod: network.Static,
			}
		}
	} else {
		if shouldPIPExisted {
			return nil, fmt.Errorf("PublicIP from annotation azure-pip-name=%s for service %s doesn't exist", pipName, serviceName)
		}
		// Build a brand-new PIP resource tagged with the owning service/cluster.
		pip.Name = to.StringPtr(pipName)
		pip.Location = to.StringPtr(az.Location)
		pip.PublicIPAddressPropertiesFormat = &network.PublicIPAddressPropertiesFormat{
			PublicIPAllocationMethod: network.Static,
		}
		pip.Tags = map[string]*string{
			serviceTagKey:  &serviceName,
			clusterNameKey: &clusterName,
		}
		if az.useStandardLoadBalancer() {
			pip.Sku = &network.PublicIPAddressSku{
				Name: network.PublicIPAddressSkuNameStandard,
			}
		}
		klog.V(2).Infof("ensurePublicIPExists for service(%s): pip(%s) - creating", serviceName, *pip.Name)
	}

	// Apply (or clear) the requested DNS label on both the update and create paths.
	if len(domainNameLabel) == 0 {
		pip.PublicIPAddressPropertiesFormat.DNSSettings = nil
	} else {
		pip.PublicIPAddressPropertiesFormat.DNSSettings = &network.PublicIPAddressDNSSettings{
			DomainNameLabel: &domainNameLabel,
		}
	}

	if az.ipv6DualStackEnabled {
		// TODO: (khenidak) if we ever enable IPv6 single stack, then we should
		// not wrap the following in a feature gate
		ipv6 := utilnet.IsIPv6String(service.Spec.ClusterIP)
		if ipv6 {
			pip.PublicIPAddressVersion = network.IPv6
			klog.V(2).Infof("service(%s): pip(%s) - creating as ipv6 for clusterIP:%v", serviceName, *pip.Name, service.Spec.ClusterIP)

			pip.PublicIPAddressPropertiesFormat.PublicIPAllocationMethod = network.Dynamic
			if az.useStandardLoadBalancer() {
				// standard sku must have static allocation method for ipv6
				pip.PublicIPAddressPropertiesFormat.PublicIPAllocationMethod = network.Static
			}
		} else {
			pip.PublicIPAddressVersion = network.IPv4
			klog.V(2).Infof("service(%s): pip(%s) - creating as ipv4 for clusterIP:%v", serviceName, *pip.Name, service.Spec.ClusterIP)
		}
	}

	// NOTE(review): this logs "creating" even on the update path (it duplicates
	// the earlier create-branch message) — looks like leftover logging, confirm.
	klog.V(2).Infof("ensurePublicIPExists for service(%s): pip(%s) - creating", serviceName, *pip.Name)
	klog.V(10).Infof("CreateOrUpdatePIP(%s, %q): start", pipResourceGroup, *pip.Name)
	err = az.CreateOrUpdatePIP(service, pipResourceGroup, pip)
	if err != nil {
		klog.V(2).Infof("ensure(%s) abort backoff: pip(%s)", serviceName, *pip.Name)
		return nil, err
	}
	klog.V(10).Infof("CreateOrUpdatePIP(%s, %q): end", pipResourceGroup, *pip.Name)

	// Re-fetch so Azure-computed fields (e.g. the allocated IP address) are set.
	ctx, cancel := getContextWithCancel()
	defer cancel()
	pip, rerr := az.PublicIPAddressesClient.Get(ctx, pipResourceGroup, *pip.Name, "")
	if rerr != nil {
		return nil, rerr.Error()
	}
	return &pip, nil
}
// getDomainNameLabel returns the PIP's DNS domain-name label, or "" when the
// PIP or any intermediate struct is nil.
func getDomainNameLabel(pip *network.PublicIPAddress) string {
	if pip == nil || pip.PublicIPAddressPropertiesFormat == nil {
		return ""
	}
	dns := pip.PublicIPAddressPropertiesFormat.DNSSettings
	if dns == nil {
		return ""
	}
	return to.String(dns.DomainNameLabel)
}
// getIdleTimeout parses the azure-load-balancer-tcp-idle-timeout annotation.
// Returns nil (no error) when the annotation is absent, letting Azure apply
// its default; otherwise the value must be a whole number of minutes in
// [4, 30].
func getIdleTimeout(s *v1.Service) (*int32, error) {
	const (
		min = 4
		max = 30
	)

	val, ok := s.Annotations[ServiceAnnotationLoadBalancerIdleTimeout]
	if !ok {
		// Return a nil here as this will set the value to the azure default
		return nil, nil
	}

	errInvalidTimeout := fmt.Errorf("idle timeout value must be a whole number representing minutes between %d and %d", min, max)
	// FIX: the original named this local `to`, shadowing the imported
	// autorest `to` package within this function.
	minutes, err := strconv.Atoi(val)
	if err != nil {
		return nil, fmt.Errorf("error parsing idle timeout value: %v: %v", err, errInvalidTimeout)
	}
	timeout := int32(minutes)
	if timeout < min || timeout > max {
		return nil, errInvalidTimeout
	}

	return &timeout, nil
}
// isFrontendIPChanged reports whether the given frontend IP configuration must
// be dropped and recreated for the service: either the service owns a config
// under a stale name, or the matching config's subnet / private IP / public IP
// no longer agrees with the service spec.
func (az *Cloud) isFrontendIPChanged(clusterName string, config network.FrontendIPConfiguration, service *v1.Service, lbFrontendIPConfigName string) (bool, error) {
	// Service owns this config but under a different name -> must be rebuilt.
	if az.serviceOwnsFrontendIP(config, service) && !strings.EqualFold(to.String(config.Name), lbFrontendIPConfigName) {
		return true, nil
	}
	// A config with a different name that the service doesn't own is irrelevant.
	if !strings.EqualFold(to.String(config.Name), lbFrontendIPConfigName) {
		return false, nil
	}
	loadBalancerIP := service.Spec.LoadBalancerIP
	isInternal := requiresInternalLoadBalancer(service)
	if isInternal {
		// Judge subnet
		subnetName := subnet(service)
		if subnetName != nil {
			subnet, existsSubnet, err := az.getSubnet(az.VnetName, *subnetName)
			if err != nil {
				return false, err
			}
			if !existsSubnet {
				return false, fmt.Errorf("failed to get subnet")
			}
			if config.Subnet != nil && !strings.EqualFold(to.String(config.Subnet.Name), to.String(subnet.Name)) {
				return true, nil
			}
		}
		if loadBalancerIP == "" {
			// No pinned IP: the config should be Dynamic; Static means it changed.
			return config.PrivateIPAllocationMethod == network.Static, nil
		}
		// Pinned IP: the config must be Static AND match the requested address.
		return config.PrivateIPAllocationMethod != network.Static || !strings.EqualFold(loadBalancerIP, to.String(config.PrivateIPAddress)), nil
	}
	if loadBalancerIP == "" {
		return false, nil
	}
	// Public LB with a pinned IP: the config must reference the PIP that owns it.
	pipName, _, err := az.determinePublicIPName(clusterName, service)
	if err != nil {
		return false, err
	}
	pipResourceGroup := az.getPublicIPAddressResourceGroup(service)
	pip, existsPip, err := az.getPublicIPAddress(pipResourceGroup, pipName)
	if err != nil {
		return false, err
	}
	if !existsPip {
		return true, nil
	}
	return config.PublicIPAddress != nil && !strings.EqualFold(to.String(pip.ID), to.String(config.PublicIPAddress.ID)), nil
}
// This ensures load balancer exists and the frontend ip config is setup.
// This also reconciles the Service's Ports with the LoadBalancer config.
// This entails adding rules/probes for expected Ports and removing stale rules/ports.
// nodes only used if wantLb is true
func (az *Cloud) reconcileLoadBalancer(clusterName string, service *v1.Service, nodes []*v1.Node, wantLb bool) (*network.LoadBalancer, error) {
isInternal := requiresInternalLoadBalancer(service)
isBackendPoolPreConfigured := az.isBackendPoolPreConfigured(service)
serviceName := getServiceName(service)
klog.V(2).Infof("reconcileLoadBalancer for service(%s) - wantLb(%t): started", serviceName, wantLb)
lb, _, _, err := az.getServiceLoadBalancer(service, clusterName, nodes, wantLb)
if err != nil {
klog.Errorf("reconcileLoadBalancer: failed to get load balancer for service %q, error: %v", serviceName, err)
return nil, err
}
lbName := *lb.Name
lbResourceGroup := az.getLoadBalancerResourceGroup()
klog.V(2).Infof("reconcileLoadBalancer for service(%s): lb(%s/%s) wantLb(%t) resolved load balancer name", serviceName, lbResourceGroup, lbName, wantLb)
lbFrontendIPConfigName := az.getFrontendIPConfigName(service)
lbFrontendIPConfigID := az.getFrontendIPConfigID(lbName, lbResourceGroup, lbFrontendIPConfigName)
lbBackendPoolName := getBackendPoolName(az.ipv6DualStackEnabled, clusterName, service)
lbBackendPoolID := az.getBackendPoolID(lbName, lbResourceGroup, lbBackendPoolName)
lbIdleTimeout, err := getIdleTimeout(service)
if wantLb && err != nil {
return nil, err
}
dirtyLb := false
// Ensure LoadBalancer's Backend Pool Configuration
if wantLb {
newBackendPools := []network.BackendAddressPool{}
if lb.BackendAddressPools != nil {
newBackendPools = *lb.BackendAddressPools
}
foundBackendPool := false
for _, bp := range newBackendPools {
if strings.EqualFold(*bp.Name, lbBackendPoolName) {
klog.V(10).Infof("reconcileLoadBalancer for service (%s)(%t): lb backendpool - found wanted backendpool. not adding anything", serviceName, wantLb)
foundBackendPool = true
break
} else {
klog.V(10).Infof("reconcileLoadBalancer for service (%s)(%t): lb backendpool - found other backendpool %s", serviceName, wantLb, *bp.Name)
}
}
if !foundBackendPool {
if isBackendPoolPreConfigured {
klog.V(2).Infof("reconcileLoadBalancer for service (%s)(%t): lb backendpool - PreConfiguredBackendPoolLoadBalancerTypes %s has been set but can not find corresponding backend pool, ignoring it",
serviceName,
wantLb,
az.PreConfiguredBackendPoolLoadBalancerTypes)
isBackendPoolPreConfigured = false
}
newBackendPools = append(newBackendPools, network.BackendAddressPool{
Name: to.StringPtr(lbBackendPoolName),
})
klog.V(10).Infof("reconcileLoadBalancer for service (%s)(%t): lb backendpool - adding backendpool", serviceName, wantLb)
dirtyLb = true
lb.BackendAddressPools = &newBackendPools
}
}
// Ensure LoadBalancer's Frontend IP Configurations
dirtyConfigs := false
newConfigs := []network.FrontendIPConfiguration{}
if lb.FrontendIPConfigurations != nil {
newConfigs = *lb.FrontendIPConfigurations
}
if !wantLb {
for i := len(newConfigs) - 1; i >= 0; i-- {
config := newConfigs[i]
if az.serviceOwnsFrontendIP(config, service) {
klog.V(2).Infof("reconcileLoadBalancer for service (%s)(%t): lb frontendconfig(%s) - dropping", serviceName, wantLb, lbFrontendIPConfigName)
newConfigs = append(newConfigs[:i], newConfigs[i+1:]...)
dirtyConfigs = true
}
}
} else {
for i := len(newConfigs) - 1; i >= 0; i-- {
config := newConfigs[i]
isFipChanged, err := az.isFrontendIPChanged(clusterName, config, service, lbFrontendIPConfigName)
if err != nil {
return nil, err
}
if isFipChanged {
klog.V(2).Infof("reconcileLoadBalancer for service (%s)(%t): lb frontendconfig(%s) - dropping", serviceName, wantLb, *config.Name)
newConfigs = append(newConfigs[:i], newConfigs[i+1:]...)
dirtyConfigs = true
}
}
foundConfig := false
for _, config := range newConfigs {
if strings.EqualFold(*config.Name, lbFrontendIPConfigName) {
foundConfig = true
break
}
}
if !foundConfig {
// construct FrontendIPConfigurationPropertiesFormat
var fipConfigurationProperties *network.FrontendIPConfigurationPropertiesFormat
if isInternal {
// azure does not support ILB for IPv6 yet.
// TODO: remove this check when ILB supports IPv6 *and* the SDK
// have been rev'ed to 2019* version
if utilnet.IsIPv6String(service.Spec.ClusterIP) {
return nil, fmt.Errorf("ensure(%s): lb(%s) - internal load balancers does not support IPv6", serviceName, lbName)
}
subnetName := subnet(service)
if subnetName == nil {
subnetName = &az.SubnetName
}
subnet, existsSubnet, err := az.getSubnet(az.VnetName, *subnetName)
if err != nil {
return nil, err
}
if !existsSubnet {
return nil, fmt.Errorf("ensure(%s): lb(%s) - failed to get subnet: %s/%s", serviceName, lbName, az.VnetName, az.SubnetName)
}
configProperties := network.FrontendIPConfigurationPropertiesFormat{
Subnet: &subnet,
}
loadBalancerIP := service.Spec.LoadBalancerIP
if loadBalancerIP != "" {
configProperties.PrivateIPAllocationMethod = network.Static
configProperties.PrivateIPAddress = &loadBalancerIP
} else {
// We'll need to call GetLoadBalancer later to retrieve allocated IP.
configProperties.PrivateIPAllocationMethod = network.Dynamic
}
fipConfigurationProperties = &configProperties
} else {
pipName, shouldPIPExisted, err := az.determinePublicIPName(clusterName, service)
if err != nil {
return nil, err
}
domainNameLabel := getPublicIPDomainNameLabel(service)
pip, err := az.ensurePublicIPExists(service, pipName, domainNameLabel, clusterName, shouldPIPExisted)
if err != nil {
return nil, err
}
fipConfigurationProperties = &network.FrontendIPConfigurationPropertiesFormat{
PublicIPAddress: &network.PublicIPAddress{ID: pip.ID},
}
}
newConfigs = append(newConfigs,
network.FrontendIPConfiguration{
Name: to.StringPtr(lbFrontendIPConfigName),
FrontendIPConfigurationPropertiesFormat: fipConfigurationProperties,
})
klog.V(10).Infof("reconcileLoadBalancer for service (%s)(%t): lb frontendconfig(%s) - adding", serviceName, wantLb, lbFrontendIPConfigName)
dirtyConfigs = true
}
}
if dirtyConfigs {
dirtyLb = true
lb.FrontendIPConfigurations = &newConfigs
}
// update probes/rules
expectedProbes, expectedRules, err := az.reconcileLoadBalancerRule(service, wantLb, lbFrontendIPConfigID, lbBackendPoolID, lbName, lbIdleTimeout)
if err != nil {
return nil, err
}
// remove unwanted probes
dirtyProbes := false
var updatedProbes []network.Probe
if lb.Probes != nil {
updatedProbes = *lb.Probes
}
for i := len(updatedProbes) - 1; i >= 0; i-- {
existingProbe := updatedProbes[i]
if az.serviceOwnsRule(service, *existingProbe.Name) {
klog.V(10).Infof("reconcileLoadBalancer for service (%s)(%t): lb probe(%s) - considering evicting", serviceName, wantLb, *existingProbe.Name)
keepProbe := false
if findProbe(expectedProbes, existingProbe) {
klog.V(10).Infof("reconcileLoadBalancer for service (%s)(%t): lb probe(%s) - keeping", serviceName, wantLb, *existingProbe.Name)
keepProbe = true
}
if !keepProbe {
updatedProbes = append(updatedProbes[:i], updatedProbes[i+1:]...)
klog.V(10).Infof("reconcileLoadBalancer for service (%s)(%t): lb probe(%s) - dropping", serviceName, wantLb, *existingProbe.Name)
dirtyProbes = true
}
}
}
// add missing, wanted probes
for _, expectedProbe := range expectedProbes {
foundProbe := false
if findProbe(updatedProbes, expectedProbe) {
klog.V(10).Infof("reconcileLoadBalancer for service (%s)(%t): lb probe(%s) - already exists", serviceName, wantLb, *expectedProbe.Name)
foundProbe = true
}
if !foundProbe {
klog.V(10).Infof("reconcileLoadBalancer for service (%s)(%t): lb probe(%s) - adding", serviceName, wantLb, *expectedProbe.Name)
updatedProbes = append(updatedProbes, expectedProbe)
dirtyProbes = true
}
}
if dirtyProbes {
dirtyLb = true
lb.Probes = &updatedProbes
}
// update rules
dirtyRules := false
var updatedRules []network.LoadBalancingRule
if lb.LoadBalancingRules != nil {
updatedRules = *lb.LoadBalancingRules
}
// update rules: remove unwanted
for i := len(updatedRules) - 1; i >= 0; i-- {
existingRule := updatedRules[i]
if az.serviceOwnsRule(service, *existingRule.Name) {
keepRule := false
klog.V(10).Infof("reconcileLoadBalancer for service (%s)(%t): lb rule(%s) - considering evicting", serviceName, wantLb, *existingRule.Name)
if findRule(expectedRules, existingRule, wantLb) {
klog.V(10).Infof("reconcileLoadBalancer for service (%s)(%t): lb rule(%s) - keeping", serviceName, wantLb, *existingRule.Name)
keepRule = true
}
if !keepRule {
klog.V(2).Infof("reconcileLoadBalancer for service (%s)(%t): lb rule(%s) - dropping", serviceName, wantLb, *existingRule.Name)
updatedRules = append(updatedRules[:i], updatedRules[i+1:]...)
dirtyRules = true
}
}
}
// update rules: add needed
for _, expectedRule := range expectedRules {
foundRule := false
if findRule(updatedRules, expectedRule, wantLb) {
klog.V(10).Infof("reconcileLoadBalancer for service (%s)(%t): lb rule(%s) - already exists", serviceName, wantLb, *expectedRule.Name)
foundRule = true
}
if !foundRule {
klog.V(10).Infof("reconcileLoadBalancer for service (%s)(%t): lb rule(%s) adding", serviceName, wantLb, *expectedRule.Name)
updatedRules = append(updatedRules, expectedRule)
dirtyRules = true
}
}
if dirtyRules {
dirtyLb = true
lb.LoadBalancingRules = &updatedRules
}
// We don't care if the LB exists or not
// We only care about if there is any change in the LB, which means dirtyLB
// If it is not exist, and no change to that, we don't CreateOrUpdate LB
if dirtyLb {
if lb.FrontendIPConfigurations == nil || len(*lb.FrontendIPConfigurations) == 0 {
if isBackendPoolPreConfigured {
klog.V(2).Infof("reconcileLoadBalancer for service(%s): lb(%s) - ignore cleanup of dirty lb because the lb is pre-confiruged", serviceName, lbName)
} else {
// When FrontendIPConfigurations is empty, we need to delete the Azure load balancer resource itself,
// because an Azure load balancer cannot have an empty FrontendIPConfigurations collection
klog.V(2).Infof("reconcileLoadBalancer for service(%s): lb(%s) - deleting; no remaining frontendIPConfigurations", serviceName, lbName)
// Remove backend pools from vmSets. This is required for virtual machine scale sets before removing the LB.
vmSetName := az.mapLoadBalancerNameToVMSet(lbName, clusterName)
klog.V(10).Infof("EnsureBackendPoolDeleted(%s,%s) for service %s: start", lbBackendPoolID, vmSetName, serviceName)
err := az.vmSet.EnsureBackendPoolDeleted(service, lbBackendPoolID, vmSetName, lb.BackendAddressPools)
if err != nil {
klog.Errorf("EnsureBackendPoolDeleted(%s) for service %s failed: %v", lbBackendPoolID, serviceName, err)
return nil, err
}
klog.V(10).Infof("EnsureBackendPoolDeleted(%s) for service %s: end", lbBackendPoolID, serviceName)
// Remove the LB.
klog.V(10).Infof("reconcileLoadBalancer: az.DeleteLB(%q): start", lbName)
err = az.DeleteLB(service, lbName)
if err != nil {
klog.V(2).Infof("reconcileLoadBalancer for service(%s) abort backoff: lb(%s) - deleting; no remaining frontendIPConfigurations", serviceName, lbName)
return nil, err
}
klog.V(10).Infof("az.DeleteLB(%q): end", lbName)
}
} else {
klog.V(2).Infof("reconcileLoadBalancer: reconcileLoadBalancer for service(%s): lb(%s) - updating", serviceName, lbName)
err := az.CreateOrUpdateLB(service, *lb)
if err != nil {
klog.V(2).Infof("reconcileLoadBalancer for service(%s) abort backoff: lb(%s) - updating", serviceName, lbName)
return nil, err
}
if isInternal {
// Refresh updated lb which will be used later in other places.
newLB, exist, err := az.getAzureLoadBalancer(lbName, cacheReadTypeDefault)
if err != nil {
klog.V(2).Infof("reconcileLoadBalancer for service(%s): getAzureLoadBalancer(%s) failed: %v", serviceName, lbName, err)
return nil, err
}
if !exist {
return nil, fmt.Errorf("load balancer %q not found", lbName)
}
lb = &newLB
}
}
}
if wantLb && nodes != nil && !isBackendPoolPreConfigured {
// Add the machines to the backend pool if they're not already
vmSetName := az.mapLoadBalancerNameToVMSet(lbName, clusterName)
// Etag would be changed when updating backend pools, so invalidate lbCache after it.
defer az.lbCache.Delete(lbName)
err := az.vmSet.EnsureHostsInPool(service, nodes, lbBackendPoolID, vmSetName, isInternal)
if err != nil {
return nil, err
}
}
klog.V(2).Infof("reconcileLoadBalancer for service(%s): lb(%s) finished", serviceName, lbName)
return lb, nil
}
// reconcileLoadBalancerRule computes the desired set of load-balancer health
// probes and load-balancing rules for the given service.
//
// When wantLb is false the service's ports are ignored (empty slices are
// produced), which lets the caller drop all service-owned probes/rules.
// Returns the expected probes, the expected rules, and any error encountered
// while mapping Kubernetes protocols to Azure ones.
func (az *Cloud) reconcileLoadBalancerRule(
	service *v1.Service,
	wantLb bool,
	lbFrontendIPConfigID string,
	lbBackendPoolID string,
	lbName string,
	lbIdleTimeout *int32) ([]network.Probe, []network.LoadBalancingRule, error) {
	var ports []v1.ServicePort
	if wantLb {
		ports = service.Spec.Ports
	} else {
		// No LB wanted: produce no probes/rules so stale ones get removed.
		ports = []v1.ServicePort{}
	}
	// TCP reset is only available on standard load balancers; it defaults to
	// enabled there but can be turned off via the service annotation.
	var enableTCPReset *bool
	if az.useStandardLoadBalancer() {
		enableTCPReset = to.BoolPtr(true)
		if v, ok := service.Annotations[ServiceAnnotationLoadBalancerDisableTCPReset]; ok {
			klog.V(2).Infof("reconcileLoadBalancerRule lb name (%s) flag(%s) is set to %s", lbName, ServiceAnnotationLoadBalancerDisableTCPReset, v)
			enableTCPReset = to.BoolPtr(!strings.EqualFold(v, "true"))
		}
	}
	var expectedProbes []network.Probe
	var expectedRules []network.LoadBalancingRule
	for _, port := range ports {
		protocols := []v1.Protocol{port.Protocol}
		// The mixed-protocols annotation makes a TCP port also get a UDP rule
		// (and vice versa) on the same port number.
		if v, ok := service.Annotations[ServiceAnnotationLoadBalancerMixedProtocols]; ok && v == "true" {
			klog.V(2).Infof("reconcileLoadBalancerRule lb name (%s) flag(%s) is set", lbName, ServiceAnnotationLoadBalancerMixedProtocols)
			if port.Protocol == v1.ProtocolTCP {
				protocols = append(protocols, v1.ProtocolUDP)
			} else if port.Protocol == v1.ProtocolUDP {
				protocols = append(protocols, v1.ProtocolTCP)
			}
		}
		for _, protocol := range protocols {
			lbRuleName := az.getLoadBalancerRuleName(service, protocol, port.Port)
			klog.V(2).Infof("reconcileLoadBalancerRule lb name (%s) rule name (%s)", lbName, lbRuleName)
			transportProto, _, probeProto, err := getProtocolsFromKubernetesProtocol(protocol)
			if err != nil {
				return expectedProbes, expectedRules, err
			}
			if servicehelpers.NeedsHealthCheck(service) {
				// externalTrafficPolicy=Local: probe the kube-proxy HTTP
				// health-check endpoint instead of the node port.
				podPresencePath, podPresencePort := servicehelpers.GetServiceHealthCheckPathPort(service)
				expectedProbes = append(expectedProbes, network.Probe{
					Name: &lbRuleName,
					ProbePropertiesFormat: &network.ProbePropertiesFormat{
						RequestPath:       to.StringPtr(podPresencePath),
						Protocol:          network.ProbeProtocolHTTP,
						Port:              to.Int32Ptr(podPresencePort),
						IntervalInSeconds: to.Int32Ptr(5),
						NumberOfProbes:    to.Int32Ptr(2),
					},
				})
			} else if protocol != v1.ProtocolUDP && protocol != v1.ProtocolSCTP {
				// we only add the expected probe if we're doing TCP
				expectedProbes = append(expectedProbes, network.Probe{
					Name: &lbRuleName,
					ProbePropertiesFormat: &network.ProbePropertiesFormat{
						Protocol:          *probeProto,
						Port:              to.Int32Ptr(port.NodePort),
						IntervalInSeconds: to.Int32Ptr(5),
						NumberOfProbes:    to.Int32Ptr(2),
					},
				})
			}
			// ClientIP session affinity maps to Azure's source-IP distribution.
			loadDistribution := network.LoadDistributionDefault
			if service.Spec.SessionAffinity == v1.ServiceAffinityClientIP {
				loadDistribution = network.LoadDistributionSourceIP
			}
			expectedRule := network.LoadBalancingRule{
				Name: &lbRuleName,
				LoadBalancingRulePropertiesFormat: &network.LoadBalancingRulePropertiesFormat{
					Protocol: *transportProto,
					FrontendIPConfiguration: &network.SubResource{
						ID: to.StringPtr(lbFrontendIPConfigID),
					},
					BackendAddressPool: &network.SubResource{
						ID: to.StringPtr(lbBackendPoolID),
					},
					LoadDistribution:    loadDistribution,
					FrontendPort:        to.Int32Ptr(port.Port),
					BackendPort:         to.Int32Ptr(port.Port),
					DisableOutboundSnat: to.BoolPtr(az.disableLoadBalancerOutboundSNAT()),
					EnableTCPReset:      enableTCPReset,
				},
			}
			// LB does not support floating IPs for IPV6 rules
			if utilnet.IsIPv6String(service.Spec.ClusterIP) {
				expectedRule.BackendPort = to.Int32Ptr(port.NodePort)
				expectedRule.EnableFloatingIP = to.BoolPtr(false)
			} else {
				expectedRule.EnableFloatingIP = to.BoolPtr(true)
			}
			if protocol == v1.ProtocolTCP {
				expectedRule.LoadBalancingRulePropertiesFormat.IdleTimeoutInMinutes = lbIdleTimeout
			}
			// we didn't construct the probe objects for UDP or SCTP because they're not allowed on Azure.
			// However, when externalTrafficPolicy is Local, Kubernetes HTTP health check would be used for probing.
			if servicehelpers.NeedsHealthCheck(service) || (protocol != v1.ProtocolUDP && protocol != v1.ProtocolSCTP) {
				expectedRule.Probe = &network.SubResource{
					ID: to.StringPtr(az.getLoadBalancerProbeID(lbName, az.getLoadBalancerResourceGroup(), lbRuleName)),
				}
			}
			expectedRules = append(expectedRules, expectedRule)
		}
	}
	return expectedProbes, expectedRules, nil
}
// This reconciles the Network Security Group similar to how the LB is reconciled.
// This entails adding required, missing SecurityRules and removing stale rules.
//
// lbIP is the load balancer's frontend IP (required when wantLb is true);
// it becomes the destination of the allow rules. Returns the (possibly
// updated) security group, having already pushed any changes to Azure.
func (az *Cloud) reconcileSecurityGroup(clusterName string, service *v1.Service, lbIP *string, wantLb bool) (*network.SecurityGroup, error) {
	serviceName := getServiceName(service)
	klog.V(5).Infof("reconcileSecurityGroup(%s): START clusterName=%q", serviceName, clusterName)
	ports := service.Spec.Ports
	if ports == nil {
		// Shared rules are keyed by port, so we cannot reconcile them
		// without port information.
		if useSharedSecurityRule(service) {
			klog.V(2).Infof("Attempting to reconcile security group for service %s, but service uses shared rule and we don't know which port it's for", service.Name)
			return nil, fmt.Errorf("No port info for reconciling shared rule for service %s", service.Name)
		}
		ports = []v1.ServicePort{}
	}
	sg, err := az.getSecurityGroup(cacheReadTypeDefault)
	if err != nil {
		return nil, err
	}
	// Resolve the destination address: the LB IP when known, "*" otherwise.
	destinationIPAddress := ""
	if wantLb && lbIP == nil {
		return nil, fmt.Errorf("No load balancer IP for setting up security rules for service %s", service.Name)
	}
	if lbIP != nil {
		destinationIPAddress = *lbIP
	}
	if destinationIPAddress == "" {
		destinationIPAddress = "*"
	}
	sourceRanges, err := servicehelpers.GetLoadBalancerSourceRanges(service)
	if err != nil {
		return nil, err
	}
	serviceTags := getServiceTags(service)
	// Build the allowed-source list: "Internet" for an unrestricted external
	// LB, otherwise the service's source ranges plus any service tags.
	var sourceAddressPrefixes []string
	if (sourceRanges == nil || servicehelpers.IsAllowAll(sourceRanges)) && len(serviceTags) == 0 {
		if !requiresInternalLoadBalancer(service) {
			sourceAddressPrefixes = []string{"Internet"}
		}
	} else {
		for _, ip := range sourceRanges {
			sourceAddressPrefixes = append(sourceAddressPrefixes, ip.String())
		}
		sourceAddressPrefixes = append(sourceAddressPrefixes, serviceTags...)
	}
	// Desired rules: one inbound-allow rule per (port, source prefix) pair.
	expectedSecurityRules := []network.SecurityRule{}
	if wantLb {
		expectedSecurityRules = make([]network.SecurityRule, len(ports)*len(sourceAddressPrefixes))
		for i, port := range ports {
			_, securityProto, _, err := getProtocolsFromKubernetesProtocol(port.Protocol)
			if err != nil {
				return nil, err
			}
			for j := range sourceAddressPrefixes {
				ix := i*len(sourceAddressPrefixes) + j
				securityRuleName := az.getSecurityRuleName(service, port, sourceAddressPrefixes[j])
				expectedSecurityRules[ix] = network.SecurityRule{
					Name: to.StringPtr(securityRuleName),
					SecurityRulePropertiesFormat: &network.SecurityRulePropertiesFormat{
						Protocol:                 *securityProto,
						SourcePortRange:          to.StringPtr("*"),
						DestinationPortRange:     to.StringPtr(strconv.Itoa(int(port.Port))),
						SourceAddressPrefix:      to.StringPtr(sourceAddressPrefixes[j]),
						DestinationAddressPrefix: to.StringPtr(destinationIPAddress),
						Access:                   network.SecurityRuleAccessAllow,
						Direction:                network.SecurityRuleDirectionInbound,
					},
				}
			}
		}
	}
	for _, r := range expectedSecurityRules {
		klog.V(10).Infof("Expecting security rule for %s: %s:%s -> %s:%s", service.Name, *r.SourceAddressPrefix, *r.SourcePortRange, *r.DestinationAddressPrefix, *r.DestinationPortRange)
	}
	// update security rules
	dirtySg := false
	var updatedRules []network.SecurityRule
	if sg.SecurityGroupPropertiesFormat != nil && sg.SecurityGroupPropertiesFormat.SecurityRules != nil {
		updatedRules = *sg.SecurityGroupPropertiesFormat.SecurityRules
	}
	for _, r := range updatedRules {
		klog.V(10).Infof("Existing security rule while processing %s: %s:%s -> %s:%s", service.Name, logSafe(r.SourceAddressPrefix), logSafe(r.SourcePortRange), logSafeCollection(r.DestinationAddressPrefix, r.DestinationAddressPrefixes), logSafe(r.DestinationPortRange))
	}
	// update security rules: remove unwanted rules that belong privately
	// to this service
	// (iterate backwards so in-place removals don't shift unvisited indices)
	for i := len(updatedRules) - 1; i >= 0; i-- {
		existingRule := updatedRules[i]
		if az.serviceOwnsRule(service, *existingRule.Name) {
			klog.V(10).Infof("reconcile(%s)(%t): sg rule(%s) - considering evicting", serviceName, wantLb, *existingRule.Name)
			keepRule := false
			if findSecurityRule(expectedSecurityRules, existingRule) {
				klog.V(10).Infof("reconcile(%s)(%t): sg rule(%s) - keeping", serviceName, wantLb, *existingRule.Name)
				keepRule = true
			}
			if !keepRule {
				klog.V(10).Infof("reconcile(%s)(%t): sg rule(%s) - dropping", serviceName, wantLb, *existingRule.Name)
				updatedRules = append(updatedRules[:i], updatedRules[i+1:]...)
				dirtySg = true
			}
		}
	}
	// update security rules: if the service uses a shared rule and is being deleted,
	// then remove it from the shared rule
	if useSharedSecurityRule(service) && !wantLb {
		for _, port := range ports {
			for _, sourceAddressPrefix := range sourceAddressPrefixes {
				sharedRuleName := az.getSecurityRuleName(service, port, sourceAddressPrefix)
				sharedIndex, sharedRule, sharedRuleFound := findSecurityRuleByName(updatedRules, sharedRuleName)
				if !sharedRuleFound {
					klog.V(4).Infof("Expected to find shared rule %s for service %s being deleted, but did not", sharedRuleName, service.Name)
					return nil, fmt.Errorf("Expected to find shared rule %s for service %s being deleted, but did not", sharedRuleName, service.Name)
				}
				if sharedRule.DestinationAddressPrefixes == nil {
					klog.V(4).Infof("Expected to have array of destinations in shared rule for service %s being deleted, but did not", service.Name)
					return nil, fmt.Errorf("Expected to have array of destinations in shared rule for service %s being deleted, but did not", service.Name)
				}
				existingPrefixes := *sharedRule.DestinationAddressPrefixes
				addressIndex, found := findIndex(existingPrefixes, destinationIPAddress)
				if !found {
					klog.V(4).Infof("Expected to find destination address %s in shared rule %s for service %s being deleted, but did not", destinationIPAddress, sharedRuleName, service.Name)
					return nil, fmt.Errorf("Expected to find destination address %s in shared rule %s for service %s being deleted, but did not", destinationIPAddress, sharedRuleName, service.Name)
				}
				// If this service's destination is the last one, drop the
				// whole shared rule; otherwise just remove the destination.
				if len(existingPrefixes) == 1 {
					updatedRules = append(updatedRules[:sharedIndex], updatedRules[sharedIndex+1:]...)
				} else {
					newDestinations := append(existingPrefixes[:addressIndex], existingPrefixes[addressIndex+1:]...)
					sharedRule.DestinationAddressPrefixes = &newDestinations
					updatedRules[sharedIndex] = sharedRule
				}
				dirtySg = true
			}
		}
	}
	// update security rules: prepare rules for consolidation
	for index, rule := range updatedRules {
		if allowsConsolidation(rule) {
			updatedRules[index] = makeConsolidatable(rule)
		}
	}
	for index, rule := range expectedSecurityRules {
		if allowsConsolidation(rule) {
			expectedSecurityRules[index] = makeConsolidatable(rule)
		}
	}
	// update security rules: add needed
	for _, expectedRule := range expectedSecurityRules {
		foundRule := false
		if findSecurityRule(updatedRules, expectedRule) {
			klog.V(10).Infof("reconcile(%s)(%t): sg rule(%s) - already exists", serviceName, wantLb, *expectedRule.Name)
			foundRule = true
		}
		// A matching shared rule may still need this rule's destination
		// merged into its destination list.
		if foundRule && allowsConsolidation(expectedRule) {
			index, _ := findConsolidationCandidate(updatedRules, expectedRule)
			updatedRules[index] = consolidate(updatedRules[index], expectedRule)
			dirtySg = true
		}
		if !foundRule {
			klog.V(10).Infof("reconcile(%s)(%t): sg rule(%s) - adding", serviceName, wantLb, *expectedRule.Name)
			nextAvailablePriority, err := getNextAvailablePriority(updatedRules)
			if err != nil {
				return nil, err
			}
			expectedRule.Priority = to.Int32Ptr(nextAvailablePriority)
			updatedRules = append(updatedRules, expectedRule)
			dirtySg = true
		}
	}
	for _, r := range updatedRules {
		klog.V(10).Infof("Updated security rule while processing %s: %s:%s -> %s:%s", service.Name, logSafe(r.SourceAddressPrefix), logSafe(r.SourcePortRange), logSafeCollection(r.DestinationAddressPrefix, r.DestinationAddressPrefixes), logSafe(r.DestinationPortRange))
	}
	// Only push to Azure when something actually changed.
	if dirtySg {
		sg.SecurityRules = &updatedRules
		klog.V(2).Infof("reconcileSecurityGroup for service(%s): sg(%s) - updating", serviceName, *sg.Name)
		klog.V(10).Infof("CreateOrUpdateSecurityGroup(%q): start", *sg.Name)
		err := az.CreateOrUpdateSecurityGroup(service, sg)
		if err != nil {
			klog.V(2).Infof("ensure(%s) abort backoff: sg(%s) - updating", serviceName, *sg.Name)
			return nil, err
		}
		klog.V(10).Infof("CreateOrUpdateSecurityGroup(%q): end", *sg.Name)
	}
	return &sg, nil
}
// shouldUpdateLoadBalancer reports whether the service's load balancer should
// be updated: the LB must already exist and the service must not be in the
// middle of deletion.
func (az *Cloud) shouldUpdateLoadBalancer(clusterName string, service *v1.Service) bool {
	_, _, lbExists, _ := az.getServiceLoadBalancer(service, clusterName, nil, false)
	if !lbExists {
		return false
	}
	return service.ObjectMeta.DeletionTimestamp == nil
}
// logSafe renders a possibly-nil string pointer for logging, substituting
// "(nil)" when the pointer is nil.
func logSafe(s *string) string {
	if s != nil {
		return *s
	}
	return "(nil)"
}
// logSafeCollection renders either a single string pointer or, when that is
// nil, a comma-joined bracketed form of the string-slice pointer. Both nil
// yields "(nil)". Used for log lines where a rule carries a single
// destination OR a destination list.
func logSafeCollection(s *string, strs *[]string) string {
	switch {
	case s != nil:
		return *s
	case strs != nil:
		return "[" + strings.Join(*strs, ",") + "]"
	default:
		return "(nil)"
	}
}
// findSecurityRuleByName scans rules for one whose name case-insensitively
// matches ruleName, returning its index, the rule itself, and whether a
// match was found (zero values otherwise).
func findSecurityRuleByName(rules []network.SecurityRule, ruleName string) (int, network.SecurityRule, bool) {
	for i := range rules {
		name := rules[i].Name
		if name != nil && strings.EqualFold(*name, ruleName) {
			return i, rules[i], true
		}
	}
	return 0, network.SecurityRule{}, false
}
// findIndex returns the position of the first case-insensitive match of s in
// strs, and whether a match was found.
func findIndex(strs []string, s string) (int, bool) {
	for i, candidate := range strs {
		if strings.EqualFold(candidate, s) {
			return i, true
		}
	}
	return 0, false
}
// allowsConsolidation reports whether the rule participates in destination
// consolidation; such rules are identified by a name starting with "shared".
func allowsConsolidation(rule network.SecurityRule) bool {
	name := to.String(rule.Name)
	return strings.HasPrefix(name, "shared")
}
// findConsolidationCandidate returns the index of the first consolidatable
// rule in rules whose name case-insensitively matches rule's name, and
// whether such a candidate exists.
func findConsolidationCandidate(rules []network.SecurityRule, rule network.SecurityRule) (int, bool) {
	target := to.String(rule.Name)
	for i, candidate := range rules {
		if !allowsConsolidation(candidate) {
			continue
		}
		if strings.EqualFold(to.String(candidate.Name), target) {
			return i, true
		}
	}
	return 0, false
}
// makeConsolidatable returns a copy of a shared rule normalized for
// consolidation: a single DestinationAddressPrefix (if any) is folded into
// the DestinationAddressPrefixes collection so that consolidate() can merge
// further destinations into one list. All other properties are carried over
// unchanged; note the copy intentionally drops DestinationAddressPrefix.
func makeConsolidatable(rule network.SecurityRule) network.SecurityRule {
	return network.SecurityRule{
		Name: rule.Name,
		SecurityRulePropertiesFormat: &network.SecurityRulePropertiesFormat{
			Priority:                   rule.Priority,
			Protocol:                   rule.Protocol,
			SourcePortRange:            rule.SourcePortRange,
			SourcePortRanges:           rule.SourcePortRanges,
			DestinationPortRange:       rule.DestinationPortRange,
			DestinationPortRanges:      rule.DestinationPortRanges,
			SourceAddressPrefix:        rule.SourceAddressPrefix,
			SourceAddressPrefixes:      rule.SourceAddressPrefixes,
			DestinationAddressPrefixes: collectionOrSingle(rule.DestinationAddressPrefixes, rule.DestinationAddressPrefix),
			Access:                     rule.Access,
			Direction:                  rule.Direction,
		},
	}
}
// consolidate merges newRule's destination address prefix(es) into
// existingRule's destination list, returning a rule that otherwise keeps all
// of existingRule's properties (priority, ports, sources, access, direction).
func consolidate(existingRule network.SecurityRule, newRule network.SecurityRule) network.SecurityRule {
	destinations := appendElements(existingRule.SecurityRulePropertiesFormat.DestinationAddressPrefixes, newRule.DestinationAddressPrefix, newRule.DestinationAddressPrefixes)
	destinations = deduplicate(destinations) // there are transient conditions during controller startup where it tries to add a service that is already added
	return network.SecurityRule{
		Name: existingRule.Name,
		SecurityRulePropertiesFormat: &network.SecurityRulePropertiesFormat{
			Priority:                   existingRule.Priority,
			Protocol:                   existingRule.Protocol,
			SourcePortRange:            existingRule.SourcePortRange,
			SourcePortRanges:           existingRule.SourcePortRanges,
			DestinationPortRange:       existingRule.DestinationPortRange,
			DestinationPortRanges:      existingRule.DestinationPortRanges,
			SourceAddressPrefix:        existingRule.SourceAddressPrefix,
			SourceAddressPrefixes:      existingRule.SourceAddressPrefixes,
			DestinationAddressPrefixes: destinations,
			Access:                     existingRule.Access,
			Direction:                  existingRule.Direction,
		},
	}
}
// collectionOrSingle prefers a non-empty collection pointer; otherwise it
// builds a fresh slice from the single string (empty slice when that is nil
// too). Used to normalize a rule's single destination into list form.
func collectionOrSingle(collection *[]string, s *string) *[]string {
	if collection != nil && len(*collection) > 0 {
		return collection
	}
	result := []string{}
	if s != nil {
		result = append(result, *s)
	}
	return &result
}
// appendElements builds a new slice containing, in order: the base collection
// (if non-nil), the single string (if non-nil), and the extra strings (if
// non-nil). The result is always a non-nil pointer to a freshly allocated
// slice; inputs are never mutated.
func appendElements(collection *[]string, appendString *string, appendStrings *[]string) *[]string {
	merged := []string{}
	if collection != nil {
		merged = append(merged, *collection...)
	}
	if appendString != nil {
		merged = append(merged, *appendString)
	}
	if appendStrings != nil {
		merged = append(merged, *appendStrings...)
	}
	return &merged
}
// deduplicate returns a pointer to a new slice containing the elements of
// collection with duplicates removed, preserving first-occurrence order.
// A nil input yields nil. The input slice is never mutated.
func deduplicate(collection *[]string) *[]string {
	if collection == nil {
		return nil
	}
	// Set-membership idiom instead of the former `seen[v] == true` bool
	// comparison with an empty then-branch; containers are presized.
	seen := make(map[string]struct{}, len(*collection))
	result := make([]string, 0, len(*collection))
	for _, v := range *collection {
		if _, dup := seen[v]; dup {
			continue // skip repeated element
		}
		seen[v] = struct{}{}
		result = append(result, v)
	}
	return &result
}
// This reconciles the PublicIP resources similar to how the LB is reconciled.
// It deletes stale public IPs owned by the service (in parallel), and for an
// external LB that is still wanted it ensures the desired public IP exists
// and returns it; otherwise it returns (nil, nil).
func (az *Cloud) reconcilePublicIP(clusterName string, service *v1.Service, lbName string, wantLb bool) (*network.PublicIPAddress, error) {
	isInternal := requiresInternalLoadBalancer(service)
	serviceName := getServiceName(service)
	var lb *network.LoadBalancer
	var desiredPipName string
	var err error
	var shouldPIPExisted bool
	if !isInternal && wantLb {
		desiredPipName, shouldPIPExisted, err = az.determinePublicIPName(clusterName, service)
		if err != nil {
			return nil, err
		}
	}
	// The LB (when known) is needed so safeDeletePublicIP can strip frontend
	// references before deleting an IP.
	if lbName != "" {
		loadBalancer, _, err := az.getAzureLoadBalancer(lbName, cacheReadTypeDefault)
		if err != nil {
			return nil, err
		}
		lb = &loadBalancer
	}
	pipResourceGroup := az.getPublicIPAddressResourceGroup(service)
	pips, err := az.ListPIP(service, pipResourceGroup)
	if err != nil {
		return nil, err
	}
	var found bool
	var pipsToBeDeleted []*network.PublicIPAddress
	for i := range pips {
		pip := pips[i]
		pipName := *pip.Name
		if serviceOwnsPublicIP(&pip, clusterName, serviceName) {
			// We need to process for pips belong to this service
			if wantLb && !isInternal && pipName == desiredPipName {
				// This is the only case we should preserve the
				// Public ip resource with match service tag
				found = true
			} else {
				pipsToBeDeleted = append(pipsToBeDeleted, &pip)
			}
		} else if wantLb && !isInternal && pipName == desiredPipName {
			found = true
		}
	}
	if !isInternal && shouldPIPExisted && !found && wantLb {
		return nil, fmt.Errorf("reconcilePublicIP for service(%s): pip(%s) not found", serviceName, desiredPipName)
	}
	var deleteFuncs []func() error
	for _, pip := range pipsToBeDeleted {
		// Copy the element so the closure below does not capture the shared
		// loop variable. The closures run later in goroutines; previously the
		// log line referenced *pip.Name, so every deletion could log the last
		// pip's name instead of its own.
		pipCopy := *pip
		deleteFuncs = append(deleteFuncs, func() error {
			klog.V(2).Infof("reconcilePublicIP for service(%s): pip(%s) - deleting", serviceName, *pipCopy.Name)
			return az.safeDeletePublicIP(service, pipResourceGroup, &pipCopy, lb)
		})
	}
	errs := utilerrors.AggregateGoroutines(deleteFuncs...)
	if errs != nil {
		return nil, utilerrors.Flatten(errs)
	}
	if !isInternal && wantLb {
		// Confirm desired public ip resource exists
		var pip *network.PublicIPAddress
		domainNameLabel := getPublicIPDomainNameLabel(service)
		if pip, err = az.ensurePublicIPExists(service, desiredPipName, domainNameLabel, clusterName, shouldPIPExisted); err != nil {
			return nil, err
		}
		return pip, nil
	}
	return nil, nil
}
// safeDeletePublicIP deletes public IP by removing its reference first.
// Before issuing the delete it strips, from the given load balancer, the
// frontend IP configuration that points at this IP and any load-balancing
// rules referencing that frontend, then pushes the trimmed LB to Azure so
// the IP is no longer in use. The lb argument may be mutated in place.
func (az *Cloud) safeDeletePublicIP(service *v1.Service, pipResourceGroup string, pip *network.PublicIPAddress, lb *network.LoadBalancer) error {
	// Remove references if pip.IPConfiguration is not nil.
	if pip.PublicIPAddressPropertiesFormat != nil &&
		pip.PublicIPAddressPropertiesFormat.IPConfiguration != nil &&
		lb != nil && lb.LoadBalancerPropertiesFormat != nil &&
		lb.LoadBalancerPropertiesFormat.FrontendIPConfigurations != nil {
		referencedLBRules := []network.SubResource{}
		frontendIPConfigUpdated := false
		loadBalancerRuleUpdated := false
		// Check whether there are still frontend IP configurations referring to it.
		ipConfigurationID := to.String(pip.PublicIPAddressPropertiesFormat.IPConfiguration.ID)
		if ipConfigurationID != "" {
			lbFrontendIPConfigs := *lb.LoadBalancerPropertiesFormat.FrontendIPConfigurations
			// Iterate backwards so the in-place removal keeps indices valid.
			for i := len(lbFrontendIPConfigs) - 1; i >= 0; i-- {
				config := lbFrontendIPConfigs[i]
				if strings.EqualFold(ipConfigurationID, to.String(config.ID)) {
					// Remember the LB rules attached to this frontend so
					// they can be removed below.
					if config.FrontendIPConfigurationPropertiesFormat != nil &&
						config.FrontendIPConfigurationPropertiesFormat.LoadBalancingRules != nil {
						referencedLBRules = *config.FrontendIPConfigurationPropertiesFormat.LoadBalancingRules
					}
					frontendIPConfigUpdated = true
					lbFrontendIPConfigs = append(lbFrontendIPConfigs[:i], lbFrontendIPConfigs[i+1:]...)
					break
				}
			}
			if frontendIPConfigUpdated {
				lb.LoadBalancerPropertiesFormat.FrontendIPConfigurations = &lbFrontendIPConfigs
			}
		}
		// Check whether there are still load balancer rules referring to it.
		if len(referencedLBRules) > 0 {
			referencedLBRuleIDs := sets.NewString()
			for _, refer := range referencedLBRules {
				referencedLBRuleIDs.Insert(to.String(refer.ID))
			}
			if lb.LoadBalancerPropertiesFormat.LoadBalancingRules != nil {
				lbRules := *lb.LoadBalancerPropertiesFormat.LoadBalancingRules
				for i := len(lbRules) - 1; i >= 0; i-- {
					ruleID := to.String(lbRules[i].ID)
					if ruleID != "" && referencedLBRuleIDs.Has(ruleID) {
						loadBalancerRuleUpdated = true
						lbRules = append(lbRules[:i], lbRules[i+1:]...)
					}
				}
				if loadBalancerRuleUpdated {
					lb.LoadBalancerPropertiesFormat.LoadBalancingRules = &lbRules
				}
			}
		}
		// Update load balancer when frontendIPConfigUpdated or loadBalancerRuleUpdated.
		if frontendIPConfigUpdated || loadBalancerRuleUpdated {
			err := az.CreateOrUpdateLB(service, *lb)
			if err != nil {
				klog.Errorf("safeDeletePublicIP for service(%s) failed with error: %v", getServiceName(service), err)
				return err
			}
		}
	}
	// With all references removed, the public IP can be deleted.
	pipName := to.String(pip.Name)
	klog.V(10).Infof("DeletePublicIP(%s, %q): start", pipResourceGroup, pipName)
	err := az.DeletePublicIP(service, pipResourceGroup, pipName)
	if err != nil {
		return err
	}
	klog.V(10).Infof("DeletePublicIP(%s, %q): end", pipResourceGroup, pipName)
	return nil
}
// findProbe reports whether probes already contains an entry matching probe,
// compared by name (case-insensitively) and port.
func findProbe(probes []network.Probe, probe network.Probe) bool {
	wantName := to.String(probe.Name)
	wantPort := to.Int32(probe.Port)
	for _, candidate := range probes {
		if strings.EqualFold(to.String(candidate.Name), wantName) && to.Int32(candidate.Port) == wantPort {
			return true
		}
	}
	return false
}
// findRule reports whether rules already contains a rule equivalent to rule,
// compared by name (case-insensitively) and by the reconciliation-relevant
// properties (see equalLoadBalancingRulePropertiesFormat).
func findRule(rules []network.LoadBalancingRule, rule network.LoadBalancingRule, wantLB bool) bool {
	wantName := to.String(rule.Name)
	for _, candidate := range rules {
		if !strings.EqualFold(to.String(candidate.Name), wantName) {
			continue
		}
		if equalLoadBalancingRulePropertiesFormat(candidate.LoadBalancingRulePropertiesFormat, rule.LoadBalancingRulePropertiesFormat, wantLB) {
			return true
		}
	}
	return false
}
// equalLoadBalancingRulePropertiesFormat checks whether the provided LoadBalancingRulePropertiesFormat are equal.
// Note: only fields used in reconcileLoadBalancer are considered. Nil on
// either side is never equal.
func equalLoadBalancingRulePropertiesFormat(s *network.LoadBalancingRulePropertiesFormat, t *network.LoadBalancingRulePropertiesFormat, wantLB bool) bool {
	if s == nil || t == nil {
		return false
	}
	pairs := []struct{ a, b interface{} }{
		{s.Protocol, t.Protocol},
		{s.FrontendIPConfiguration, t.FrontendIPConfiguration},
		{s.BackendAddressPool, t.BackendAddressPool},
		{s.LoadDistribution, t.LoadDistribution},
		{s.FrontendPort, t.FrontendPort},
		{s.BackendPort, t.BackendPort},
		{s.EnableFloatingIP, t.EnableFloatingIP},
		{s.EnableTCPReset, t.EnableTCPReset},
		{s.DisableOutboundSnat, t.DisableOutboundSnat},
	}
	for _, p := range pairs {
		if !reflect.DeepEqual(p.a, p.b) {
			return false
		}
	}
	// Idle timeout only participates when the LB is wanted and both sides
	// specify a value.
	if wantLB && s.IdleTimeoutInMinutes != nil && t.IdleTimeoutInMinutes != nil {
		return reflect.DeepEqual(s.IdleTimeoutInMinutes, t.IdleTimeoutInMinutes)
	}
	return true
}
// This compares rule's Name, Protocol, SourcePortRange, DestinationPortRange, SourceAddressPrefix, Access, and Direction.
// Note that it compares rule's DestinationAddressPrefix only when it's not consolidated rule as such rule does not have DestinationAddressPrefix defined.
// We intentionally do not compare DestinationAddressPrefixes in consolidated case because reconcileSecurityRule has to consider the two rules equal,
// despite different DestinationAddressPrefixes, in order to give it a chance to consolidate the two rules.
func findSecurityRule(rules []network.SecurityRule, rule network.SecurityRule) bool {
	for _, existingRule := range rules {
		if !strings.EqualFold(to.String(existingRule.Name), to.String(rule.Name)) {
			continue
		}
		// Exact-match enum fields.
		if existingRule.Protocol != rule.Protocol ||
			existingRule.Access != rule.Access ||
			existingRule.Direction != rule.Direction {
			continue
		}
		// Case-insensitive string fields.
		if !strings.EqualFold(to.String(existingRule.SourcePortRange), to.String(rule.SourcePortRange)) ||
			!strings.EqualFold(to.String(existingRule.DestinationPortRange), to.String(rule.DestinationPortRange)) ||
			!strings.EqualFold(to.String(existingRule.SourceAddressPrefix), to.String(rule.SourceAddressPrefix)) {
			continue
		}
		// Only compare the single destination when neither side is a
		// consolidated (shared) rule.
		if !allowsConsolidation(existingRule) && !allowsConsolidation(rule) &&
			!strings.EqualFold(to.String(existingRule.DestinationAddressPrefix), to.String(rule.DestinationAddressPrefix)) {
			continue
		}
		return true
	}
	return false
}
// getPublicIPAddressResourceGroup returns the resource group that should host
// the service's public IP: the annotated one when present and non-blank,
// otherwise the cloud's default resource group.
func (az *Cloud) getPublicIPAddressResourceGroup(service *v1.Service) string {
	if annotated, found := service.Annotations[ServiceAnnotationLoadBalancerResourceGroup]; found {
		if trimmed := strings.TrimSpace(annotated); trimmed != "" {
			return trimmed
		}
	}
	return az.ResourceGroup
}
// isBackendPoolPreConfigured reports whether the load balancer backend pool
// for this service is pre-configured, based on the cloud-wide setting and
// whether the service wants an internal load balancer.
func (az *Cloud) isBackendPoolPreConfigured(service *v1.Service) bool {
	poolType := az.PreConfiguredBackendPoolLoadBalancerTypes
	if poolType == PreConfiguredBackendPoolLoadBalancerTypesAll {
		return true
	}
	if requiresInternalLoadBalancer(service) {
		return poolType == PreConfiguredBackendPoolLoadBalancerTypesInteral
	}
	return poolType == PreConfiguredBackendPoolLoadBalancerTypesExternal
}
// Check if service requires an internal load balancer.
func requiresInternalLoadBalancer(service *v1.Service) bool {
	value, annotated := service.Annotations[ServiceAnnotationLoadBalancerInternal]
	return annotated && value == "true"
}
// subnet returns the internal-subnet annotation value for services that
// require an internal load balancer, or nil when it is absent or blank.
func subnet(service *v1.Service) *string {
	if !requiresInternalLoadBalancer(service) {
		return nil
	}
	name, found := service.Annotations[ServiceAnnotationLoadBalancerInternalSubnet]
	if !found || strings.TrimSpace(name) == "" {
		return nil
	}
	return &name
}
// getServiceLoadBalancerMode parses the mode value.
// if the value is __auto__ it returns isAuto = TRUE.
// if anything else it returns the unique VM set names after trimming spaces.
func getServiceLoadBalancerMode(service *v1.Service) (hasMode bool, isAuto bool, vmSetNames []string) {
	var mode string
	mode, hasMode = service.Annotations[ServiceAnnotationLoadBalancerMode]
	mode = strings.TrimSpace(mode)
	isAuto = strings.EqualFold(mode, ServiceAnnotationLoadBalancerAutoModeValue)
	if isAuto {
		return hasMode, isAuto, vmSetNames
	}
	// Split "AS1, AS2,AS3,AS3" into trimmed, de-duplicated names
	// e.g. {"AS1"," AS2", "AS3", "AS3"} => {"AS1", "AS2", "AS3"}
	names := sets.NewString()
	for _, raw := range strings.Split(mode, ",") {
		names.Insert(strings.TrimSpace(raw))
	}
	vmSetNames = names.List()
	return hasMode, isAuto, vmSetNames
}
// useSharedSecurityRule reports whether the service opted into shared
// security rules via its annotation.
func useSharedSecurityRule(service *v1.Service) bool {
	value, annotated := service.Annotations[ServiceAnnotationSharedSecurityRule]
	return annotated && value == "true"
}
// getServiceTags returns the comma-separated allowed-service-tag annotation as
// a slice of trimmed, non-empty entries; nil when the service or annotation is
// absent. Note the result is an empty (non-nil) slice when the annotation is
// present but contains no usable entries.
func getServiceTags(service *v1.Service) []string {
	if service == nil {
		return nil
	}
	annotation, found := service.Annotations[ServiceAnnotationAllowedServiceTag]
	if !found {
		return nil
	}
	result := []string{}
	for _, raw := range strings.Split(strings.TrimSpace(annotation), ",") {
		if tag := strings.TrimSpace(raw); tag != "" {
			result = append(result, tag)
		}
	}
	return result
}
// serviceOwnsPublicIP reports whether the public IP's tags mark it as owned by
// the given service (and, when the cluster tag is set, by the given cluster).
func serviceOwnsPublicIP(pip *network.PublicIPAddress, clusterName, serviceName string) bool {
	if pip == nil || pip.Tags == nil {
		return false
	}
	serviceTag := pip.Tags[serviceTagKey]
	clusterTag := pip.Tags[clusterNameKey]
	if serviceTag == nil || *serviceTag != serviceName {
		return false
	}
	// Backward compatible for clusters upgraded from old releases: when only
	// the "service" tag was set, a missing cluster tag counts as a match.
	return clusterTag == nil || *clusterTag == clusterName
}
| {
"content_hash": "fa8215e05c7e1081fab8cc2b62cdf532",
"timestamp": "",
"source": "github",
"line_count": 1789,
"max_line_length": 264,
"avg_line_length": 38.71101173840134,
"alnum_prop": 0.72979178098016,
"repo_name": "frodenas/kubernetes",
"id": "995a4000c9b8d0cb4094d17cf336371f235d6dc6",
"size": "69823",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "staging/src/k8s.io/legacy-cloud-providers/azure/azure_loadbalancer.go",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "2840"
},
{
"name": "Dockerfile",
"bytes": "50433"
},
{
"name": "Go",
"bytes": "52306592"
},
{
"name": "HTML",
"bytes": "38"
},
{
"name": "Lua",
"bytes": "17200"
},
{
"name": "Makefile",
"bytes": "65376"
},
{
"name": "PowerShell",
"bytes": "119046"
},
{
"name": "Python",
"bytes": "3641505"
},
{
"name": "Ruby",
"bytes": "413"
},
{
"name": "Shell",
"bytes": "1578354"
},
{
"name": "sed",
"bytes": "1390"
}
],
"symlink_target": ""
} |
package com.thoughtworks.cruise.page.edit;
import com.thoughtworks.cruise.Regex;
import com.thoughtworks.cruise.page.CruisePage;
import com.thoughtworks.cruise.state.ConfigState;
import com.thoughtworks.cruise.state.CurrentPageState;
import com.thoughtworks.cruise.state.CurrentPageState.Page;
import com.thoughtworks.cruise.state.ScenarioState;
import com.thoughtworks.cruise.util.CommaSeparatedParams;
import com.thoughtworks.cruise.utils.Assertions;
import com.thoughtworks.cruise.utils.Timeout;
import net.sf.sahi.client.Browser;
import net.sf.sahi.client.ElementStub;
import org.hamcrest.core.Is;
import org.hamcrest.text.StringContains;
import static junit.framework.Assert.assertEquals;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertTrue;
// Gauge/Sahi page object for the admin "Templates" listing tab.
// All assertions drive a live browser via Sahi ElementStubs; the constructor
// asserts that a previous step already navigated to the templates tab.
public class AlreadyOnTemplatesListingTab extends CruisePage {
    private Browser browser;
    private CurrentPageState currentPageState;

    public AlreadyOnTemplatesListingTab(Browser browser, ScenarioState state, CurrentPageState pageState) {
        super(state, true, browser);
        this.browser = browser;
        this.currentPageState = pageState;
        // Fail fast if the scenario is not actually on the templates tab.
        pageState.assertCurrentPageIs(Page.EDIT_TEMPLATES_TAB);
    }

    // Asserts every template in the comma-separated list has a visible heading.
    @com.thoughtworks.gauge.Step("Verify that templates <csTemplates> are present")
    public void verifyThatTemplatesArePresent(String csTemplates) {
        CommaSeparatedParams templates = new CommaSeparatedParams(csTemplates);
        for (String template : templates) {
            assertThat(String.format("Template must be present '%s'", template), elementTemplateName(template).exists(), Is.is(true));
        }
    }

    // Template name is rendered as an <h2> heading.
    private ElementStub elementTemplateName(String template) {
        return browser.heading2(template);
    }

    // Container div wrapping one template and its pipeline list.
    private ElementStub elementTemplateContainer(String template) {
        return browser.div(String.format("template_container_%s", template));
    }

    // Asserts that each named pipeline (resolved through scenario state to its
    // runtime-unique name) is linked under the template's container.
    @com.thoughtworks.gauge.Step("Verify that template <templateName> is used by pipelines <csPipelines>")
    public void verifyThatTemplateIsUsedByPipelines(String templateName, String csPipelines) throws Exception {
        CommaSeparatedParams pipelines = new CommaSeparatedParams(csPipelines);
        for (String pipeline : pipelines) {
            String actualName = scenarioState.pipelineNamed(pipeline);
            ElementStub pipelineUnderTemplate = browser.link(actualName).in(elementTemplateContainer(templateName));
            assertThat(String.format("Pipeline '%s' must be present under '%s'", actualName, templateName), pipelineUnderTemplate.exists(), Is.is(true));
        }
    }

    // Asserts the template's "information" div shows exactly the given message.
    @com.thoughtworks.gauge.Step("Verify that template <templateName> has message <message>")
    public void verifyThatTemplateHasMessage(String templateName, String message) throws Exception {
        ElementStub informationField = browser.div("information").in(elementTemplateContainer(templateName));
        assertThat(informationField.getText(), Is.is(message));
    }

    // Checks the pipeline link's href points at the pipeline edit page without
    // actually navigating.
    @com.thoughtworks.gauge.Step("Verify that edit pipeline <pipelineName> lands on pipeline edit page")
    public void verifyThatEditPipelineLandsOnPipelineEditPage(String pipelineName) throws Exception {
        String actualName = scenarioState.pipelineNamed(pipelineName);
        ElementStub pipelineLink = browser.link(actualName);
        String href = pipelineLink.fetch("href");
        assertThat(String.format("Edit pipeline must have URL: '%s'", href), href, StringContains.containsString(String.format("/admin/pipelines/%s", actualName)));
    }

    // Delete must be rendered as the disabled icon for each listed template.
    @com.thoughtworks.gauge.Step("Verify cannot delete templates <csTemplates>")
    public void verifyCannotDeleteTemplates(String csTemplates) throws Exception {
        CommaSeparatedParams templates = new CommaSeparatedParams(csTemplates);
        for (String template : templates) {
            assertThat(String.format("Template delete must be disabled for '%s'", template), browser.span(Regex.wholeWord("delete_icon_disabled")).in(elementTemplateContainer(template)).exists(), is(true));
        }
    }

    // NOTE: "tempates" typo is baked into the gauge step text; renaming would
    // break existing specs.
    @com.thoughtworks.gauge.Step("Verify can delete tempates <csTemplates>")
    public void verifyCanDeleteTempates(String csTemplates) throws Exception {
        CommaSeparatedParams templates = new CommaSeparatedParams(csTemplates);
        for (String template : templates) {
            assertThat(String.format("Template delete must be enabled for '%s'", template), elementDeleteTemplate(template).exists(), is(true));
        }
    }

    // Enabled delete trigger has a predictable DOM id per template.
    private ElementStub elementDeleteTemplate(String template) {
        return browser.byId(String.format("trigger_delete_%s", template));
    }

    // Clicks delete, accepts the confirm prompt, then waits for the flash message.
    @com.thoughtworks.gauge.Step("Delete template <templateName>")
    public void deleteTemplate(String templateName) throws Exception {
        elementDeleteTemplate(templateName).click();
        proceedWithConfirmPrompt();
        // NOTE(review): the boolean result is discarded — the wait still
        // happens but a missing "Saved successfully." message is not asserted;
        // confirm whether this is intentional.
        isMessagePresent("Saved successfully.");
    }

    // Polls up to 20s for a div with the given text; swallows failures and
    // reports false instead of throwing.
    private boolean isMessagePresent(final String value) {
        return Assertions.waitFor(Timeout.TWENTY_SECONDS, new Assertions.Function<Boolean>() {
            public Boolean call() {
                ElementStub message = browser.div(value);
                return message.exists();
            }
        }, new Assertions.FailureHandler<Boolean>() {
            public Boolean invoke(Exception e, Timeout timeout, Assertions.Function<Boolean> func) {
                return false;
            }
        });
    }

    // Asserts every listed template heading is absent from the page.
    @com.thoughtworks.gauge.Step("Verify that templates <csTemplates> are not present")
    public void verifyThatTemplatesAreNotPresent(String csTemplates) throws Exception {
        CommaSeparatedParams templates = new CommaSeparatedParams(csTemplates);
        for (String template : templates) {
            assertThat(elementTemplateName(template).exists(), is(false));
        }
    }

    // This page object is never navigated to directly, so it has no URL.
    @Override
    protected String url() {
        return null;
    }

    @com.thoughtworks.gauge.Step("Add new template")
    public void addNewTemplate() throws Exception {
        browser.link("Add New Template").click();
        currentPageState.currentPageIs(Page.NEW_TEMPLATE_POPUP);
    }

    // Clicks the edit icon next to the template heading and records the page change.
    @com.thoughtworks.gauge.Step("Edit template <templateName>")
    public void editTemplate(String templateName) throws Exception {
        ElementStub templateHeadingParent = browser.heading2(templateName).parentNode();
        ElementStub editLink = browser.link(Regex.wholeWord("edit_icon")).in(templateHeadingParent);
        editLink.click();
        currentPageState.currentPageIs(Page.EDIT_TEMPLATE_PAGE);
    }

    // Triggers deletion but leaves the confirm prompt open for a later step.
    @com.thoughtworks.gauge.Step("Delete template with confirm prompt <templateName>")
    public void deleteTemplateWithConfirmPrompt(String templateName) throws Exception {
        elementDeleteTemplate(templateName).click();
    }

    // Compares the page's hidden config_md5 field with the value stored earlier
    // in the scenario.
    @com.thoughtworks.gauge.Step("Assert mD5 - Already On Templates Listing tab")
    public void assertMD5() throws Exception {
        String md5value = scenarioState.getValueFromStore(ConfigState.md5key);
        assertEquals(browser.hidden("config_md5").getValue(), md5value);
    }

    // Template admins see the "add" icon in its disabled state.
    @com.thoughtworks.gauge.Step("Verify cannot add new template for template admin")
    public void verifyCannotAddNewTemplateForTemplateAdmin() {
        ElementStub templateContainer = browser.div("templates");
        ElementStub secondaryTemplateContainer = browser.span("title_secondary_info").in(templateContainer);
        assertThat("Add new template must be disabled for template admin",
                browser.span("add_icon_disabled").in(secondaryTemplateContainer).exists(), is(true));
    }

    // Asserts the Permissions link exists, clicks it, and records the page change.
    @com.thoughtworks.gauge.Step("Verify that <templateName> template has permissions link enabled and click on it")
    public void verifyThatTemplateHasPermissionsLinkEnabledAndClickOnIt(String templateName) {
        ElementStub templateDiv = browser.heading2(templateName).parentNode();
        ElementStub permissionsLink = browser.link("Permissions").in(templateDiv);
        assertTrue(permissionsLink.exists());
        permissionsLink.click();
        currentPageState.currentPageIs(Page.PERMISSIONS_PAGE_FOR_TEMPLATE);
    }

    // Template is listed but its Permissions link is absent (i.e. disabled).
    @com.thoughtworks.gauge.Step("Verify that template <templateName> is present with disabled permissions link")
    public void verifyThatTemplateIsPresentWithDisabledPermissionsLink(String templateName) {
        verifyThatTemplatesArePresent(templateName);
        ElementStub templateDiv = browser.heading2(templateName).parentNode();
        ElementStub permissionsLink = browser.link("Permissions").in(templateDiv);
        assertThat(permissionsLink.exists(), is(false));
    }

    // Asserts the first span inside the template's pipeline table shows the
    // message (the page appends a trailing period).
    @com.thoughtworks.gauge.Step("Verify message <message> for template <templateName>")
    public void verifyMessageForTemplate(String message, String templateName) throws Exception {
        ElementStub templateDiv = browser.div("template_container_"+templateName);
        ElementStub pipelineTable = browser.table("list_table").in(templateDiv);
        ElementStub spanMessage = browser.span("").in(pipelineTable);
        assertEquals(spanMessage.getText().trim(),message+".");
    }
}
| {
"content_hash": "16cd2c3e57b82d3996fe30c2824613f3",
"timestamp": "",
"source": "github",
"line_count": 190,
"max_line_length": 206,
"avg_line_length": 47.13157894736842,
"alnum_prop": 0.7478503629257398,
"repo_name": "varshavaradarajan/functional-tests",
"id": "92f1f6dbc064c2511e289d24befb34d043a3dbde",
"size": "9701",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/test/java/com/thoughtworks/cruise/page/edit/AlreadyOnTemplatesListingTab.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "12917"
},
{
"name": "C",
"bytes": "144"
},
{
"name": "C++",
"bytes": "37"
},
{
"name": "CSS",
"bytes": "6622"
},
{
"name": "Gherkin",
"bytes": "484"
},
{
"name": "HTML",
"bytes": "1316282"
},
{
"name": "Java",
"bytes": "1846156"
},
{
"name": "JavaScript",
"bytes": "787482"
},
{
"name": "PHP",
"bytes": "2182"
},
{
"name": "Ruby",
"bytes": "132504"
},
{
"name": "Shell",
"bytes": "116894"
},
{
"name": "Tcl",
"bytes": "11341"
},
{
"name": "Visual Basic",
"bytes": "84"
}
],
"symlink_target": ""
} |
'''
1224. Spiral
Time limit: 1.0 second
Memory limit: 64 MB
[Description]
A brand new sapper robot is able to neutralize mines in a rectangular region having
integer height and width (N and M respectively). Before the robot begins its work it
is placed near the top leftmost cell of the rectangle heading right. Then the robot
starts moving and neutralizing mines making a clockwise spiral way (see picture).
The spiral twists towards the inside of the region, covering all the cells. The region
is considered safe when all the cells are visited and checked by the robot.
Your task is to determine the number of the turns the robot has to make during its work.
[Input]
The input contains two integers in the following order: N, M (1 ≤ N, M ≤ 2^31 − 1).
[Output]
The output consists of a single integer value — the number of the turns.
'''
import sys
import math
def get_str_from_stdin():
    # Read one line from standard input and drop surrounding CR/LF characters.
    line = sys.stdin.readline()
    return line.strip('\r\n')
def get_int_from_stdin():
    # Convenience wrapper: read a line and parse it as a base-10 integer.
    text = get_str_from_stdin()
    return int(text)
def t(n, m):
    """Return the number of turns a clockwise spiral makes covering an n x m grid.

    The robot starts at the top-left heading right; each step below peels off
    whole rings of the region (4 turns per ring) until a trivial base case
    remains, so the recursion is at most one level deep.

    Uses floor division (//) so the arithmetic stays integral under both
    Python 2 and Python 3; plain / would produce floats on Python 3.
    """
    if n == 1:
        # A single row is swept left-to-right with no turn at all.
        return 0
    if m == 1:
        # A single column (n > 1): one turn to head downwards.
        return 1
    if n == 2:
        # Two rows (m > 1): right, turn down, turn left -- two turns.
        return 2
    if m == 2:
        # Two columns (n > 2): right, down, left, up -- three turns.
        return 3
    if m >= n:
        # Peel p rings so the height collapses to 1 (odd n) or 2 (even n).
        if n % 2 == 0:
            p = n // 2 - 1
            return t(2, m - 2 * p) + 4 * p
        p = (n - 1) // 2
        return t(1, m - 2 * p) + 4 * p
    # Symmetric case: the width collapses to 1 (odd m) or 2 (even m).
    if m % 2 == 0:
        p = m // 2 - 1
        return t(n - 2 * p, 2) + 4 * p
    p = (m - 1) // 2
    return t(n - 2 * p, 1) + 4 * p
def calc():
    # Parse "N M" from a single stdin line and print the number of turns.
    n, m = get_str_from_stdin().split(' ')
    n = int(n)
    m = int(m)
    # NOTE: Python 2 print statement -- this script targets a Python 2 judge.
    print t(n, m)
# Entry point when executed as a script.
if __name__ == '__main__':
    calc()
| {
"content_hash": "77f7ab7aae49c34d0cb5b59d154b1062",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 88,
"avg_line_length": 24.54794520547945,
"alnum_prop": 0.5731026785714286,
"repo_name": "matrixjoeq/timus_solutions",
"id": "f85dbab239f1218a1fa27504ec50a7ab95b9de1f",
"size": "1843",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "1224/slu.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "12510"
},
{
"name": "C++",
"bytes": "96832"
},
{
"name": "Python",
"bytes": "134479"
},
{
"name": "Shell",
"bytes": "181"
}
],
"symlink_target": ""
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">
<html xmlns:yui="http://yuilibrary.com/rdf/1.0/yui.rdf#">
<head>
<meta http-equiv="content-type" content="text/html; charset=UTF-8" />
<title>API: bacon (YUI Library)</title>
<link rel="stylesheet" type="text/css" href="assets/reset-fonts-grids-min.css?stamp=1223916184.81" />
<link rel="stylesheet" type="text/css" href="assets/api.css?stamp=1223916184.81" />
<script type="text/javascript" src="assets/api-js?stamp=1223916184.81"></script>
<script type="text/javascript" src="assets/ac-js?stamp=1223916184.81"></script>
</head>
<body id="yahoo-com">
<div id="doc3" class="yui-t2">
<div id="hd">
<h1><a href="http://developer.yahoo.com/yui/" title="Yahoo! UI Library">Yahoo! UI Library</a></h1>
<h3>bacon <span class="subtitle"></span></h3>
<a href="./index.html" title="Yahoo! UI Library">Yahoo! UI Library</a>
> <a href="./module_bacon.html" title="bacon">bacon</a>
<form onsubmit="return false">
<div id="propertysearch">
Search: <input autocomplete="off" id="searchinput" />
<div id="searchresults">
</div>
</div>
</form>
</div>
<div id="bd">
<div id="yui-main">
<div class="yui-b">
<form action="#" name="yui-classopts-form" method="get" id="yui-classopts-form">
<fieldset>
<legend>Filters</legend>
<span class="classopts"><input type="checkbox" name="show_private" id="show_private" /> <label for="show_private">Show Private</label></span>
<span class="classopts"><input type="checkbox" name="show_protected" id="show_protected" /> <label for="show_protected">Show Protected</label></span>
<span class="classopts"><input type="checkbox" name="show_deprecated" id="show_deprecated" /> <label for="show_deprecated">Show Deprecated</label></span>
</fieldset>
</form>
<h3>Module: bacon
</h3>
<div class="description summary">
The Bacon Utility allows you to cook bacon on any HTML element.
</div>
<div class="yui-gc">
<div class="yui-u first">
<p>This module contains the following classes:</p>
<script>
//var YUI_CLASS_LIST = [{"extends": {"superclass": {}, "events": {}, "configs": {}, "properties": {}, "methods": {}}, "description": "This class provides the ability to cook bacon.", "guessedname": "Bacon", "name": "Bacon"}];
</script>
<div id="splash_classList">
<ul>
<li><a href="Bacon.html" title="Bacon" id="class_0">Bacon</a></li>
</ul>
</div>
</div>
<div class="yui-u">
</div>
</div>
</div>
</div>
<div class="yui-b">
<div class="nav">
<div id="moduleList" class="module">
<h4>Modules</h4>
<ul class="content">
<li class="selected"><a href="module_bacon.html" title="bacon">bacon</a></li>
</ul>
</div>
<div id="classList" class="module">
<h4>Classes</h4>
<ul class="content">
<li class=""><a href="Bacon.html" title="Bacon">Bacon</a></li>
</ul>
</div>
<div id="fileList" class="module">
<h4>Files</h4>
<ul class="content">
<li class=""><a href="bacon.js.html" title="bacon.js">bacon.js</a></li>
</ul>
</div>
</div>
</div>
</div>
<div id="ft">
<hr />
Copyright © 2008 Yahoo! Inc. All rights reserved.
</div>
</div>
<script type="text/javascript">
ALL_YUI_PROPS = [{"url": "Bacon.html#event_baconDone", "access": "", "host": "Bacon", "type": "event", "name": "baconDone"}, {"url": "Bacon.html#event_baconReady", "access": "", "host": "Bacon", "type": "event", "name": "baconReady"}, {"url": "Bacon.html#event_baconStart", "access": "", "host": "Bacon", "type": "event", "name": "baconStart"}, {"url": "Bacon.html#method_cook", "access": "", "host": "Bacon", "type": "method", "name": "cook"}, {"url": "Bacon.html#method_fryBacon", "access": "", "host": "Bacon", "type": "method", "name": "fryBacon"}, {"url": "Bacon.html#method_init", "access": "private", "host": "Bacon", "type": "method", "name": "init"}, {"url": "Bacon.html#method__out", "access": "private", "host": "Bacon", "type": "method", "name": "_out"}, {"url": "Bacon.html#method__over", "access": "private", "host": "Bacon", "type": "method", "name": "_over"}, {"url": "Bacon.html#method_stopFryBacon", "access": "", "host": "Bacon", "type": "method", "name": "stopFryBacon"}];
</script>
</body>
</html>
| {
"content_hash": "c754a3f055dcd74f97d8a5123e9fdd3b",
"timestamp": "",
"source": "github",
"line_count": 119,
"max_line_length": 996,
"avg_line_length": 46.20168067226891,
"alnum_prop": 0.4823572208075664,
"repo_name": "chemouna/yui-examples",
"id": "f7280de3a5dd118681fab0f8ca1f6d58f00ad9f1",
"size": "5498",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "bacon/docs/module_bacon.html",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
'use strict';
// MODULES //
var isString = require( '@stdlib/assert/is-string' ).isPrimitive;
var format = require( '@stdlib/string/format' );
var NAMESPACE = require( './../namespace.js' );
var logger = require( './../console.js' );
// VARIABLES //
var NO_ALIAS_TEXT = 'Unrecognized package name.';
// MAIN //
/**
* Prints the alias corresponding to a provided package name.
*
* @private
* @param {string} pkg - package name
* @throws {TypeError} must provide a string
* @returns {void}
*/
function pkg2alias( pkg ) {
	var entry;
	var idx;
	if ( !isString( pkg ) ) {
		throw new TypeError( format( 'invalid argument. Must provide a string. Value: `%s`.', pkg ) );
	}
	for ( idx = 0; idx < NAMESPACE.length; idx++ ) {
		entry = NAMESPACE[ idx ];
		if ( entry.path === pkg ) {
			// Found a matching package: print its alias and stop searching.
			return logger.log( entry.alias );
		}
	}
	// No namespace entry matched the provided package name.
	logger.log( NO_ALIAS_TEXT );
}
// EXPORTS //
module.exports = pkg2alias;
| {
"content_hash": "e6726ee38850300e564dd0b38a722d80",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 96,
"avg_line_length": 19.931818181818183,
"alnum_prop": 0.6282782212086659,
"repo_name": "stdlib-js/stdlib",
"id": "166af3b9a911dabf007c0e06b1c1de420a73edea",
"size": "1493",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "lib/node_modules/@stdlib/repl/server/lib/functions/pkg2alias.js",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Awk",
"bytes": "21739"
},
{
"name": "C",
"bytes": "15336495"
},
{
"name": "C++",
"bytes": "1349482"
},
{
"name": "CSS",
"bytes": "58039"
},
{
"name": "Fortran",
"bytes": "198059"
},
{
"name": "HTML",
"bytes": "56181"
},
{
"name": "Handlebars",
"bytes": "16114"
},
{
"name": "JavaScript",
"bytes": "85975525"
},
{
"name": "Julia",
"bytes": "1508654"
},
{
"name": "Makefile",
"bytes": "4806816"
},
{
"name": "Python",
"bytes": "3343697"
},
{
"name": "R",
"bytes": "576612"
},
{
"name": "Shell",
"bytes": "559315"
},
{
"name": "TypeScript",
"bytes": "19309407"
},
{
"name": "WebAssembly",
"bytes": "5980"
}
],
"symlink_target": ""
} |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
// IBClogger.H
//
//
// Infrastructure for recording touches of EE data structures
//
//
#ifndef IBCLOGGER_H
#define IBCLOGGER_H
#include <holder.h>
#include <sarray.h>
#include <crst.h>
#include <synch.h>
#include <shash.h>
// The IBCLogger class records touches of EE data structures. It is important to
// minimize the overhead of IBC recording on non-recording scenarios. Our goal is
// for all public methods to be inlined, and that the cost of doing the instrumentation
// check does not exceed one comparison and one branch.
//
class MethodDesc;
class MethodTable;
class EEClass;
class TypeHandle;
struct DispatchSlot;
class Module;
struct EEClassHashEntry;
class IBCLogger;
extern IBCLogger g_IBCLogger;
typedef PTR_VOID HashDatum;
typedef Pair< Module*, mdToken > RidMapLogData;
#if !defined(DACCESS_COMPILE) && !defined(CROSSGEN_COMPILE)
#define IBCLOGGER_ENABLED
#endif
#ifdef IBCLOGGER_ENABLED
//
// Base class for IBC probe callback
//
// Signature of the static functions that record one IBC touch; pValue/pValue2
// are the (up to two) data pointers captured at the probe site.
typedef void (* const pfnIBCAccessCallback)(IBCLogger* pLogger, const void * pValue, const void * pValue2);

// IbcCallback bundles a probe callback with its two captured arguments so the
// invocation can be deferred (see DelayCallbackTable) and retried later.
// In _DEBUG builds each callback carries a monotonically increasing id used
// by IsValid() to detect stale/invalidated entries.
class IbcCallback
{
public:
    IbcCallback(pfnIBCAccessCallback pCallback, const void * pValue1, const void * pValue2)
        : m_pCallback(pCallback),
        m_pValue1(pValue1),
        m_pValue2(pValue2),
        m_tryCount(0)
#ifdef _DEBUG
        , m_id(0)
#endif
    { LIMITED_METHOD_CONTRACT; }

    // Perform the deferred logging call against the global logger.
    void Invoke() const
    {
        WRAPPER_NO_CONTRACT;

        m_pCallback(&g_IBCLogger, m_pValue1, m_pValue2);
    }

    SIZE_T GetPfn() const
    {
        LIMITED_METHOD_CONTRACT;

        return (SIZE_T) m_pCallback;
    }

    pfnIBCAccessCallback GetCallback() const
    {
        LIMITED_METHOD_CONTRACT;

        return m_pCallback;
    }

    const void * GetValue1() const
    {
        LIMITED_METHOD_CONTRACT;

        return m_pValue1;
    }

    const void * GetValue2() const
    {
        LIMITED_METHOD_CONTRACT;

        return m_pValue2;
    }

    // Marks the entry live; in _DEBUG builds assigns the next id.
    void SetValid()
    {
        LIMITED_METHOD_CONTRACT;

#ifdef _DEBUG
        m_id = ++s_highestId;
#endif
    }

    // Marks the entry dead; in _DEBUG builds clears the id so IsValid fails.
    void Invalidate()
    {
        LIMITED_METHOD_CONTRACT;

#ifdef _DEBUG
        m_id = 0;
#endif
    }

    // _DEBUG: valid iff an id was assigned and not surpassed; retail: always true.
    bool IsValid() const
    {
        WRAPPER_NO_CONTRACT;

#ifdef _DEBUG
        return (m_id > 0) && (m_id <= s_highestId);
#else
        return true;
#endif
    }

    // Bump and return the number of times this callback has been attempted.
    int IncrementTryCount()
    {
        return ++m_tryCount;
    }

    int GetTryCount() const
    {
        return m_tryCount;
    }

private:
    pfnIBCAccessCallback m_pCallback;   // probe function to run
    const void * m_pValue1;             // first captured argument
    const void * m_pValue2;             // second captured argument (may be unused)
    int m_tryCount;                     // number of delayed-processing attempts

#ifdef _DEBUG
    unsigned m_id;                      // 0 = invalid; otherwise assignment order
    static unsigned s_highestId;        // last id handed out by SetValid
#endif
};
// SHash traits for the table of delayed IBC callbacks.  Two entries are equal
// when callback pointer and both captured values match; the hash folds the
// three pointers together (collapsed to 32 bits on 64-bit platforms).
class DelayCallbackTableTraits : public DefaultSHashTraits< IbcCallback * >
{
public:
    typedef IbcCallback * key_t;

    static key_t GetKey(element_t e)
    {
        LIMITED_METHOD_CONTRACT;
        return e;
    }
    static BOOL Equals(key_t k1, key_t k2)
    {
        LIMITED_METHOD_CONTRACT;
        return (k1->GetCallback() == k2->GetCallback()) &&
               (k1->GetValue1()   == k2->GetValue1())   &&
               (k1->GetValue2()   == k2->GetValue2());
    }
    static count_t Hash(key_t k)
    {
        LIMITED_METHOD_CONTRACT;
        // XOR the three pointers into one SIZE_T-wide value.
        SIZE_T hashLarge = (SIZE_T)k->GetCallback() ^
                           (SIZE_T)k->GetValue1() ^
                           (SIZE_T)k->GetValue2();
#if POINTER_BITS == 32
        // sizeof(SIZE_T) == sizeof(COUNT_T)
        return hashLarge;
#else
        // xor in the upper half as well.
        count_t hash = *(count_t *)(&hashLarge);
        for (unsigned int i = 1; i < POINTER_BITS / 32; i++)
        {
            hash ^= ((count_t *)&hashLarge)[i];
        }
        return hash;
#endif // POINTER_BITS
    }

    // NULL is the empty-slot sentinel; (element_t)-1 marks deleted slots.
    static element_t Null()
    {
        WRAPPER_NO_CONTRACT;
        return NULL;
    }
    static bool IsNull(element_t e)
    {
        LIMITED_METHOD_CONTRACT;
        return e == NULL;
    }
    static element_t Deleted()
    {
        WRAPPER_NO_CONTRACT;
        return (element_t)-1;
    }
    static bool IsDeleted(const element_t e)
    {
        LIMITED_METHOD_CONTRACT;
        return e == (element_t)-1;
    }
};
typedef SHash< DelayCallbackTableTraits > DelayCallbackTable;
// Per-thread IBC state: whether logging is currently disabled on this thread
// and the list of callbacks whose processing has been delayed.
class ThreadLocalIBCInfo
{
public:
    ThreadLocalIBCInfo();
    ~ThreadLocalIBCInfo();

    // BOOL IsLoggingDisable()
    //    This indicates that logging is currently disabled for this thread
    //    This is used to prevent the logging functionality from
    //    triggerring more logging (and thus causing a deadlock)
    //    It is also used to prevent IBC logging whenever a IBCLoggingDisabler
    //    object is used. For example we use this to disable IBC profiling
    //    whenever a thread starts a JIT compile event. That is because we
    //    don't want to "pollute" the IBC data gathering for the things
    //    that the JIT compiler touches.
    //    Finally since our IBC logging will need to allocate unmanaged memory
    //    we also disable IBC logging when we are inside a "can't alloc region"
    //    Typically this occurs when a thread is performing a GC.
    BOOL IsLoggingDisabled()
    {
        LIMITED_METHOD_CONTRACT;
        return m_fLoggingDisabled || IsInCantAllocRegion();
    }

    // We want to disable IBC logging, any further log calls are to be ignored until
    // we call EnableLogging()
    //
    // This method returns true if it changed the value of m_fLoggingDisabled from false to true
    // it returns false if the value of m_fLoggingDisabled was already set to true
    // after this method executes the value of m_fLoggingDisabled will be true
    bool DisableLogging()
    {
        LIMITED_METHOD_CONTRACT;

        bool result = (m_fLoggingDisabled == false);
        m_fLoggingDisabled = true;
        return result;
    }

    // We want to re-enable IBC logging
    // (asserts that logging actually was disabled).
    void EnableLogging()
    {
        LIMITED_METHOD_CONTRACT;

        _ASSERTE(m_fLoggingDisabled == true);
        m_fLoggingDisabled = false;
    }

    // True while ProcessDelayedCallbacks is draining the delayed list.
    bool ProcessingDelayedList()
    {
        LIMITED_METHOD_CONTRACT;
        return m_fProcessingDelayedList;
    }

    // Records that at least one delayed callback failed during processing.
    void SetCallbackFailed()
    {
        LIMITED_METHOD_CONTRACT;
        m_fCallbackFailed = true;
    }

    // Threshold of pending callbacks before the delayed list is processed.
    int GetMinCountToProcess()
    {
        LIMITED_METHOD_CONTRACT;
        return m_iMinCountToProcess;
    }

    void IncMinCountToProcess(int increment)
    {
        LIMITED_METHOD_CONTRACT;
        m_iMinCountToProcess += increment;
    }

    // Lazily-created table of delayed callbacks (see ibclogger.cpp).
    DelayCallbackTable * GetPtrDelayList();

    void DeleteDelayedCallbacks();
    void FlushDelayedCallbacks();
    int  ProcessDelayedCallbacks();

    // Either runs the callback immediately or queues it on the delayed list.
    void CallbackHelper(const void * p, pfnIBCAccessCallback callback);

private:
    bool m_fProcessingDelayedList;      // re-entrancy guard for list draining
    bool m_fCallbackFailed;             // a delayed callback failed this pass
    bool m_fLoggingDisabled;            // logging explicitly disabled on this thread

    int m_iMinCountToProcess;           // pending-count threshold for processing
    DelayCallbackTable * m_pDelayList;  // lazily allocated delayed-callback table
};
// RAII holder that disables IBC logging on the current thread for its
// lifetime; nested instances are no-ops (only the outermost re-enables).
class IBCLoggingDisabler
{
public:
    IBCLoggingDisabler();
    IBCLoggingDisabler(bool ignore);  // When ignore is true we treat this as a nop
    IBCLoggingDisabler(ThreadLocalIBCInfo* pInfo);
    ~IBCLoggingDisabler();

private:
    ThreadLocalIBCInfo* m_pInfo;
    bool                m_fDisabled;  // true if this holder actually disable the logging
                                      // false when this is a nested occurance and logging was already disabled
};
//
// IBCLoggerAwareAllocMemTracker should be used for allocation of IBC tracked structures during type loading.
//
// If type loading fails, the delayed IBC callbacks may contain pointers to the failed type or method.
// IBCLoggerAwareAllocMemTracker will ensure that the delayed IBC callbacks are flushed before the memory of
// the failed type or method is reclaimed. Otherwise, there would be stale pointers in the delayed IBC callbacks
// that would cause crashed during IBC logging.
//
class IBCLoggerAwareAllocMemTracker : public AllocMemTracker
{
public:
    IBCLoggerAwareAllocMemTracker()
    {
        WRAPPER_NO_CONTRACT;
    }

    // Destructor flushes the delayed IBC callbacks (defined in ibclogger.cpp).
    ~IBCLoggerAwareAllocMemTracker();
};
#else // IBCLOGGER_ENABLED
typedef const void * pfnIBCAccessCallback;
// No-op stand-in used when IBC logging is compiled out
// (DACCESS_COMPILE or CROSSGEN_COMPILE builds).
class IBCLoggingDisabler
{
public:
    IBCLoggingDisabler()
    {
    }

    ~IBCLoggingDisabler()
    {
    }
};
// Empty stand-in for the per-thread IBC state when logging is compiled out.
class ThreadLocalIBCInfo
{
public:
    ThreadLocalIBCInfo()
    {
    }

    ~ThreadLocalIBCInfo()
    {
    }
};
// When IBC logging is compiled out, this tracker adds nothing beyond the
// plain AllocMemTracker behavior.
class IBCLoggerAwareAllocMemTracker : public AllocMemTracker
{
public:
    IBCLoggerAwareAllocMemTracker()
    {
    }

    ~IBCLoggerAwareAllocMemTracker()
    {
    }
};
#endif // IBCLOGGER_ENABLED
// IBCLogger is responsible for collecting profile data. Logging is turned on by the
// COMPlus_ZapBBInstr environment variable, and the actual writing to the file
// occurs in code:Module.WriteMethodProfileDataLogFile
class IBCLogger
{
//
// Methods for logging EE data structure accesses. All methods should be defined
// using the LOGACCESS macros, which creates the wrapper method that calls the
// helper when instrumentation is enabled. The public name of these methods should
// be of the form Log##name##Access where name describes the type of access to be
// logged. The private helpers are implemented in IBClogger.cpp.
//
#ifdef IBCLOGGER_ENABLED
#define LOGACCESS_PTR(name, type) \
LOGACCESS(name, type*, (type*), (const void *));
#define LOGACCESS_VALUE(name, type) \
LOGACCESS(name, type, *(type*), (const void *)&);
#define LOGACCESS(name, type, totype, toptr) \
public: \
__forceinline void Log##name##Access(type p) \
{ \
WRAPPER_NO_CONTRACT; \
/* We expect this to get inlined, so that it */ \
/* has low overhead when not instrumenting. */ \
/* So keep the function really small */ \
if ( InstrEnabled() ) \
Log##name##AccessStatic(toptr p); \
} \
\
private: \
NOINLINE static void Log##name##AccessStatic(const void * p) \
{ \
WRAPPER_NO_CONTRACT; \
/* To make the logging callsite as small as */ \
/* possible keep the part that passes extra */ \
/* argument to LogAccessThreadSafeHelper */ \
/* in separate non-inlined static functions */ \
LogAccessThreadSafeHelperStatic(p, Log##name##AccessWrapper); \
} \
\
static void Log##name##AccessWrapper(IBCLogger* pLogger, const void * pValue1, const void * pValue2) \
{ \
WRAPPER_NO_CONTRACT; \
return pLogger->Log##name##AccessHelper(totype pValue1); \
} \
void Log##name##AccessHelper(type p); \
private:
static void LogAccessThreadSafeHelperStatic( const void * p, pfnIBCAccessCallback callback);
void LogAccessThreadSafeHelper( const void * p, pfnIBCAccessCallback callback);
void DelayedCallbackPtr(pfnIBCAccessCallback callback, const void * pValue1, const void * pValue2 = NULL);
#else // IBCLOGGER_ENABLED
#define LOGACCESS_PTR(name,type) \
public: \
void Log##name##Access(type* p) { SUPPORTS_DAC; } \
#define LOGACCESS_VALUE(name, type) \
public: \
void Log##name##Access(type p) { SUPPORTS_DAC; } \
#endif // IBCLOGGER_ENABLED
// Log access to method code or method header
// Implemented by : code:IBCLogger.LogMethodCodeAccessHelper
LOGACCESS_PTR(MethodCode, MethodDesc)
// Log access to gc info
// Implemented by : code:IBCLogger.LogMethodGCInfoAccessHelper
LOGACCESS_PTR(MethodGCInfo, MethodDesc)
// The accesses to individual datastructures matter for fragile NGen only
#ifndef FEATURE_PREJIT
#undef LOGACCESS_PTR
#undef LOGACCESS_VALUE
#define LOGACCESS_PTR(name,type) \
public: \
void Log##name##Access(type* p) { SUPPORTS_DAC; } \
#define LOGACCESS_VALUE(name, type) \
public: \
void Log##name##Access(type p) { SUPPORTS_DAC; } \
#endif // FEATURE_PREJIT
// Log access to method desc (which adds the method desc to the required list)
// Implemented by : code:IBCLogger.LogMethodDescAccessHelper
LOGACCESS_PTR(MethodDesc, const MethodDesc)
// Log access to the NDirect data stored for a MethodDesc
// also implies that the IL_STUB for the NDirect method is executed
// Implemented by : code:IBCLogger.LogNDirectCodeAccessHelper
LOGACCESS_PTR(NDirectCode,MethodDesc)
// Log access to method desc (which addes the method desc to the required list)
// Implemented by : code:IBCLogger.LogMethodDescWriteAccessHelper
LOGACCESS_PTR(MethodDescWrite,MethodDesc)
// Log access to method desc (which adds the method desc to the required list)
// Implemented by : code:IBCLogger.LogMethodPrecodeAccessHelper
LOGACCESS_PTR(MethodPrecode, MethodDesc)
// Log access to method desc (which addes the method desc to the required list)
// Implemented by : code:IBCLogger.LogMethodPrecodeWriteAccessHelper
LOGACCESS_PTR(MethodPrecodeWrite,MethodDesc)
// Log access to method table
// Implemented by : code:IBCLogger.LogMethodTableAccessHelper
LOGACCESS_PTR(MethodTable, MethodTable const)
// Log access to method table
// Implemented by : code:IBCLogger.LogTypeMethodTableAccessHelper
LOGACCESS_PTR(TypeMethodTable, TypeHandle const)
// Log write access to method table
// Implemented by : code:IBCLogger.LogTypeMethodTableWriteableAccessHelper
LOGACCESS_PTR(TypeMethodTableWriteable, TypeHandle const)
// Log read access to private (written to) method table area
// Macro expands to : code:LogMethodTableWriteableDataAccessHelper
LOGACCESS_PTR(MethodTableWriteableData, MethodTable const)
// Log write access to private (written to) method table area
// Implemented by : code:IBCLogger.LogMethodTableWriteableDataWriteAccessHelper
LOGACCESS_PTR(MethodTableWriteableDataWrite,MethodTable)
// Log access to method table's NonVirtualSlotsArray
// Implemented by : code:IBCLogger.LogMethodTableNonVirtualSlotsAccessHelper
LOGACCESS_PTR(MethodTableNonVirtualSlots, MethodTable const)
// Log access to EEClass
// Implemented by : code:IBCLogger.LogEEClassAndMethodTableAccessHelper
LOGACCESS_PTR(EEClassAndMethodTable, MethodTable)
// Log access to EEClass COW table
// Implemented by : code:IBCLogger.LogEEClassCOWTableAccessHelper
LOGACCESS_PTR(EEClassCOWTable, MethodTable)
// Log access to the FieldDescs list in the EEClass
// Implemented by : code:IBCLogger.LogFieldDescsAccessHelper
LOGACCESS_PTR(FieldDescs, FieldDesc)
// Log access to the MTs dispatch map
// Implemented by : code:IBCLogger.LogDispatchMapAccessHelper
LOGACCESS_PTR(DispatchMap,MethodTable)
// Log read access to the MTs dispatch implementation table
// Implemented by : code:IBCLogger.LogDispatchTableAccessHelper
LOGACCESS_PTR(DispatchTable,MethodTable)
// Log read access to the MTs dispatch implementation table
// Implemented by : code:IBCLogger.LogDispatchTableAccessHelper
LOGACCESS_PTR(DispatchTableSlot,DispatchSlot)
// Log an update to the field marshalers
// Implemented by : code:IBCLogger.LogFieldMarshalersReadAccessHelper
LOGACCESS_PTR(FieldMarshalersRead,MethodTable)
// Log a lookup in the cctor info table
// Implemented by : code:IBCLogger.LogCCtorInfoReadAccessHelper
LOGACCESS_PTR(CCtorInfoRead,MethodTable)
// Log a lookup in the class hash table
// Implemented by : code:IBCLogger.LogClassHashTableAccessHelper
LOGACCESS_PTR(ClassHashTable,EEClassHashEntry)
// Log a lookup of the method list for a CER
// Implemented by : code:IBCLogger.LogCerMethodListReadAccessHelper
LOGACCESS_PTR(CerMethodListRead,MethodDesc)
// Log a metadata access
// Implemented by : code:IBCLogger.LogMetaDataAccessHelper
LOGACCESS_PTR(MetaData,const void)
// Log a metadata search
// Implemented by : code:IBCLogger.LogMetaDataSearchAccessHelper
LOGACCESS_PTR(MetaDataSearch,const void)
// Log a RVA fielddesc access */
// Implemented by : code:IBCLogger.LogRVADataAccessHelper
LOGACCESS_PTR(RVAData,FieldDesc)
// Log a lookup in the type hash table
// Implemented by : code:IBCLogger.LogTypeHashTableAccessHelper
LOGACCESS_PTR(TypeHashTable,TypeHandle const)
// Log a lookup in the Rid map
// Implemented by : code:IBCLogger.LogRidMapAccessHelper
LOGACCESS_VALUE( RidMap, RidMapLogData );
public:
#ifdef IBCLOGGER_ENABLED
IBCLogger();
~IBCLogger();
// Methods for enabling/disabling instrumentation.
void EnableAllInstr();
void DisableAllInstr();
#else // IBCLOGGER_ENABLED
void EnableAllInstr()
{
}
void DisableAllInstr()
{
}
#endif // IBCLOGGER_ENABLED
#ifndef DACCESS_COMPILE
void DisableRidAccessOrderInstr();
void DisableMethodDescAccessInstr();
inline BOOL InstrEnabled()
{
SUPPORTS_DAC;
return (dwInstrEnabled != 0);
}
static CrstStatic * GetSync();
private:
void LogMethodAccessHelper(const MethodDesc* pMD, ULONG flagNum);
static void LogMethodAccessWrapper(IBCLogger* pLogger, const void * pValue1, const void * pValue2);
void LogTypeAccessHelper(TypeHandle th, ULONG flagNum);
static void LogTypeAccessWrapper(IBCLogger* pLogger, const void * pValue1, const void * pValue2);
BOOL MethodDescAccessInstrEnabled();
BOOL RidAccessInstrEnabled();
private:
DWORD dwInstrEnabled;
static CrstStatic m_sync;
#endif // DACCESS_COMPILE
};
#endif // IBCLOGGER_H
| {
"content_hash": "56db80f3a4f5df2da862cd15769d3908",
"timestamp": "",
"source": "github",
"line_count": 637,
"max_line_length": 112,
"avg_line_length": 29.38304552590267,
"alnum_prop": 0.6447614468130577,
"repo_name": "wtgodbe/coreclr",
"id": "6aba2445e90378bd1e2a7ff6c2edb9afcb46d8e3",
"size": "18717",
"binary": false,
"copies": "10",
"ref": "refs/heads/master",
"path": "src/vm/ibclogger.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "976648"
},
{
"name": "Awk",
"bytes": "6904"
},
{
"name": "Batchfile",
"bytes": "167893"
},
{
"name": "C",
"bytes": "4862319"
},
{
"name": "C#",
"bytes": "154822068"
},
{
"name": "C++",
"bytes": "64306017"
},
{
"name": "CMake",
"bytes": "723128"
},
{
"name": "M4",
"bytes": "15214"
},
{
"name": "Makefile",
"bytes": "46117"
},
{
"name": "Objective-C",
"bytes": "14116"
},
{
"name": "Perl",
"bytes": "23653"
},
{
"name": "PowerShell",
"bytes": "132755"
},
{
"name": "Python",
"bytes": "480080"
},
{
"name": "Roff",
"bytes": "672227"
},
{
"name": "Scala",
"bytes": "4102"
},
{
"name": "Shell",
"bytes": "513230"
},
{
"name": "Smalltalk",
"bytes": "635930"
},
{
"name": "SuperCollider",
"bytes": "650"
},
{
"name": "TeX",
"bytes": "126781"
},
{
"name": "XSLT",
"bytes": "1016"
},
{
"name": "Yacc",
"bytes": "157492"
}
],
"symlink_target": ""
} |
SYNONYM
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null | {
"content_hash": "25f9493bfd1522777d7d66a4075749b4",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 39,
"avg_line_length": 10.23076923076923,
"alnum_prop": 0.6917293233082706,
"repo_name": "mdoering/backbone",
"id": "f95aaee6b0830b1ac55cf5850e39f7cd2290cf1f",
"size": "196",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Magnoliopsida/Geraniales/Geraniaceae/Geranium/Geranium cataractarum/ Syn. Geranium occitanum/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
package com.fh.fhzhihudaily.api.net;
/**
* NetConstants <br/>
* Created by Jason.fang on 2016-04-25.
*/
public class NetConstants {
public static final long TIME_OUT = 10L;
}
| {
"content_hash": "ae99f904d1393fa00e65d70d56af726d",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 44,
"avg_line_length": 20.333333333333332,
"alnum_prop": 0.6885245901639344,
"repo_name": "huige123/SimpleZhihuDaily",
"id": "cd75c9dec64ce1faf13586750d6b8a48c10454a3",
"size": "183",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/src/main/java/com/fh/fhzhihudaily/api/net/NetConstants.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "35328"
}
],
"symlink_target": ""
} |
@import url("message_common.css");
.out.content .nameheader {
background: url("../images/sender/sender_fill.png") top repeat-x;
color: rgba(170,17,17,0.8);
}
.out.context .nameheader {
background: url("../images/sender/sender_context_fill.png") top repeat-x;
color: rgba(170,17,17,0.5);
}
.in.content .nameheader {
background: url("../images/sender/sender_fill.png") top repeat-x;
color: rgba(39,55,152,0.8);
}
.in.context .nameheader {
background: url("../images/sender/sender_context_fill.png") top repeat-x;
color: rgba(39,55,152,0.5);
}
.out.content .name {
background: url("../images/sender/standard/sender_left.png") left top no-repeat;
}
.out.context .name {
background: url("../images/sender/standard/sender_context_left.png") left top no-repeat;
}
.in.content .name {
background: url("../images/sender/flipped/sender_right.png") right top no-repeat;
}
.in.context .name {
background: url("../images/sender/flipped/sender_context_right.png") right top no-repeat;
}
.out.content .protocol {
background: url("../images/sender/standard/red/sender_right.png") right top no-repeat;
color: rgba(170,17,17,0.4);
}
.out.context .protocol {
background: url("../images/sender/standard/red/sender_context_right.png") right top no-repeat;
color: rgba(170,17,17,0.3);
}
.in.content .protocol {
background: url("../images/sender/flipped/blue/sender_left.png") left top no-repeat;
color: rgba(39,55,152,0.4);
}
.in.context .protocol {
background: url("../images/sender/flipped/blue/sender_context_left.png") left top no-repeat;
color: rgba(39,55,152,0.3);
}
.status {
background: url("../images/status/purple/status_fill.png") top repeat-x;
}
.statusmessage {
background: url("../images/status/purple/status_left.png") left top no-repeat;
}
.statustime {
background: url("../images/status/purple/status_right.png") right top no-repeat;
} | {
"content_hash": "46c734ccce3419ab4626d5e0a889f4c1",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 95,
"avg_line_length": 32.275862068965516,
"alnum_prop": 0.7019230769230769,
"repo_name": "vipinraj/Spark",
"id": "f52f34859c62b75383cddb85e2d0d75527f84723",
"size": "1872",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "core/src/main/resources/themes/Satin.AdiumMessageStyle/Contents/Resources/styles/red_blue_purple_alternate_left.css",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1520"
},
{
"name": "C",
"bytes": "437"
},
{
"name": "CSS",
"bytes": "89362"
},
{
"name": "HTML",
"bytes": "61161"
},
{
"name": "Java",
"bytes": "4662686"
},
{
"name": "Shell",
"bytes": "897"
},
{
"name": "XSLT",
"bytes": "918"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta name="generator" content="rustdoc">
<meta name="description" content="API documentation for the Rust `N586` type in crate `typenum`.">
<meta name="keywords" content="rust, rustlang, rust-lang, N586">
<title>typenum::consts::N586 - Rust</title>
<link rel="stylesheet" type="text/css" href="../../normalize.css">
<link rel="stylesheet" type="text/css" href="../../rustdoc.css">
<link rel="stylesheet" type="text/css" href="../../main.css">
</head>
<body class="rustdoc type">
<!--[if lte IE 8]>
<div class="warning">
This old browser is unsupported and will most likely display funky
things.
</div>
<![endif]-->
<nav class="sidebar">
<p class='location'><a href='../index.html'>typenum</a>::<wbr><a href='index.html'>consts</a></p><script>window.sidebarCurrent = {name: 'N586', ty: 'type', relpath: ''};</script><script defer src="sidebar-items.js"></script>
</nav>
<nav class="sub">
<form class="search-form js-only">
<div class="search-container">
<input class="search-input" name="search"
autocomplete="off"
placeholder="Click or press ‘S’ to search, ‘?’ for more options…"
type="search">
</div>
</form>
</nav>
<section id='main' class="content">
<h1 class='fqn'><span class='in-band'>Type Definition <a href='../index.html'>typenum</a>::<wbr><a href='index.html'>consts</a>::<wbr><a class="type" href=''>N586</a></span><span class='out-of-band'><span id='render-detail'>
<a id="toggle-all-docs" href="javascript:void(0)" title="collapse all docs">
[<span class='inner'>−</span>]
</a>
</span><a class='srclink' href='../../src/typenum/home/jacob/nitro-game-engine/target/debug/build/typenum-cb7a8e569dce0703/out/consts.rs.html#1232' title='goto source code'>[src]</a></span></h1>
<pre class='rust typedef'>type N586 = <a class="struct" href="../../typenum/int/struct.NInt.html" title="struct typenum::int::NInt">NInt</a><<a class="type" href="../../typenum/consts/type.U586.html" title="type typenum::consts::U586">U586</a>>;</pre></section>
<section id='search' class="content hidden"></section>
<section class="footer"></section>
<aside id="help" class="hidden">
<div>
<h1 class="hidden">Help</h1>
<div class="shortcuts">
<h2>Keyboard Shortcuts</h2>
<dl>
<dt>?</dt>
<dd>Show this help dialog</dd>
<dt>S</dt>
<dd>Focus the search field</dd>
<dt>⇤</dt>
<dd>Move up in search results</dd>
<dt>⇥</dt>
<dd>Move down in search results</dd>
<dt>⏎</dt>
<dd>Go to active search result</dd>
<dt>+</dt>
<dd>Collapse/expand all sections</dd>
</dl>
</div>
<div class="infos">
<h2>Search Tricks</h2>
<p>
Prefix searches with a type followed by a colon (e.g.
<code>fn:</code>) to restrict the search to a given type.
</p>
<p>
Accepted types are: <code>fn</code>, <code>mod</code>,
<code>struct</code>, <code>enum</code>,
<code>trait</code>, <code>type</code>, <code>macro</code>,
and <code>const</code>.
</p>
<p>
Search functions by type signature (e.g.
<code>vec -> usize</code> or <code>* -> vec</code>)
</p>
</div>
</div>
</aside>
<script>
window.rootPath = "../../";
window.currentCrate = "typenum";
</script>
<script src="../../jquery.js"></script>
<script src="../../main.js"></script>
<script defer src="../../search-index.js"></script>
</body>
</html> | {
"content_hash": "89e0de67519a5cc8e0e0dc9cf0afe171",
"timestamp": "",
"source": "github",
"line_count": 113,
"max_line_length": 267,
"avg_line_length": 38.530973451327434,
"alnum_prop": 0.5089572806614607,
"repo_name": "nitro-devs/nitro-game-engine",
"id": "5393b6561f1837f924f1f713c49c4bb4a1112532",
"size": "4364",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/typenum/consts/type.N586.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CMake",
"bytes": "1032"
},
{
"name": "Rust",
"bytes": "59380"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.