text stringlengths 2 1.04M | meta dict |
|---|---|
ACCEPTED
#### According to
Index Fungorum
#### Published in
Mycotaxon 49: 62 (1993)
#### Original name
Piricaudiopsis appendiculata Bhat & W.B. Kendr.
### Remarks
null | {
"content_hash": "a3cbf5f72481a66d09c19778488f5217",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 47,
"avg_line_length": 13.153846153846153,
"alnum_prop": 0.7076023391812866,
"repo_name": "mdoering/backbone",
"id": "1f49ebfe8a43c465bd9b7d81e3e86efe2a137fc8",
"size": "242",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Fungi/Ascomycota/Piricaudiopsis/Piricaudiopsis appendiculata/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "ash/system/message_center/notification_swipe_control_view.h"

#include <cmath>

#include "ash/constants/ash_features.h"
#include "ash/system/message_center/message_center_style.h"
#include "ash/system/message_center/metrics_utils.h"
#include "base/bind.h"
#include "base/i18n/rtl.h"
#include "ui/base/l10n/l10n_util.h"
#include "ui/compositor/layer.h"
#include "ui/events/event.h"
#include "ui/gfx/color_palette.h"
#include "ui/gfx/paint_vector_icon.h"
#include "ui/message_center/vector_icons.h"
#include "ui/message_center/views/message_view.h"
#include "ui/message_center/views/notification_background_painter.h"
#include "ui/message_center/views/notification_control_buttons_view.h"
#include "ui/strings/grit/ui_strings.h"
#include "ui/views/background.h"
#include "ui/views/layout/box_layout.h"
namespace ash {
const char NotificationSwipeControlView::kViewClassName[] =
"NotificationSwipeControlView";
NotificationSwipeControlView::NotificationSwipeControlView(
message_center::MessageView* message_view)
: message_view_(message_view) {
auto* layout = SetLayoutManager(std::make_unique<views::BoxLayout>(
views::BoxLayout::Orientation::kHorizontal,
gfx::Insets(message_center_style::kSwipeControlButtonVerticalMargin,
message_center_style::kSwipeControlButtonHorizontalMargin),
message_center_style::kSwipeControlButtonHorizontalMargin));
layout->set_cross_axis_alignment(
views::BoxLayout::CrossAxisAlignment::kStart);
layout->set_main_axis_alignment(views::BoxLayout::MainAxisAlignment::kEnd);
// Draw on its own layer to round corners
SetPaintToLayer();
layer()->SetFillsBoundsOpaquely(false);
}
NotificationSwipeControlView::~NotificationSwipeControlView() = default;
void NotificationSwipeControlView::ShowButtons(ButtonPosition button_position,
bool show_settings,
bool show_snooze) {
views::BoxLayout* layout = static_cast<views::BoxLayout*>(GetLayoutManager());
if ((button_position == ButtonPosition::RIGHT) != base::i18n::IsRTL()) {
layout->set_main_axis_alignment(views::BoxLayout::MainAxisAlignment::kEnd);
} else {
layout->set_main_axis_alignment(
views::BoxLayout::MainAxisAlignment::kStart);
}
ShowSettingsButton(show_settings);
ShowSnoozeButton(show_snooze);
Layout();
}
void NotificationSwipeControlView::HideButtons() {
ShowSettingsButton(false);
ShowSnoozeButton(false);
Layout();
}
void NotificationSwipeControlView::UpdateButtonsVisibility() {
float gesture_amount = message_view_->GetSlideAmount();
if (gesture_amount == 0) {
HideButtons();
return;
}
NotificationSwipeControlView::ButtonPosition button_position =
gesture_amount < 0 ? NotificationSwipeControlView::ButtonPosition::RIGHT
: NotificationSwipeControlView::ButtonPosition::LEFT;
message_center::NotificationControlButtonsView* buttons =
message_view_->GetControlButtonsView();
// Ignore when GetControlButtonsView() returns null.
if (!buttons)
return;
bool has_settings_button = buttons->settings_button();
bool has_snooze_button = buttons->snooze_button();
ShowButtons(button_position, has_settings_button, has_snooze_button);
int control_button_count =
(has_settings_button ? 1 : 0) + (has_snooze_button ? 1 : 0);
int control_button_width =
message_center_style::kSwipeControlButtonSize * control_button_count +
message_center_style::kSwipeControlButtonHorizontalMargin *
(control_button_count ? control_button_count + 1 : 0);
message_view_->SetSlideButtonWidth(control_button_width);
// Update opacity based on the swipe progress. The swipe controls should
// gradually disappear as the user swipes the notification away.
float full_opacity_width =
message_center_style::kSwipeControlFullOpacityRatio *
control_button_width;
float fade_out_width = message_view_->width() - full_opacity_width;
DCHECK(fade_out_width > 0);
float swipe_progress = std::max(
0.0f, (fabs(gesture_amount) - full_opacity_width) / fade_out_width);
float opacity = std::max(0.0f, 1.0f - swipe_progress);
layer()->SetOpacity(opacity);
}
void NotificationSwipeControlView::UpdateCornerRadius(int top_radius,
int bottom_radius) {
// In the new notification UI, there will be no swipe control background.
if (features::IsNotificationsRefreshEnabled())
return;
SetBackground(views::CreateBackgroundFromPainter(
std::make_unique<message_center::NotificationBackgroundPainter>(
top_radius, bottom_radius,
message_center_style::kSwipeControlBackgroundColor)));
SchedulePaint();
}
void NotificationSwipeControlView::ShowSettingsButton(bool show) {
if (show && !settings_button_) {
settings_button_ = new views::ImageButton(
base::BindRepeating(&NotificationSwipeControlView::ButtonPressed,
base::Unretained(this), ButtonId::kSettings));
settings_button_->SetImage(
views::Button::STATE_NORMAL,
gfx::CreateVectorIcon(
message_center::kNotificationSettingsButtonIcon,
message_center_style::kSwipeControlButtonImageSize,
gfx::kChromeIconGrey));
settings_button_->SetImageHorizontalAlignment(
views::ImageButton::ALIGN_CENTER);
settings_button_->SetImageVerticalAlignment(
views::ImageButton::ALIGN_MIDDLE);
settings_button_->SetPreferredSize(
gfx::Size(message_center_style::kSwipeControlButtonSize,
message_center_style::kSwipeControlButtonSize));
settings_button_->SetAccessibleName(l10n_util::GetStringUTF16(
IDS_MESSAGE_NOTIFICATION_SETTINGS_BUTTON_ACCESSIBLE_NAME));
settings_button_->SetTooltipText(l10n_util::GetStringUTF16(
IDS_MESSAGE_NOTIFICATION_SETTINGS_BUTTON_ACCESSIBLE_NAME));
settings_button_->SetBackground(
views::CreateSolidBackground(SK_ColorTRANSPARENT));
settings_button_->SetFocusBehavior(FocusBehavior::ACCESSIBLE_ONLY);
AddChildView(settings_button_);
Layout();
} else if (!show && settings_button_) {
DCHECK(Contains(settings_button_));
delete settings_button_;
settings_button_ = nullptr;
}
}
void NotificationSwipeControlView::ShowSnoozeButton(bool show) {
if (show && !snooze_button_) {
snooze_button_ = new views::ImageButton(
base::BindRepeating(&NotificationSwipeControlView::ButtonPressed,
base::Unretained(this), ButtonId::kSnooze));
snooze_button_->SetImage(
views::Button::STATE_NORMAL,
gfx::CreateVectorIcon(
message_center::kNotificationSnoozeButtonIcon,
message_center_style::kSwipeControlButtonImageSize,
gfx::kChromeIconGrey));
snooze_button_->SetImageHorizontalAlignment(
views::ImageButton::ALIGN_CENTER);
snooze_button_->SetImageVerticalAlignment(views::ImageButton::ALIGN_MIDDLE);
snooze_button_->SetPreferredSize(
gfx::Size(message_center_style::kSwipeControlButtonSize,
message_center_style::kSwipeControlButtonSize));
snooze_button_->SetAccessibleName(l10n_util::GetStringUTF16(
IDS_MESSAGE_NOTIFICATION_SETTINGS_BUTTON_ACCESSIBLE_NAME));
snooze_button_->SetTooltipText(l10n_util::GetStringUTF16(
IDS_MESSAGE_NOTIFICATION_SETTINGS_BUTTON_ACCESSIBLE_NAME));
snooze_button_->SetBackground(
views::CreateSolidBackground(SK_ColorTRANSPARENT));
snooze_button_->SetFocusBehavior(FocusBehavior::ACCESSIBLE_ONLY);
AddChildViewAt(snooze_button_, 0);
Layout();
} else if (!show && snooze_button_) {
DCHECK(Contains(snooze_button_));
delete snooze_button_;
snooze_button_ = nullptr;
}
}
const char* NotificationSwipeControlView::GetClassName() const {
return kViewClassName;
}
void NotificationSwipeControlView::ButtonPressed(ButtonId button,
const ui::Event& event) {
auto weak_this = weak_factory_.GetWeakPtr();
const std::string notification_id = message_view_->notification_id();
if (button == ButtonId::kSettings) {
message_view_->OnSettingsButtonPressed(event);
metrics_utils::LogSettingsShown(notification_id,
/*is_slide_controls=*/true,
/*is_popup=*/false);
} else {
message_view_->OnSnoozeButtonPressed(event);
metrics_utils::LogSnoozed(notification_id,
/*is_slide_controls=*/true,
/*is_popup=*/false);
}
// Button handlers of |message_view_| may have closed |this|.
if (!weak_this)
return;
HideButtons();
// Closing the swipe control is done in these button pressed handlers.
// Otherwise, handlers might not work.
message_view_->CloseSwipeControl();
}
} // namespace ash
| {
"content_hash": "e7a4121648e398a405c283f47341f527",
"timestamp": "",
"source": "github",
"line_count": 226,
"max_line_length": 80,
"avg_line_length": 40.41150442477876,
"alnum_prop": 0.6971422314683018,
"repo_name": "ric2b/Vivaldi-browser",
"id": "e2d360b446aea9bdd1ebcc5457a9678637ca7148",
"size": "9133",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "chromium/ash/system/message_center/notification_swipe_control_view.cc",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
package daemon // import "github.com/docker/docker/daemon"
import (
"context"
"encoding/json"
"errors"
"runtime"
"time"
"github.com/docker/docker/api/types"
"github.com/docker/docker/api/types/backend"
"github.com/docker/docker/api/types/versions"
"github.com/docker/docker/api/types/versions/v1p20"
"github.com/docker/docker/container"
"github.com/docker/docker/pkg/ioutils"
)
// ContainerStats writes information about the container to the stream
// given in the config object.
func (daemon *Daemon) ContainerStats(ctx context.Context, prefixOrName string, config *backend.ContainerStatsConfig) error {
// Engine API version (used for backwards compatibility)
apiVersion := config.Version
if isWindows && versions.LessThan(apiVersion, "1.21") {
return errors.New("API versions pre v1.21 do not support stats on Windows")
}
container, err := daemon.GetContainer(prefixOrName)
if err != nil {
return err
}
// If the container is either not running or restarting and requires no stream, return an empty stats.
if (!container.IsRunning() || container.IsRestarting()) && !config.Stream {
return json.NewEncoder(config.OutStream).Encode(&types.StatsJSON{
Name: container.Name,
ID: container.ID})
}
outStream := config.OutStream
if config.Stream {
wf := ioutils.NewWriteFlusher(outStream)
defer wf.Close()
wf.Flush()
outStream = wf
}
var preCPUStats types.CPUStats
var preRead time.Time
getStatJSON := func(v interface{}) *types.StatsJSON {
ss := v.(types.StatsJSON)
ss.Name = container.Name
ss.ID = container.ID
ss.PreCPUStats = preCPUStats
ss.PreRead = preRead
preCPUStats = ss.CPUStats
preRead = ss.Read
return &ss
}
enc := json.NewEncoder(outStream)
updates := daemon.subscribeToContainerStats(container)
defer daemon.unsubscribeToContainerStats(container, updates)
noStreamFirstFrame := true
for {
select {
case v, ok := <-updates:
if !ok {
return nil
}
var statsJSON interface{}
statsJSONPost120 := getStatJSON(v)
if versions.LessThan(apiVersion, "1.21") {
var (
rxBytes uint64
rxPackets uint64
rxErrors uint64
rxDropped uint64
txBytes uint64
txPackets uint64
txErrors uint64
txDropped uint64
)
for _, v := range statsJSONPost120.Networks {
rxBytes += v.RxBytes
rxPackets += v.RxPackets
rxErrors += v.RxErrors
rxDropped += v.RxDropped
txBytes += v.TxBytes
txPackets += v.TxPackets
txErrors += v.TxErrors
txDropped += v.TxDropped
}
statsJSON = &v1p20.StatsJSON{
Stats: statsJSONPost120.Stats,
Network: types.NetworkStats{
RxBytes: rxBytes,
RxPackets: rxPackets,
RxErrors: rxErrors,
RxDropped: rxDropped,
TxBytes: txBytes,
TxPackets: txPackets,
TxErrors: txErrors,
TxDropped: txDropped,
},
}
} else {
statsJSON = statsJSONPost120
}
if !config.Stream && noStreamFirstFrame {
// prime the cpu stats so they aren't 0 in the final output
noStreamFirstFrame = false
continue
}
if err := enc.Encode(statsJSON); err != nil {
return err
}
if !config.Stream {
return nil
}
case <-ctx.Done():
return nil
}
}
}
func (daemon *Daemon) subscribeToContainerStats(c *container.Container) chan interface{} {
return daemon.statsCollector.Collect(c)
}
func (daemon *Daemon) unsubscribeToContainerStats(c *container.Container, ch chan interface{}) {
daemon.statsCollector.Unsubscribe(c, ch)
}
// GetContainerStats collects all the stats published by a container
func (daemon *Daemon) GetContainerStats(container *container.Container) (*types.StatsJSON, error) {
stats, err := daemon.stats(container)
if err != nil {
return nil, err
}
// We already have the network stats on Windows directly from HCS.
if !container.Config.NetworkDisabled && runtime.GOOS != "windows" {
if stats.Networks, err = daemon.getNetworkStats(container); err != nil {
return nil, err
}
}
return stats, nil
}
| {
"content_hash": "d91ba6123804958a4fa0d1ef5f28c9a9",
"timestamp": "",
"source": "github",
"line_count": 156,
"max_line_length": 124,
"avg_line_length": 25.82051282051282,
"alnum_prop": 0.695382323733863,
"repo_name": "pradipd/moby",
"id": "006d2223b214890d6894635d747c500a27b5aa4a",
"size": "4028",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "daemon/stats.go",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "81"
},
{
"name": "C",
"bytes": "4815"
},
{
"name": "Dockerfile",
"bytes": "18494"
},
{
"name": "Go",
"bytes": "7478737"
},
{
"name": "Makefile",
"bytes": "10933"
},
{
"name": "PowerShell",
"bytes": "85657"
},
{
"name": "Shell",
"bytes": "137238"
}
],
"symlink_target": ""
} |
<?xml version='1.0' encoding='utf-8'?>
<!DOCTYPE hibernate-configuration PUBLIC
"-//Hibernate/Hibernate Configuration DTD 3.0//EN"
"http://hibernate.sourceforge.net/hibernate-configuration-3.0.dtd">
<hibernate-configuration>
  <!-- The Hibernate 3.0 configuration DTD allows exactly one
       <session-factory> element; the original file nested a named factory
       inside an anonymous one, which is invalid markup. -->
  <session-factory name="org.redhelix.persistanceUnit">
    <property name="hibernate.dialect">org.hibernate.dialect.MySQLDialect</property>
    <property name="hibernate.connection.driver_class">com.mysql.jdbc.Driver</property>
    <property name="hibernate.connection.url">jdbc:mysql://localhost:3306/RedHelixMoon</property>
    <property name="hibernate.connection.username">redHelixAdmin</property>
    <property name="hibernate.connection.password">redHelixAdmin</property>
    <property name="hibernate.show_sql">true</property>
    <property name="hibernate.query.factory_class">org.hibernate.hql.internal.classic.ClassicQueryTranslatorFactory</property>
  </session-factory>
</hibernate-configuration>
| {
"content_hash": "8b1bf7a32164dcc6322b30ba209d60f4",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 134,
"avg_line_length": 62.588235294117645,
"alnum_prop": 0.7114661654135338,
"repo_name": "RedHelixOrg/RedHelix-1",
"id": "2290e507b96d9cf837e7789819749360d4aaded2",
"size": "1064",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "redhx-build-all/redhx-server-db-ipml/src/main/resources/hibernate.cfg.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "2136"
},
{
"name": "HTML",
"bytes": "434"
},
{
"name": "Java",
"bytes": "677051"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta content="IE=edge" http-equiv="X-UA-Compatible">
<meta content="initial-scale=1.0, maximum-scale=1.0, user-scalable=no, width=device-width" name="viewport">
<title>Labels - Material</title>
<!-- css -->
<link href="../css/base.min.css" rel="stylesheet">
<!-- css for doc -->
<link href="../css/project.min.css" rel="stylesheet">
<!-- favicon -->
<!-- ... -->
</head>
<body class="page-brand">
<header class="header header-transparent header-waterfall">
<ul class="nav nav-list pull-left">
<li>
<a data-toggle="menu" href="#doc_menu">
<span class="icon icon-lg">menu</span>
</a>
</li>
</ul>
<a class="header-logo header-affix-hide margin-left-no margin-right-no" data-offset-top="213" data-spy="affix" href="index.html">Material</a>
<span class="header-logo header-affix margin-left-no margin-right-no" data-offset-top="213" data-spy="affix">Labels</span>
<ul class="nav nav-list pull-right">
<li class="dropdown margin-right">
<a class="dropdown-toggle padding-left-no padding-right-no" data-toggle="dropdown">
<span class="access-hide">John Smith</span>
<span class="avatar avatar-sm"><img alt="alt text for John Smith avatar" src="../images/users/avatar-001.jpg"></span>
</a>
<ul class="dropdown-menu dropdown-menu-right">
<li>
<a class="padding-right-lg waves-attach" href="javascript:void(0)"><span class="icon icon-lg margin-right">account_box</span>Profile Settings</a>
</li>
<li>
<a class="padding-right-lg waves-attach" href="javascript:void(0)"><span class="icon icon-lg margin-right">add_to_photos</span>Upload Photo</a>
</li>
<li>
<a class="padding-right-lg waves-attach" href="page-login.html"><span class="icon icon-lg margin-right">exit_to_app</span>Logout</a>
</li>
</ul>
</li>
</ul>
</header>
<nav aria-hidden="true" class="menu" id="doc_menu" tabindex="-1">
<div class="menu-scroll">
<div class="menu-content">
<a class="menu-logo" href="index.html">Material</a>
<ul class="nav">
<li>
<a class="collapsed waves-attach" data-toggle="collapse" href="#doc_menu_components">Components</a>
<ul class="menu-collapse collapse" id="doc_menu_components">
<li>
<a class="waves-attach" href="ui-button.html">Buttons</a>
</li>
<li>
<a class="waves-attach" href="ui-button-fab.html">Buttons<small class="margin-left-xs">(Floating Action Button)</small></a>
</li>
<li>
<a class="waves-attach" href="ui-card.html">Cards</a>
</li>
<li>
<a class="waves-attach" href="ui-data-table.html">Data Tables</a>
</li>
<li>
<a class="waves-attach" href="ui-dialog.html">Dialogs</a>
</li>
<li>
<a class="waves-attach" href="ui-dropdown-menu.html">Menus</a>
</li>
<li>
<a class="waves-attach" href="ui-nav-drawer.html">Navigation Drawers</a>
</li>
<li>
<a class="waves-attach" href="ui-picker.html">Pickers</a>
</li>
<li>
<a class="waves-attach" href="ui-progress.html">Progress</a>
</li>
<li>
<a class="waves-attach" href="ui-selection-control.html">Selection Controls</a>
</li>
<li>
<a class="waves-attach" href="ui-snackbar.html">Snackbars</a>
</li>
<li>
<a class="waves-attach" href="ui-tab.html">Tabs</a>
</li>
<li>
<a class="waves-attach" href="ui-text-field.html">Text Fields</a>
</li>
<li>
<a class="waves-attach" href="ui-toolbar.html">Toolbars</a>
</li>
</ul>
</li>
<li>
<a class="waves-attach" data-toggle="collapse" href="#doc_menu_extras">Extras</a>
<ul class="menu-collapse collapse in" id="doc_menu_extras">
<li>
<a class="waves-attach" href="ui-avatar.html">Avatars</a>
</li>
<li>
<a class="waves-attach" href="ui-icon.html">Icons</a>
</li>
<li class="active">
<a class="waves-attach" href="ui-label.html">Labels</a>
</li>
<li>
<a class="waves-attach" href="ui-nav.html">Navs</a>
</li>
<li>
<a class="waves-attach" href="ui-tile.html">Tiles</a>
</li>
</ul>
</li>
<li>
<a class="collapsed waves-attach" data-toggle="collapse" href="#doc_menu_javascript">Javascript</a>
<ul class="menu-collapse collapse" id="doc_menu_javascript">
<li>
<a class="waves-attach" href="ui-affix.html">Affix</a>
</li>
<li>
<a class="waves-attach" href="ui-collapse.html">Collapse</a>
</li>
<li>
<a class="waves-attach" href="ui-dropdown-menu.html">Dropdown</a>
</li>
<li>
<a class="waves-attach" href="ui-modal.html">Modals</a>
</li>
<li>
<a class="waves-attach" href="ui-tab.html">Togglable Tabs</a>
</li>
</ul>
</li>
</ul>
</div>
</div>
</nav>
<main class="content">
<div class="content-header">
<div class="container">
<div class="row">
<div class="col-lg-6 col-lg-offset-3 col-md-8 col-md-offset-2">
<h1 class="content-heading">Labels</h1>
</div>
</div>
</div>
</div>
<div class="container">
<div class="row">
<div class="col-lg-6 col-lg-offset-3 col-md-8 col-md-offset-2">
<section class="content-inner margin-top-no">
<div class="card">
<div class="card-main">
<div class="card-inner">
<span class="label">Label</span>
<pre>
<span class="label"> ... </span>
</pre>
</div>
</div>
</div>
<h2 class="content-sub-heading">Colours</h2>
<div class="card">
<div class="card-main">
<div class="card-inner">
<p><span class="label label-brand margin-right">Label</span><code>.label-brand</code></p>
<p><span class="label label-brand-accent margin-right">Label</span><code>.label-brand-accent</code></p>
<p><span class="label label-green margin-right">Label</span><code>.label-green</code></p>
<p><span class="label label-orange margin-right">Label</span><code>.label-orange</code></p>
<p><span class="label label-red margin-right">Label</span><code>.label-red</code></p>
</div>
</div>
</div>
</section>
</div>
</div>
</div>
</main>
<footer class="ui-footer">
<div class="container">
<p>Material</p>
</div>
</footer>
<div class="fbtn-container">
<div class="fbtn-inner">
<a class="fbtn fbtn-lg fbtn-brand-accent waves-attach waves-circle waves-light" data-toggle="dropdown"><span class="fbtn-text">Links</span><span class="fbtn-ori icon">apps</span><span class="fbtn-sub icon">close</span></a>
<div class="fbtn-dropdown">
<a class="fbtn waves-attach waves-circle" href="https://github.com/Daemonite/material" target="_blank"><span class="fbtn-text">Fork me on GitHub</span><span class="icon">code</span></a>
<a class="fbtn fbtn-brand waves-attach waves-circle waves-light" href="https://twitter.com/daemonites" target="_blank"><span class="fbtn-text">Follow Daemon on Twitter</span><span class="icon">share</span></a>
<a class="fbtn fbtn-green waves-attach waves-circle" href="http://www.daemon.com.au/" target="_blank"><span class="fbtn-text">Visit Daemon Website</span><span class="icon">link</span></a>
</div>
</div>
</div>
<!-- js -->
<script src="https://ajax.googleapis.com/ajax/libs/jquery/2.2.0/jquery.min.js"></script>
<script src="../js/base.min.js"></script>
<!-- js for doc -->
<script src="../js/project.min.js"></script>
</body>
</html> | {
"content_hash": "c5622b58bfefc25aad6ec50f3267d2e5",
"timestamp": "",
"source": "github",
"line_count": 209,
"max_line_length": 225,
"avg_line_length": 36.9377990430622,
"alnum_prop": 0.5927461139896373,
"repo_name": "NimitzDEV/material",
"id": "589110c28af79347b0f5359a6dc19fbee8392bcc",
"size": "7720",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "templates/ui-label.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "217908"
},
{
"name": "HTML",
"bytes": "286617"
},
{
"name": "JavaScript",
"bytes": "90653"
}
],
"symlink_target": ""
} |
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=US-ASCII">
<title>basic_socket_streambuf::native_type</title>
<link rel="stylesheet" href="../../../boostbook.css" type="text/css">
<meta name="generator" content="DocBook XSL Stylesheets V1.75.2">
<link rel="home" href="../../../index.html" title="Asio">
<link rel="up" href="../basic_socket_streambuf.html" title="basic_socket_streambuf">
<link rel="prev" href="native_non_blocking/overload3.html" title="basic_socket_streambuf::native_non_blocking (3 of 3 overloads)">
<link rel="next" href="non_blocking.html" title="basic_socket_streambuf::non_blocking">
</head>
<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
<table cellpadding="2" width="100%"><tr><td valign="top"><img alt="asio C++ library" width="250" height="60" src="../../../asio.png"></td></tr></table>
<hr>
<div class="spirit-nav">
<a accesskey="p" href="native_non_blocking/overload3.html"><img src="../../../prev.png" alt="Prev"></a><a accesskey="u" href="../basic_socket_streambuf.html"><img src="../../../up.png" alt="Up"></a><a accesskey="h" href="../../../index.html"><img src="../../../home.png" alt="Home"></a><a accesskey="n" href="non_blocking.html"><img src="../../../next.png" alt="Next"></a>
</div>
<div class="section">
<div class="titlepage"><div><div><h4 class="title">
<a name="asio.reference.basic_socket_streambuf.native_type"></a><a class="link" href="native_type.html" title="basic_socket_streambuf::native_type">basic_socket_streambuf::native_type</a>
</h4></div></div></div>
<p>
<span class="emphasis"><em>Inherited from basic_socket.</em></span>
</p>
<p>
<a class="indexterm" name="idp130677280"></a>
(Deprecated: Use native_handle_type.)
The native representation of a socket.
</p>
<pre class="programlisting"><span class="keyword">typedef</span> <span class="identifier">StreamSocketService</span><span class="special">::</span><span class="identifier">native_handle_type</span> <span class="identifier">native_type</span><span class="special">;</span>
</pre>
<h6>
<a name="asio.reference.basic_socket_streambuf.native_type.h0"></a>
<span><a name="asio.reference.basic_socket_streambuf.native_type.requirements"></a></span><a class="link" href="native_type.html#asio.reference.basic_socket_streambuf.native_type.requirements">Requirements</a>
</h6>
<p>
<span class="emphasis"><em>Header: </em></span><code class="literal">asio/basic_socket_streambuf.hpp</code>
</p>
<p>
<span class="emphasis"><em>Convenience header: </em></span><code class="literal">asio.hpp</code>
</p>
</div>
<table xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" width="100%"><tr>
<td align="left"></td>
<td align="right"><div class="copyright-footer">Copyright © 2003-2015 Christopher M.
Kohlhoff<p>
Distributed under the Boost Software License, Version 1.0. (See accompanying
file LICENSE_1_0.txt or copy at <a href="http://www.boost.org/LICENSE_1_0.txt" target="_top">http://www.boost.org/LICENSE_1_0.txt</a>)
</p>
</div></td>
</tr></table>
<hr>
<div class="spirit-nav">
<a accesskey="p" href="native_non_blocking/overload3.html"><img src="../../../prev.png" alt="Prev"></a><a accesskey="u" href="../basic_socket_streambuf.html"><img src="../../../up.png" alt="Up"></a><a accesskey="h" href="../../../index.html"><img src="../../../home.png" alt="Home"></a><a accesskey="n" href="non_blocking.html"><img src="../../../next.png" alt="Next"></a>
</div>
</body>
</html>
| {
"content_hash": "7eb7f513a31af40306afd6b69d15c4f9",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 372,
"avg_line_length": 63.07017543859649,
"alnum_prop": 0.6534075104311544,
"repo_name": "sxlin/dist_ninja",
"id": "e41969fc14827ec332c23dd6514b783fffd54d0f",
"size": "3595",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "asio-1.10.6/doc/asio/reference/basic_socket_streambuf/native_type.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "20021"
},
{
"name": "C++",
"bytes": "4694297"
},
{
"name": "CSS",
"bytes": "16051"
},
{
"name": "Emacs Lisp",
"bytes": "3333"
},
{
"name": "HTML",
"bytes": "18242842"
},
{
"name": "Java",
"bytes": "13629"
},
{
"name": "M4",
"bytes": "9302"
},
{
"name": "Makefile",
"bytes": "767796"
},
{
"name": "Perl",
"bytes": "6547"
},
{
"name": "Protocol Buffer",
"bytes": "491"
},
{
"name": "Python",
"bytes": "54986"
},
{
"name": "Shell",
"bytes": "87511"
},
{
"name": "Vim script",
"bytes": "2623"
}
],
"symlink_target": ""
} |
@loop($arr as $val)@continue(!$loop->first && !$loop->last){{ $val }};@endloop | {
"content_hash": "44657a9bc425ce70c02cf9344ed84cc1",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 78,
"avg_line_length": 78,
"alnum_prop": 0.6025641025641025,
"repo_name": "advmaker/blade-loop",
"id": "02f640b1359e80cfb6c609772fdb2528b4e49bbc",
"size": "78",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/views/first_and_last.blade.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "643"
},
{
"name": "PHP",
"bytes": "11217"
}
],
"symlink_target": ""
} |
"""
Script to get read counts distribution
@author: Alicia Schep
"""
##### IMPORT MODULES #####
# import necessary for python
import os
from pyatac.chunk import ChunkList
from pysam import AlignmentFile
import numpy as np
def _between(x,start,end):
if x >= start and x < end:
return True
else:
return False
def get_counts(args):
"""function to get fragment sizes
"""
if args.out is None:
args.out = '.'.join(os.path.basename(args.bed).split('.')[0:-1])
chunks = ChunkList.read(args.bed)
mat = np.zeros(len(chunks), dtype=np.int)
bamHandle = AlignmentFile(args.bam)
j = 0
for chunk in chunks:
for read in bamHandle.fetch(chunk.chrom, max(0, chunk.start - args.upper), chunk.end + args.upper):
if read.is_proper_pair and not read.is_reverse:
if args.atac:
#get left position
l_pos = read.pos + 4
#get insert size
#correct by 8 base pairs to be inserion to insertion
ilen = abs(read.template_length) - 8
else:
l_pos = read.pos
ilen = abs(read.template_length)
r_pos = l_pos + ilen - 1
if _between(ilen, args.lower, args.upper) and (_between(l_pos, chunk.start, chunk.end) or _between(r_pos, chunk.start, chunk.end)):
mat[j] += 1
j += 1
bamHandle.close()
np.savetxt(args.out + ".counts.txt.gz", mat, delimiter="\n", fmt='%i')
| {
"content_hash": "00275bb90c55549833720a90cb0cc253",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 147,
"avg_line_length": 31.14,
"alnum_prop": 0.5555555555555556,
"repo_name": "GreenleafLab/NucleoATAC",
"id": "978b1440d0a1bbfec857059660f058206b6abd68",
"size": "1557",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pyatac/get_counts.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "194662"
}
],
"symlink_target": ""
} |
package ru.job4j.orderbook;
import org.junit.Test;
import static java.lang.String.format;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;
/**
* Тест класса Exchange.
*/
public class ExchangeTest {
/**
* Тест парсинга файла с 10 ордерами с комбинированием совпавших ордеров и выводом.
*/
@Test
public void whenExchangeSmallXMLFileThenPrintSimpleTable() {
ManualParser mp = new ManualParser("c:/projects/aeremeev/chapter_005/"
+ "src/test/java/ru/job4j/orderbook/ordersExchange.xml");
Exchange ex = new Exchange();
ex.load(mp.init());
StringBuilder builder = new StringBuilder();
builder.append(format("%n Order book: book-1%n"));
builder.append(format("%n BID - ASK%n"));
builder.append(format(" 99,80@64 - 100,00@162 %n"));
builder.append(format(" 99,70@16 - 100,20@42 %n"));
builder.append(format(" 99,50@86 - 100,40@75 %n"));
builder.append(format(" 99,40@78 - 100,50@160 %n"));
assertThat(ex.toString(), is(builder.toString()));
}
/**
* Тест парсинга и обработки основного файла orders.xml.
*/
@Test
public void whenParseAndExchangeOriginalFileThenRunTimeLessThanSixSeconds() {
Long expected = 6000L;
Long actual = System.currentTimeMillis();
ManualParser mp = new ManualParser("d:/orders.xml");
Exchange ex = new Exchange();
ex.load(mp.init());
System.out.println(ex.toString());
actual = System.currentTimeMillis() - actual;
System.out.println(format("Runtime in ms is %s", actual));
assertThat(actual < expected, is(true));
}
}
| {
"content_hash": "f34dbb06f60fc0696ff971d1630ed220",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 87,
"avg_line_length": 38.659574468085104,
"alnum_prop": 0.596587782058338,
"repo_name": "alexeremeev/aeremeev",
"id": "60947094f91b53878a920f28aa7dc3d8f2175859",
"size": "1929",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "chapter_005/src/test/java/ru/job4j/orderbook/ExchangeTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "17013"
},
{
"name": "HTML",
"bytes": "22626"
},
{
"name": "Java",
"bytes": "1081461"
},
{
"name": "JavaScript",
"bytes": "50690"
},
{
"name": "PLSQL",
"bytes": "60"
},
{
"name": "XSLT",
"bytes": "296"
}
],
"symlink_target": ""
} |
<?PHP
// V4.50 6 July 2004
error_reporting(E_ALL);
include_once("../adodb.inc.php");
include_once("../adodb-xmlschema03.inc.php");
// To build the schema, start by creating a normal ADOdb connection:
// NOTE(review): hard-coded root/empty-password credentials — acceptable only for
// a throwaway local test database.
$db = ADONewConnection( 'mysql' );
$db->Connect( 'localhost', 'root', '', 'test' ) || die('fail connect1');
// To create a schema object and build the query array.
$schema = new adoSchema( $db );
// To upgrade an existing schema object, use the following
// To upgrade an existing database to the provided schema,
// uncomment the following line:
#$schema->upgradeSchema();
print "<b>SQL to build xmlschema.xml</b>:\n<pre>";
// Build the SQL array
$sql = $schema->ParseSchema( "xmlschema.xml" );
// First pass only dumps the generated DDL; nothing is executed against MySQL.
var_dump( $sql );
print "</pre>\n";
// Execute the SQL on the database
//$result = $schema->ExecuteSchema( $sql );
// Finally, clean up after the XML parser
// (PHP won't do this for you!)
//$schema->Destroy();
// Second pass: parse the MSSQL variant of the schema and actually run it.
print "<b>SQL to build xmlschema-mssql.xml</b>:\n<pre>";
$db2 = ADONewConnection('mssql');
$db2->Connect('','adodb','natsoft','northwind') || die("Fail 2");
// Drop the target table first — presumably so the generated CREATE statements
// start from a clean state; errors from a missing table are ignored.
$db2->Execute("drop table simple_table");
$schema = new adoSchema( $db2 );
$sql = $schema->ParseSchema( "xmlschema-mssql.xml" );
print_r( $sql );
print "</pre>\n";
// Debug mode echoes each statement as it is executed.
$db2->debug=1;
// Apply the parsed schema statement-by-statement to the MSSQL connection.
foreach ($sql as $s)
	$db2->Execute($s);
?>
"content_hash": "b6e664ab36ecf2467491d17c470d5a4f",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 72,
"avg_line_length": 24.14814814814815,
"alnum_prop": 0.6602760736196319,
"repo_name": "SoundWavezZzZz/project-snake",
"id": "823df4e4e529dac94d32c3fd6bed0072472b6c88",
"size": "1304",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dep/adodb5/tests/test-xmlschema.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1712"
},
{
"name": "PHP",
"bytes": "2754487"
},
{
"name": "XSLT",
"bytes": "28086"
}
],
"symlink_target": ""
} |
package owusu.agyei.liz.tryy;
import android.app.Notification;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.Intent;
import android.support.v4.app.NotificationCompat;
import android.util.Log;
import com.google.firebase.messaging.FirebaseMessagingService;
import com.google.firebase.messaging.RemoteMessage;
public class MyFirebaseMessagingService extends FirebaseMessagingService {

    private static final String TAG = "MyFMService";

    /**
     * Returns the value stored under {@code key} in the message's data payload,
     * or an empty string when the key is absent.
     */
    private String dataValue(RemoteMessage message, String key) {
        return message.getData().containsKey(key) ? message.getData().get(key) : "";
    }

    @Override
    public void onMessageReceived(RemoteMessage remoteMessage) {
        // Handle data payload of FCM messages; any failure is logged and swallowed
        // so a malformed message cannot crash the service.
        try {
            if (remoteMessage.getData() == null) {
                return;
            }
            // Persist the message so cached notifications can be counted later.
            BaseActivity.addNotificationToCache(this, remoteMessage);
            // Only surface a system-tray notification while the app is not visible.
            if (!MyApp.isActivityVisible()) {
                showNotificationInSystemTray(remoteMessage);
            }
            // Notify any in-app receiver about the new payload.
            // NOTE(review): this broadcast uses action "ACTION_DETAILS" while the
            // tray notification below uses "ACTIVITY_DETAILS" — confirm the
            // mismatch is intentional.
            Intent broadcast = new Intent();
            broadcast.setAction("ACTION_DETAILS");
            broadcast.putExtra("texting", dataValue(remoteMessage, "texting"));
            sendBroadcast(broadcast);
            Log.d(TAG, "FCM Message Id: " + remoteMessage.getMessageId());
            //Log.d(TAG, "FCM Notification Message: " + remoteMessage.getNotification().getBody());
            Log.d(TAG, "FCM Data Message: " + remoteMessage.getData());
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Posts a tray notification for the given message, appending an
     * "and N more Notifications" suffix when earlier messages are still cached.
     */
    private void showNotificationInSystemTray(RemoteMessage remoteMessage) {
        String paddedMessage = "";
        if (BaseActivity.getNotificationCount(this) > 1) {
            paddedMessage = " and " + (BaseActivity.getNotificationCount(this) - 1) + " more Notifications";
        }
        Intent actionIntent = new Intent("ACTIVITY_DETAILS");
        actionIntent.putExtra("texting", dataValue(remoteMessage, "texting"));
        PendingIntent pi = PendingIntent.getActivity(this, 0, actionIntent, 0);
        String title = dataValue(remoteMessage, "title");
        // The "more notifications" suffix is only shown when a description exists.
        String text = remoteMessage.getData().containsKey("description")
                ? remoteMessage.getData().get("description") + paddedMessage : "";
        Notification notification = new NotificationCompat.Builder(this)
                .setTicker(title)
                .setSmallIcon(R.mipmap.ic_launcher)
                .setContentTitle(title)
                .setContentText(text)
                .setContentIntent(pi)
                .setAutoCancel(true)
                .build();
        NotificationManager notificationManager = (NotificationManager) getSystemService(NOTIFICATION_SERVICE);
        notificationManager.notify(0, notification);
    }
}
| {
"content_hash": "9fa7914af1658be3adcfdb9865a0f47b",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 161,
"avg_line_length": 48.067796610169495,
"alnum_prop": 0.6583215796897038,
"repo_name": "AndroidCoder1/FirebaseCloudMessaging",
"id": "7924b6df74c1f9589f4091b9f17f54986f12f299",
"size": "3443",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/src/main/java/owusu/agyei/liz/tryy/MyFirebaseMessagingService.java",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "17198"
}
],
"symlink_target": ""
} |
'use strict';
// Expose the implementation in lib/psm as this package's entry point.
module.exports = require('./lib/psm');
| {
"content_hash": "5c4782d4734fc5112b5712a0b222a762",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 38,
"avg_line_length": 11.2,
"alnum_prop": 0.625,
"repo_name": "xudafeng/psm",
"id": "8106caf98629ead3b65eca691fccd9765e9d0c9f",
"size": "491",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "index.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "2356"
},
{
"name": "Makefile",
"bytes": "681"
}
],
"symlink_target": ""
} |
#include "slib/core/definition.h"
#if defined(SLIB_GRAPHICS_IS_GDI)
#include "slib/graphics/brush.h"
#include "slib/graphics/image.h"
#include "slib/graphics/platform.h"
#include "slib/core/scoped.h"
namespace slib
{

	namespace priv
	{
		namespace gdi
		{

			// Platform-side state for a Brush on GDI: owns the native Gdiplus::Brush
			// built from a BrushDesc.
			class BrushPlatformObject : public Referable
			{
			public:
				// Native brush handle; stays NULL when the style is unknown or the
				// required detail data is missing.
				Gdiplus::Brush* m_brush;
				// For texture brushes built from an Image pattern: keeps the platform
				// drawable (and its Gdiplus::Image) alive for the lifetime of the
				// brush — the TextureBrush references but does not own the image
				// (TODO confirm ownership contract).
				Ref<Drawable> m_drawableCache;

			public:
				// Builds the native brush matching desc.style (solid, linear/radial
				// gradient, or texture).
				BrushPlatformObject(const BrushDesc& desc)
				{
					// GDI+ must be started before any Gdiplus object is constructed.
					GraphicsPlatform::startGdiplus();
					m_brush = NULL;
					if (desc.style == BrushStyle::Solid) {
						const Color& _color = desc.color;
						Gdiplus::Color color(_color.a, _color.r, _color.g, _color.b);
						m_brush = new Gdiplus::SolidBrush(color);
					} else if (desc.style == BrushStyle::LinearGradient || desc.style == BrushStyle::RadialGradient) {
						GradientBrushDetail* detail = (GradientBrushDetail*)(desc.detail.get());
						if (detail) {
							ListElements<Color> _colors(detail->colors);
							ListElements<sl_real> _locations(detail->locations);
							sl_size n = _colors.count;
							// NOTE(review): colors[0]/colors[1] (and _colors[0]/_colors[1]
							// below) are read unconditionally, so this code assumes at
							// least two gradient stops — confirm callers guarantee that.
							SLIB_SCOPED_BUFFER(Gdiplus::Color, 128, colors, n);
							SLIB_SCOPED_BUFFER(Gdiplus::REAL, 128, locations, n);
							if (desc.style == BrushStyle::LinearGradient) {
								for (sl_size i = 0; i < n; i++) {
									colors[i] = Gdiplus::Color(_colors[i].a, _colors[i].r, _colors[i].g, _colors[i].b);
									locations[i] = (Gdiplus::REAL)(_locations[i]);
								}
								Gdiplus::PointF pt1((Gdiplus::REAL)(detail->point1.x), (Gdiplus::REAL)(detail->point1.y));
								Gdiplus::PointF pt2((Gdiplus::REAL)(detail->point2.x), (Gdiplus::REAL)(detail->point2.y));
								Gdiplus::LinearGradientBrush* brush = new Gdiplus::LinearGradientBrush(pt1, pt2, colors[0], colors[1]);
								if (brush) {
									// Mirror the gradient beyond its end points instead of clamping.
									brush->SetWrapMode(Gdiplus::WrapModeTileFlipXY);
									if (n > 2) {
										// More than two stops: replace the two-color ramp
										// with the full interpolation list.
										brush->SetInterpolationColors(colors, locations, (INT)n);
									}
									m_brush = brush;
								}
							} else {
								// Radial gradient: modeled as a GDI+ path gradient over a
								// circle of detail->radius centered on point1.
								Gdiplus::GraphicsPath path;
								path.AddEllipse((Gdiplus::REAL)(detail->point1.x - detail->radius), (Gdiplus::REAL)(detail->point1.y - detail->radius), (Gdiplus::REAL)(detail->radius * 2), (Gdiplus::REAL)(detail->radius * 2));
								Gdiplus::PathGradientBrush* brush = new Gdiplus::PathGradientBrush(&path);
								if (brush) {
									if (n > 2) {
										// Stops are reversed and their positions mirrored
										// (1 - location) — presumably because
										// PathGradientBrush interpolates from the path
										// boundary toward the center, the opposite of the
										// order the stops are supplied in; confirm.
										for (sl_size i = 0; i < n; i++) {
											Color& color = _colors[n - 1 - i];
											colors[i] = Gdiplus::Color(color.a, color.r, color.g, color.b);
											locations[i] = (Gdiplus::REAL)(1 - _locations[n - 1 - i]);
										}
										brush->SetInterpolationColors(colors, locations, (INT)n);
									} else {
										// Exactly two stops: first color at the center,
										// second at the circle's edge.
										Gdiplus::Color c0(_colors[0].a, _colors[0].r, _colors[0].g, _colors[0].b);
										Gdiplus::Color c1(_colors[1].a, _colors[1].r, _colors[1].g, _colors[1].b);
										brush->SetCenterColor(c0);
										INT k = 1;
										brush->SetSurroundColors(&c1, &k);
									}
									brush->SetCenterPoint(Gdiplus::PointF((Gdiplus::REAL)(detail->point1.x), (Gdiplus::REAL)(detail->point1.y)));
									m_brush = brush;
								}
							}
						}
					} else if (desc.style == BrushStyle::Texture) {
						TextureBrushDetail* detail = (TextureBrushDetail*)(desc.detail.get());
						if (detail) {
							// NOTE(review): detail->pattern is dereferenced without a null
							// check — presumably guaranteed non-null at construction time;
							// confirm.
							Bitmap* pattern = detail->pattern.get();
							if (pattern->isImage()) {
								// Image-backed pattern: wrap it in a platform drawable and
								// cache that drawable so the Gdiplus::Image stays valid.
								Ref<Drawable> drawable = PlatformDrawable::create((Image*)pattern);
								if (drawable.isNotNull()) {
									Gdiplus::Image* image = GraphicsPlatform::getImageDrawableHandle(drawable.get());
									if (image) {
										m_brush = new Gdiplus::TextureBrush(image);
										m_drawableCache = drawable;
									}
								}
							} else {
								// Bitmap-backed pattern: use the native bitmap handle directly.
								Gdiplus::Bitmap* bitmap = GraphicsPlatform::getBitmapHandle(pattern);
								if (bitmap) {
									m_brush = new Gdiplus::TextureBrush(bitmap);
								}
							}
						}
					}
				}

				~BrushPlatformObject()
				{
					delete m_brush;
				}

			};

			// Accessor mixin over Brush: exposes the lazily-created platform object.
			class BrushHelper : public Brush
			{
			public:
				// Returns the platform object, creating it on first use with
				// double-checked locking under the brush's spin lock.
				BrushPlatformObject* getPlatformObject()
				{
					if (m_platformObject.isNull()) {
						SpinLocker lock(&m_lock);
						if (m_platformObject.isNull()) {
							m_platformObject = new BrushPlatformObject(m_desc);
						}
					}
					return (BrushPlatformObject*)(m_platformObject.get());
				}

				// Returns the native Gdiplus::Brush, or NULL when it could not be built.
				Gdiplus::Brush* getPlatformHandle()
				{
					BrushPlatformObject* po = getPlatformObject();
					if (po) {
						return po->m_brush;
					}
					return NULL;
				}

			};

		}
	}

	using namespace priv::gdi;

	// Returns the native GDI+ brush for the given Brush, or NULL when brush is
	// null or the native object could not be created.
	Gdiplus::Brush* GraphicsPlatform::getBrushHandle(Brush* brush)
	{
		if (brush) {
			return ((BrushHelper*)brush)->getPlatformHandle();
		}
		return NULL;
	}

}
#endif
| {
"content_hash": "99ebb04dec2ef1cbf22a524348de2137",
"timestamp": "",
"source": "github",
"line_count": 152,
"max_line_length": 202,
"avg_line_length": 31.06578947368421,
"alnum_prop": 0.5902160101651842,
"repo_name": "SLIBIO/SLib",
"id": "0910db5d9839d87511909dad00869551866a8d4a",
"size": "5897",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/slib/graphics/brush_gdi.cpp",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "12324"
},
{
"name": "C",
"bytes": "172728"
},
{
"name": "C++",
"bytes": "8974152"
},
{
"name": "CMake",
"bytes": "25579"
},
{
"name": "Java",
"bytes": "412451"
},
{
"name": "Objective-C",
"bytes": "13961"
},
{
"name": "Objective-C++",
"bytes": "641784"
},
{
"name": "PHP",
"bytes": "306070"
},
{
"name": "Shell",
"bytes": "4835"
},
{
"name": "SourcePawn",
"bytes": "19475"
}
],
"symlink_target": ""
} |
namespace FactoryMethod
{
    /// <summary>
    /// Concrete product in the factory-method example: a New York-style cheese pizza.
    /// </summary>
    internal class NYStyleCheesePizza : Pizza
    {
        public NYStyleCheesePizza()
        {
            // New York-style pizza is prepared with marinara sauce on a thin crust.
            Name = "NY Style Sauce and Cheese Pizza";
            Dough = "Thin Crust Dough";
            Sauce = "Marinara Sauce";
            // Single topping: grated Reggiano cheese.
            Toppings.Add("Grated Reggiano Cheese");
        }
    }
}
| {
"content_hash": "dbf99948fadde21b684fb7f07c5565b7",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 71,
"avg_line_length": 23.1875,
"alnum_prop": 0.6873315363881402,
"repo_name": "NeverNight/SimplesPatterns.CSharp",
"id": "62cfc9412a367dfc5c1fa0134fc42b11b3554f48",
"size": "448",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Factory/FactoryMethod/NYStyleCheesePizza.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "221972"
},
{
"name": "Smalltalk",
"bytes": "493612"
}
],
"symlink_target": ""
} |
/**
 * Signs the user out of the Google Sign-In session and navigates back to the
 * application root, replacing the current history entry.
 */
function signOut() {
    var auth2 = gapi.auth2.getAuthInstance();
    auth2.disconnect().then(function () {
        // Bug fix: the original assigned `location = "/fat/"` and then called
        // `window.replace(location)`, but `window.replace` does not exist and
        // threw a TypeError. `location.replace` is the correct API and avoids
        // leaving the signed-in page in the session history.
        window.location.replace("/fat/");
    });
}
"content_hash": "99692713ea940f1f58d8c0db217158e0",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 44,
"avg_line_length": 22,
"alnum_prop": 0.6022727272727273,
"repo_name": "baydinsoftware/FAT",
"id": "fabb99d8426adb048c6ef4ec12363aacff2bb493",
"size": "176",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fatproject/fat/static/fat/js/logout.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "503"
},
{
"name": "HTML",
"bytes": "26379"
},
{
"name": "JavaScript",
"bytes": "88957"
},
{
"name": "Python",
"bytes": "29630"
}
],
"symlink_target": ""
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc (build 1.6.0_04) on Thu Jan 10 14:33:56 CET 2013 -->
<TITLE>
LatestRevisionStrategy
</TITLE>
<META NAME="date" CONTENT="2013-01-10">
<LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../stylesheet.css" TITLE="Style">
<SCRIPT type="text/javascript">
function windowTitle()
{
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="LatestRevisionStrategy";
}
}
</SCRIPT>
<NOSCRIPT>
</NOSCRIPT>
</HEAD>
<BODY BGCOLOR="white" onload="windowTitle();">
<HR>
<!-- ========= START OF TOP NAVBAR ======= -->
<A NAME="navbar_top"><!-- --></A>
<A HREF="#skip-navbar_top" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_top_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Class</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../org/apache/ivy/plugins/latest/LatestLexicographicStrategy.html" title="class in org.apache.ivy.plugins.latest"><B>PREV CLASS</B></A>
<A HREF="../../../../../org/apache/ivy/plugins/latest/LatestRevisionStrategy.SpecialMeaning.html" title="class in org.apache.ivy.plugins.latest"><B>NEXT CLASS</B></A></FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../index.html?org/apache/ivy/plugins/latest/LatestRevisionStrategy.html" target="_top"><B>FRAMES</B></A>
<A HREF="LatestRevisionStrategy.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
<TR>
<TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2">
SUMMARY: <A HREF="#nested_class_summary">NESTED</A> | FIELD | <A HREF="#constructor_summary">CONSTR</A> | <A HREF="#method_summary">METHOD</A></FONT></TD>
<TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2">
DETAIL: FIELD | <A HREF="#constructor_detail">CONSTR</A> | <A HREF="#method_detail">METHOD</A></FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_top"></A>
<!-- ========= END OF TOP NAVBAR ========= -->
<HR>
<!-- ======== START OF CLASS DATA ======== -->
<H2>
<FONT SIZE="-1">
org.apache.ivy.plugins.latest</FONT>
<BR>
Class LatestRevisionStrategy</H2>
<PRE>
java.lang.Object
<IMG SRC="../../../../../resources/inherit.gif" ALT="extended by "><A HREF="../../../../../org/apache/ivy/plugins/latest/AbstractLatestStrategy.html" title="class in org.apache.ivy.plugins.latest">org.apache.ivy.plugins.latest.AbstractLatestStrategy</A>
<IMG SRC="../../../../../resources/inherit.gif" ALT="extended by "><A HREF="../../../../../org/apache/ivy/plugins/latest/ComparatorLatestStrategy.html" title="class in org.apache.ivy.plugins.latest">org.apache.ivy.plugins.latest.ComparatorLatestStrategy</A>
<IMG SRC="../../../../../resources/inherit.gif" ALT="extended by "><B>org.apache.ivy.plugins.latest.LatestRevisionStrategy</B>
</PRE>
<DL>
<DT><B>All Implemented Interfaces:</B> <DD><A HREF="../../../../../org/apache/ivy/plugins/latest/LatestStrategy.html" title="interface in org.apache.ivy.plugins.latest">LatestStrategy</A></DD>
</DL>
<HR>
<DL>
<DT><PRE>public class <B>LatestRevisionStrategy</B><DT>extends <A HREF="../../../../../org/apache/ivy/plugins/latest/ComparatorLatestStrategy.html" title="class in org.apache.ivy.plugins.latest">ComparatorLatestStrategy</A></DL>
</PRE>
<P>
<HR>
<P>
<!-- ======== NESTED CLASS SUMMARY ======== -->
<A NAME="nested_class_summary"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
<B>Nested Class Summary</B></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE>static class</CODE></FONT></TD>
<TD><CODE><B><A HREF="../../../../../org/apache/ivy/plugins/latest/LatestRevisionStrategy.SpecialMeaning.html" title="class in org.apache.ivy.plugins.latest">LatestRevisionStrategy.SpecialMeaning</A></B></CODE>
<BR>
</TD>
</TR>
</TABLE>
<!-- ======== CONSTRUCTOR SUMMARY ======== -->
<A NAME="constructor_summary"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
<B>Constructor Summary</B></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><CODE><B><A HREF="../../../../../org/apache/ivy/plugins/latest/LatestRevisionStrategy.html#LatestRevisionStrategy()">LatestRevisionStrategy</A></B>()</CODE>
<BR>
</TD>
</TR>
</TABLE>
<!-- ========== METHOD SUMMARY =========== -->
<A NAME="method_summary"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
<B>Method Summary</B></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> void</CODE></FONT></TD>
<TD><CODE><B><A HREF="../../../../../org/apache/ivy/plugins/latest/LatestRevisionStrategy.html#addConfiguredSpecialMeaning(org.apache.ivy.plugins.latest.LatestRevisionStrategy.SpecialMeaning)">addConfiguredSpecialMeaning</A></B>(<A HREF="../../../../../org/apache/ivy/plugins/latest/LatestRevisionStrategy.SpecialMeaning.html" title="class in org.apache.ivy.plugins.latest">LatestRevisionStrategy.SpecialMeaning</A> meaning)</CODE>
<BR>
</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> java.util.Map</CODE></FONT></TD>
<TD><CODE><B><A HREF="../../../../../org/apache/ivy/plugins/latest/LatestRevisionStrategy.html#getSpecialMeanings()">getSpecialMeanings</A></B>()</CODE>
<BR>
</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> boolean</CODE></FONT></TD>
<TD><CODE><B><A HREF="../../../../../org/apache/ivy/plugins/latest/LatestRevisionStrategy.html#isUsedefaultspecialmeanings()">isUsedefaultspecialmeanings</A></B>()</CODE>
<BR>
</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> void</CODE></FONT></TD>
<TD><CODE><B><A HREF="../../../../../org/apache/ivy/plugins/latest/LatestRevisionStrategy.html#setUsedefaultspecialmeanings(boolean)">setUsedefaultspecialmeanings</A></B>(boolean usedefaultspecialmeanings)</CODE>
<BR>
</TD>
</TR>
</TABLE>
<A NAME="methods_inherited_from_class_org.apache.ivy.plugins.latest.ComparatorLatestStrategy"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#EEEEFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left"><B>Methods inherited from class org.apache.ivy.plugins.latest.<A HREF="../../../../../org/apache/ivy/plugins/latest/ComparatorLatestStrategy.html" title="class in org.apache.ivy.plugins.latest">ComparatorLatestStrategy</A></B></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><CODE><A HREF="../../../../../org/apache/ivy/plugins/latest/ComparatorLatestStrategy.html#getComparator()">getComparator</A>, <A HREF="../../../../../org/apache/ivy/plugins/latest/ComparatorLatestStrategy.html#setComparator(java.util.Comparator)">setComparator</A>, <A HREF="../../../../../org/apache/ivy/plugins/latest/ComparatorLatestStrategy.html#sort(org.apache.ivy.plugins.latest.ArtifactInfo[])">sort</A></CODE></TD>
</TR>
</TABLE>
<A NAME="methods_inherited_from_class_org.apache.ivy.plugins.latest.AbstractLatestStrategy"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#EEEEFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left"><B>Methods inherited from class org.apache.ivy.plugins.latest.<A HREF="../../../../../org/apache/ivy/plugins/latest/AbstractLatestStrategy.html" title="class in org.apache.ivy.plugins.latest">AbstractLatestStrategy</A></B></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><CODE><A HREF="../../../../../org/apache/ivy/plugins/latest/AbstractLatestStrategy.html#findLatest(org.apache.ivy.plugins.latest.ArtifactInfo[], java.util.Date)">findLatest</A>, <A HREF="../../../../../org/apache/ivy/plugins/latest/AbstractLatestStrategy.html#getName()">getName</A>, <A HREF="../../../../../org/apache/ivy/plugins/latest/AbstractLatestStrategy.html#setName(java.lang.String)">setName</A>, <A HREF="../../../../../org/apache/ivy/plugins/latest/AbstractLatestStrategy.html#toString()">toString</A></CODE></TD>
</TR>
</TABLE>
<A NAME="methods_inherited_from_class_java.lang.Object"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#EEEEFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left"><B>Methods inherited from class java.lang.Object</B></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><CODE>clone, equals, finalize, getClass, hashCode, notify, notifyAll, wait, wait, wait</CODE></TD>
</TR>
</TABLE>
<P>
<!-- ========= CONSTRUCTOR DETAIL ======== -->
<A NAME="constructor_detail"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="1"><FONT SIZE="+2">
<B>Constructor Detail</B></FONT></TH>
</TR>
</TABLE>
<A NAME="LatestRevisionStrategy()"><!-- --></A><H3>
LatestRevisionStrategy</H3>
<PRE>
public <B>LatestRevisionStrategy</B>()</PRE>
<DL>
</DL>
<!-- ============ METHOD DETAIL ========== -->
<A NAME="method_detail"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="1"><FONT SIZE="+2">
<B>Method Detail</B></FONT></TH>
</TR>
</TABLE>
<A NAME="addConfiguredSpecialMeaning(org.apache.ivy.plugins.latest.LatestRevisionStrategy.SpecialMeaning)"><!-- --></A><H3>
addConfiguredSpecialMeaning</H3>
<PRE>
public void <B>addConfiguredSpecialMeaning</B>(<A HREF="../../../../../org/apache/ivy/plugins/latest/LatestRevisionStrategy.SpecialMeaning.html" title="class in org.apache.ivy.plugins.latest">LatestRevisionStrategy.SpecialMeaning</A> meaning)</PRE>
<DL>
<DD><DL>
</DL>
</DD>
</DL>
<HR>
<A NAME="getSpecialMeanings()"><!-- --></A><H3>
getSpecialMeanings</H3>
<PRE>
public java.util.Map <B>getSpecialMeanings</B>()</PRE>
<DL>
<DD><DL>
</DL>
</DD>
</DL>
<HR>
<A NAME="isUsedefaultspecialmeanings()"><!-- --></A><H3>
isUsedefaultspecialmeanings</H3>
<PRE>
public boolean <B>isUsedefaultspecialmeanings</B>()</PRE>
<DL>
<DD><DL>
</DL>
</DD>
</DL>
<HR>
<A NAME="setUsedefaultspecialmeanings(boolean)"><!-- --></A><H3>
setUsedefaultspecialmeanings</H3>
<PRE>
public void <B>setUsedefaultspecialmeanings</B>(boolean usedefaultspecialmeanings)</PRE>
<DL>
<DD><DL>
</DL>
</DD>
</DL>
<!-- ========= END OF CLASS DATA ========= -->
<HR>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<A NAME="navbar_bottom"><!-- --></A>
<A HREF="#skip-navbar_bottom" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_bottom_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Class</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../org/apache/ivy/plugins/latest/LatestLexicographicStrategy.html" title="class in org.apache.ivy.plugins.latest"><B>PREV CLASS</B></A>
<A HREF="../../../../../org/apache/ivy/plugins/latest/LatestRevisionStrategy.SpecialMeaning.html" title="class in org.apache.ivy.plugins.latest"><B>NEXT CLASS</B></A></FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../index.html?org/apache/ivy/plugins/latest/LatestRevisionStrategy.html" target="_top"><B>FRAMES</B></A>
<A HREF="LatestRevisionStrategy.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
<TR>
<TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2">
SUMMARY: <A HREF="#nested_class_summary">NESTED</A> | FIELD | <A HREF="#constructor_summary">CONSTR</A> | <A HREF="#method_summary">METHOD</A></FONT></TD>
<TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2">
DETAIL: FIELD | <A HREF="#constructor_detail">CONSTR</A> | <A HREF="#method_detail">METHOD</A></FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_bottom"></A>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<HR>
</BODY>
</HTML>
| {
"content_hash": "1f9e43be45a8e5d143515af9ea1f4461",
"timestamp": "",
"source": "github",
"line_count": 347,
"max_line_length": 528,
"avg_line_length": 46.14985590778098,
"alnum_prop": 0.6545522667665793,
"repo_name": "adriancmiranda/flash-compiler",
"id": "c20ba2db82a4c868c2348520e356d40fb632da17",
"size": "16014",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ApacheFlexSDK4/ant/ivy/doc/reports/api/org/apache/ivy/plugins/latest/LatestRevisionStrategy.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ActionScript",
"bytes": "33714531"
},
{
"name": "Batchfile",
"bytes": "65217"
},
{
"name": "C",
"bytes": "52818"
},
{
"name": "CSS",
"bytes": "576224"
},
{
"name": "HTML",
"bytes": "51021362"
},
{
"name": "Java",
"bytes": "190833"
},
{
"name": "JavaScript",
"bytes": "599657"
},
{
"name": "Objective-C",
"bytes": "79911"
},
{
"name": "Perl",
"bytes": "9844"
},
{
"name": "Python",
"bytes": "3299"
},
{
"name": "R",
"bytes": "1782"
},
{
"name": "Shell",
"bytes": "149532"
},
{
"name": "Visual Basic",
"bytes": "3122"
},
{
"name": "XSLT",
"bytes": "977241"
}
],
"symlink_target": ""
} |
# Drops the legacy +credit_total+ column from the spree_orders table.
class RemoveCreditTotalFromOrders < ActiveRecord::Migration
  def change
    # NOTE(review): remove_column inside +change+ without the column type makes
    # this migration irreversible (ActiveRecord::IrreversibleMigration on
    # rollback) — confirm whether rollback support is needed and, if so, supply
    # the type argument.
    remove_column :spree_orders, :credit_total
  end
end
| {
"content_hash": "1d0ef093d3f3d52941eb2bcc9c26fd3e",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 59,
"avg_line_length": 26,
"alnum_prop": 0.7846153846153846,
"repo_name": "ambertch/stylestalk-spree",
"id": "3bfdf97bd7bf8f8ddc78dba879de218d9c22f315",
"size": "130",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "core/db/migrate/20120604203654_remove_credit_total_from_orders.rb",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CoffeeScript",
"bytes": "5924"
},
{
"name": "JavaScript",
"bytes": "22303"
},
{
"name": "Ruby",
"bytes": "1032474"
},
{
"name": "Shell",
"bytes": "358"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="utf-8"?>
<!-- Plain full-screen TextView used as the checked-text layout.
     NOTE(review): android:orientation is a LinearLayout attribute and has no
     effect on a TextView — confirm whether this was meant to be a LinearLayout
     or whether the attribute can be dropped. -->
<TextView xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:orientation="vertical">
</TextView>
"content_hash": "9918c149c538d63c122854746bf66dcb",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 68,
"avg_line_length": 34.714285714285715,
"alnum_prop": 0.7037037037037037,
"repo_name": "fuhongliang/CuiTrip",
"id": "e7a97077e63d9ec858c422b638d6f99d4116f3a5",
"size": "243",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "TripApp/src/main/res/layout/ct_checked_text.xml",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "7988"
},
{
"name": "Java",
"bytes": "1252509"
}
],
"symlink_target": ""
} |
/** Events emitted when a breakpoint is added to or removed from the set. */
export enum BreakpointEvent { Add, Remove }

/**
 * Tracks breakpoints as "file:line" addresses, each backed by an editor marker,
 * and notifies registered listeners when the set changes.
 */
export class Breakpoints {
    private breakpoints: { [address: string]: AtomCore.IDisplayBufferMarker };
    private listeners: (((address: string) => void)[])[];

    constructor() {
        this.breakpoints = {};
        this.listeners = [];
        this.listeners[BreakpointEvent.Add] = [];
        this.listeners[BreakpointEvent.Remove] = [];
    }

    /** Toggles a breakpoint on the cursor's current line in the active editor. */
    public toggle(): void {
        const editor = atom.workspace.getActiveTextEditor();
        const row = editor.getCursorBufferPosition().row;
        const address = editor.buffer.file.path + ":" + (row + 1);
        const existing = this.breakpoints[address];
        if (existing) {
            existing.destroy();
            delete this.breakpoints[address];
            this.notify(BreakpointEvent.Remove, address);
            return;
        }
        const marker = editor.markBufferRange([[row, 0], [row, 0]], { invalidate: 'never' });
        // Gutter decoration makes the breakpoint visible on the line number.
        editor.decorateMarker(marker, { type: 'line-number', class: "atom-delve-breakpoint" });
        this.breakpoints[address] = marker;
        this.notify(BreakpointEvent.Add, address);
    }

    /** Invokes every listener registered for the given event. */
    private notify(event: BreakpointEvent, address: string): void {
        this.listeners[event].forEach(listener => listener(address));
    }

    /** Registers a callback for breakpoint add/remove events. */
    public addListener(event: BreakpointEvent, listener: (address: string) => void): void {
        this.listeners[event].push(listener);
    }

    /** Unregisters a previously registered callback (matched by identity). */
    public removeListener(event: BreakpointEvent, listener: (address: string) => void): void {
        this.listeners[event] = this.listeners[event].filter(registered => registered !== listener);
    }

    /** Returns the addresses ("file:line") of all current breakpoints. */
    public getBreakpoints(): string[] {
        return Object.keys(this.breakpoints);
    }
}
| {
"content_hash": "4a6a05052b49e4ca879a39822c7c3c2c",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 109,
"avg_line_length": 32.056603773584904,
"alnum_prop": 0.646262507357269,
"repo_name": "flec/atom-delve",
"id": "91ff55e0c73fef112eb86a5fe31b07b58405d3cd",
"size": "1699",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/breakpoints.ts",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "749"
},
{
"name": "CoffeeScript",
"bytes": "1160"
},
{
"name": "JavaScript",
"bytes": "9745"
},
{
"name": "TypeScript",
"bytes": "8522"
}
],
"symlink_target": ""
} |
namespace storage
{
    using Microsoft.Rest;
    using Microsoft.Rest.Azure;
    using Models;
    using System.Collections;
    using System.Collections.Generic;
    using System.Threading;
    using System.Threading.Tasks;

    /// <summary>
    /// Extension methods for UsageOperations.
    /// </summary>
    public static partial class UsageOperationsExtensions
    {
            /// <summary>
            /// Gets the current usage count and the limit for the resources under the
            /// subscription.
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <returns>The usage counts and limits for the subscription's resources.</returns>
            public static IEnumerable<Usage> List(this IUsageOperations operations)
            {
                // Synchronous wrapper over ListAsync: blocks the calling thread until
                // the request completes. GetAwaiter().GetResult() (rather than .Result)
                // surfaces failures as the original exception instead of wrapping them
                // in an AggregateException.
                return operations.ListAsync().GetAwaiter().GetResult();
            }

            /// <summary>
            /// Gets the current usage count and the limit for the resources under the
            /// subscription.
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='cancellationToken'>
            /// The cancellation token.
            /// </param>
            /// <returns>The usage counts and limits for the subscription's resources.</returns>
            public static async Task<IEnumerable<Usage>> ListAsync(this IUsageOperations operations, CancellationToken cancellationToken = default(CancellationToken))
            {
                // Dispose the HTTP operation response once its body has been read.
                using (var _result = await operations.ListWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false))
                {
                    return _result.Body;
                }
            }
    }
}
| {
"content_hash": "e76d3f947ab37c48495adcb5782ee320",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 166,
"avg_line_length": 35.851063829787236,
"alnum_prop": 0.568545994065282,
"repo_name": "jianghaolu/AutoRest",
"id": "7f0680ee3c2df3d29beb1b555d6a36dcda33db8b",
"size": "1841",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Samples/2a-arm-validation/CSharp/UsageOperationsExtensions.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "15054795"
},
{
"name": "CSS",
"bytes": "110"
},
{
"name": "CoffeeScript",
"bytes": "63158"
},
{
"name": "Go",
"bytes": "147575"
},
{
"name": "HTML",
"bytes": "274"
},
{
"name": "Java",
"bytes": "7894733"
},
{
"name": "JavaScript",
"bytes": "6958333"
},
{
"name": "PowerShell",
"bytes": "37989"
},
{
"name": "Python",
"bytes": "2080397"
},
{
"name": "Ruby",
"bytes": "182108"
},
{
"name": "Shell",
"bytes": "142"
},
{
"name": "TypeScript",
"bytes": "432413"
}
],
"symlink_target": ""
} |
package de.dbis.acis.cloud.Tethys.client;
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import javax.servlet.ServletOutputStream;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.ResponseBuilder;
import javax.ws.rs.core.StreamingOutput;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.ClientResponse.Status;
import com.sun.jersey.api.client.WebResource;
import com.sun.jersey.api.client.config.ClientConfig;
import com.sun.jersey.api.client.config.DefaultClientConfig;
import com.sun.jersey.api.json.JSONConfiguration;
import de.dbis.acis.cloud.Tethys.message.client.MessageAuth;
import de.dbis.acis.cloud.Tethys.message.server.SMessageAuth;
import de.dbis.acis.cloud.Tethys.util.GsonMessageBodyHandler;
/**
* Contains all methods to communicate with Openstack.
*
* @author Gordon Lawrenz <lawrenz@dbis.rwth-aachen.de>
*/
public class OpenstackClient {
    // TODO: endpoints, IPs and ports are hard-coded; move them to configuration.
    private static ClientConfig cfg = null;
    private static String protocol = "http://";
    private static String externalOpenstackIP ="137.226.58.2";
    private static String internalOpenstackIP ="10.255.255.3";
    // IP used when building URLs handed back to clients (publishing), as opposed
    // to the internal IP used for server-to-server calls.
    private static String openstackIPForPublishing = externalOpenstackIP;
    private static String portKeystoneAdmin = ":35357";
    private static String portKeystoneMember = ":5000";
    private static String portNovaMember = ":8774";
    private static String portSwiftMember = ":8888";
    private static String portGlanceMember = ":9292";
    /**
     * Returns a lazily created, shared ClientConfig used for all Jersey clients
     * talking to Openstack (Gson message body handling + POJO mapping enabled).
     * NOTE(review): lazy init is not synchronized — confirm single-threaded use
     * or accept the benign race of building the config twice.
     *
     * @return ClientConfig
     */
    private static ClientConfig returnClientConfig() {
        if(cfg == null) {
            cfg = new DefaultClientConfig();
            cfg.getClasses().add(GsonMessageBodyHandler.class);
            cfg.getFeatures().put(JSONConfiguration.FEATURE_POJO_MAPPING, true);
        }
        return cfg;
    }
    /**
     * Authenticates against Openstack (Keystone POST /v2.0/tokens).
     * <p>
     * input: {"service":""}, {"username":""}, {"password":""}
     * <p>
     * output: like openstack gives it back.
     *
     * @param smessage credentials (tenant/service name, username, password)
     * @param admin    true to use the Keystone admin port, false for the member port
     * @return JsonObject Keystone response, or null when the status is not 200 OK
     */
    public static JsonObject authOpenstack(SMessageAuth smessage, boolean admin) {
        Client client = Client.create(returnClientConfig());
        WebResource tokens = client.resource(protocol+internalOpenstackIP+ (admin?portKeystoneAdmin:portKeystoneMember) +"/v2.0/tokens");
        MessageAuth message = new MessageAuth(smessage);
        ClientResponse response = tokens.accept(MediaType.APPLICATION_JSON).type(MediaType.APPLICATION_JSON).post(ClientResponse.class,message);
        JsonObject output = null;
        if(response.getClientResponseStatus()==Status.OK) {
            output = response.getEntity(JsonObject.class);
        }
        return output;
    }
    // TODO refactor
    /**
     * Authenticates as a member and reduces the Keystone response to the fields
     * the cloud needs, including a publishable Swift URL for the tenant.
     * <p>
     * input: {"service":""}, {"username":""}, {"password":""}
     * <p>
     * output {"X-Auth-Token":"","expires":"","swift-url":""}
     *
     * @param smessage credentials (tenant/service name, username, password)
     * @return JsonObject token summary, or null when authentication failed
     */
    public static JsonObject manipulateAuthAndReturnToken(SMessageAuth smessage) {
        JsonObject output = null;
        JsonObject response = authOpenstack(smessage,false);
        if(response!=null) {
            output = new JsonObject();
            output.add("X-Auth-Token", response.getAsJsonObject("access").getAsJsonObject("token").get("id"));
            output.add("expires", response.getAsJsonObject("access").getAsJsonObject("token").get("expires"));
            //output.add("tenant-id", response.getAsJsonObject("access").getAsJsonObject("token").getAsJsonObject("tenant").get("id"));
            output.addProperty("swift-url", protocol+openstackIPForPublishing+portSwiftMember+"/v1/AUTH_"+
                    response.getAsJsonObject("access").getAsJsonObject("token").getAsJsonObject("tenant").get("id").getAsString());
        }
        return output;
    }
    /**
     * Authenticates against the Keystone admin port and reduces the response to
     * token, expiry and tenant id.
     * <p>
     * input: {"service":""}, {"username":""}, {"password":""}
     * <p>
     * output {"X-Auth-Token":"","expires":"","tenant-id":""}
     *
     * @param smessage admin credentials (tenant/service name, username, password)
     * @return JsonObject token summary, or null when authentication failed
     */
    public static JsonObject adminAuth(SMessageAuth smessage) {
        JsonObject output = null;
        JsonObject response = authOpenstack(smessage,true);
        if(response!=null) {
            output = new JsonObject();
            output.add("X-Auth-Token", response.getAsJsonObject("access").getAsJsonObject("token").get("id"));
            output.add("expires", response.getAsJsonObject("access").getAsJsonObject("token").get("expires"));
            output.add("tenant-id", response.getAsJsonObject("access").getAsJsonObject("token").getAsJsonObject("tenant").get("id"));
        }
        return output;
    }
    /**
     * Gets the limits of a service/tenant in Openstack (Nova GET /v2/{tenant}/limits).
     * <p>
     * output: like openstack gives it back.
     *
     * @param xAuthToken auth token of the caller
     * @param tenantId   tenant whose limits are requested
     * @return JsonObject limits response, or null when the status is not 200 OK
     */
    public static JsonObject serviceLimits(String xAuthToken, String tenantId) {
        Client client = Client.create(returnClientConfig());
        WebResource tokens = client.resource(protocol+internalOpenstackIP+portNovaMember+"/v2/"+tenantId+"/limits");
        ClientResponse response = tokens.accept(MediaType.APPLICATION_JSON).header("X-Auth-Token", xAuthToken).get(ClientResponse.class);
        JsonObject output = null;
        if(response.getClientResponseStatus()==Status.OK) {
            output = response.getEntity(JsonObject.class);
        }
        return output;
    }
    /**
     * Creates one compute instance for a service/tenant (Nova POST /v2/{tenant}/servers).
     * Builds a server request with min_count/max_count fixed to 1.
     * <p>
     * output: like openstack gives it back; Nova answers 202 ACCEPTED on success.
     *
     * @param xAuthToken auth token of the caller
     * @param tenantId   tenant the instance belongs to
     * @param name       server name
     * @param script     cloud-init user_data for the new instance
     * @param imageRef   image reference to boot from
     * @param flavorRef  flavor reference (instance size)
     * @return JsonObject Nova response, or null when the status is not 202 ACCEPTED
     */
    public static JsonObject createInstance(String xAuthToken, String tenantId, JsonElement name, JsonElement script, JsonElement imageRef ,JsonElement flavorRef) {
        Client client = Client.create(returnClientConfig());
        WebResource tokens = client.resource(protocol+internalOpenstackIP+portNovaMember+"/v2/"+tenantId+"/servers");
        JsonObject request = new JsonObject();
        JsonObject serverdata = new JsonObject();
        serverdata.add("name", name);
        serverdata.add("user_data", script);
        serverdata.add("imageRef", imageRef);
        serverdata.add("flavorRef", flavorRef);
        serverdata.addProperty("max_count", "1");
        serverdata.addProperty("min_count", "1");
        request.add("server", serverdata);
        ClientResponse response = tokens.accept(MediaType.APPLICATION_JSON).header("X-Auth-Token", xAuthToken).type(MediaType.APPLICATION_JSON).post(ClientResponse.class,request);
        JsonObject output = null;
        if(response.getClientResponseStatus()==Status.ACCEPTED) {
            output = response.getEntity(JsonObject.class);
        }
        return output;
    }
    /**
     * Uploads a file to the given container of a service/tenant to Swift via a
     * chunked HTTP PUT, streaming byte-by-byte from the input stream.
     *
     * @param bis        stream with the file contents (closed by this method)
     * @param xAuthToken auth token of the caller
     * @param tenantid   tenant owning the Swift account
     * @param path       container/object path inside the tenant's Swift account
     * @return ResponseBuilder carrying the Swift status code; on 201 the entity
     *         contains the publishable "swift-url" of the uploaded object
     * @throws MalformedURLException
     * @throws IOException
     */
    public static ResponseBuilder uploadFile(InputStream bis, String xAuthToken, String tenantid, String path ) throws MalformedURLException, IOException {
        URLConnection urlconnection=null;
        urlconnection = (HttpURLConnection) new URL(protocol+internalOpenstackIP+portSwiftMember+"/v1/AUTH_"+tenantid+"/"+path).openConnection();
        urlconnection.setDoOutput(true);
        urlconnection.setDoInput(true);
        if (urlconnection instanceof HttpURLConnection) {
            ((HttpURLConnection)urlconnection).setRequestMethod("PUT");
            ((HttpURLConnection)urlconnection).setChunkedStreamingMode(16384);//1024? 4096? 16384?
            ((HttpURLConnection)urlconnection).setRequestProperty("X-Auth-Token", xAuthToken);
            ((HttpURLConnection)urlconnection).connect();
        }
        DataOutputStream bos = new DataOutputStream(urlconnection.getOutputStream());
        int i;
        while ((i = bis.read()) != -1) {
            bos.write(i);
        }
        bis.close();
        bos.flush();
        bos.close();
        //System.out.println(((HttpURLConnection)urlconnection).getResponseMessage());
        //System.out.println(((HttpURLConnection)urlconnection).getContentLength());
        //InputStream inputStream = ((HttpURLConnection)urlconnection).getInputStream();
        //Object responseObject = ((HttpURLConnection)urlconnection).getContent();
        //String responseType = ((HttpURLConnection)urlconnection).getContentType();
        int responseCode = ((HttpURLConnection)urlconnection).getResponseCode();
        // if ((responseCode>= 200) &&(responseCode<=202) ) {
        //
        // inputStream = ((HttpURLConnection)urlconnection).getInputStream();
        // int j;
        // while ((j = inputStream.read()) >0) {
        //
        // System.out.print((char)j);
        //
        // }
        //
        // } else {
        //
        // inputStream = ((HttpURLConnection)urlconnection).getErrorStream();
        //
        // }
        //inputStream.close();
        ((HttpURLConnection)urlconnection).disconnect();
        JsonObject responseObject = null;
        // 201 Created: hand the public Swift URL of the new object back to the client.
        if(responseCode == 201) {
            responseObject = new JsonObject();
            responseObject.addProperty("swift-url", protocol+openstackIPForPublishing+portSwiftMember+"/v1/AUTH_"+tenantid+"/"+path);
        }
        return Response.ok(responseObject).status(responseCode);//new ResponseImpl(responseCode, null, responseMessage, String.class); //(responseCode, null, null, null);
    }
    /**
     * Gets all uploaded Files in a given container of a service/tenant in Swift
     * (GET on the container listing).
     *
     * @param xAuthToken auth token of the caller
     * @param tenantid   tenant owning the Swift account
     * @param path       container path inside the tenant's Swift account
     * @return JsonArray object listing, or null when the status is not 200 OK
     */
    public static JsonArray getUploadedFiles(String xAuthToken, String tenantid, String path ) {
        Client client = Client.create(returnClientConfig());
        WebResource tokens = client.resource(protocol+internalOpenstackIP+portSwiftMember+"/v1/AUTH_"+tenantid+"/"+path);
        ClientResponse response = tokens.accept(MediaType.APPLICATION_JSON).header("X-Auth-Token", xAuthToken).get(ClientResponse.class);
        JsonArray output = null;
        if(response.getClientResponseStatus()==Status.OK) {
            output = response.getEntity(JsonArray.class);
        }
        return output;
    }
    /**
     * Creates a Swift container for the tenant (PUT on the container URL).
     *
     * @param xAuthToken    auth token of the caller
     * @param tenantid      tenant owning the Swift account
     * @param containerName name of the container to create
     * @return Status the raw Jersey response status from Swift
     */
    public static Status createContainer(String xAuthToken, String tenantid, String containerName){
        Client client = Client.create(returnClientConfig());
        WebResource tokens = client.resource(protocol+internalOpenstackIP+portSwiftMember+"/v1/AUTH_"+tenantid+"/"+containerName);
        ClientResponse response = tokens.accept(MediaType.APPLICATION_JSON).header("X-Auth-Token", xAuthToken).put(ClientResponse.class);
        return response.getClientResponseStatus();
    }
    // /**
    // * Gets a file from a given container of a service/tenant in Swift.
    // *
    // * @param xAuthToken
    // * @param tenantid
    // * @param path
    // * @return the file
    // * @throws IOException
    // * @throws ClassNotFoundException
    // */
    // public static ResponseBuilder getFile(String xAuthToken, String tenantid, String path ) throws IOException, ClassNotFoundException {
    // Client client = Client.create(returnClientConfig());
    // client.setChunkedEncodingSize(16384);
    // WebResource tokens = client.resource(protocol+openstackIP+portSwiftMember+"/v1/AUTH_"+tenantid+"/"+path);
    // ClientResponse response = tokens.header("X-Auth-Token", xAuthToken).get(ClientResponse.class);
    //
    // ByteArrayOutputStream bos = new ByteArrayOutputStream();
    // IOUtils.copy(response.getEntityInputStream(), bos);
    //
    // ServletOutputStream
    //
    // return Response.ok(bos.toByteArray()).type(response.getType());
    //
    // }
    /**
     * Gets a file from a given container of a service/tenant in Swift and
     * streams it byte-by-byte into the supplied servlet output stream, which is
     * flushed and closed when done.
     *
     * @param bos        servlet stream the file content is written to
     * @param xAuthToken auth token of the caller
     * @param tenantid   tenant owning the Swift account
     * @param path       container/object path inside the tenant's Swift account
     * @throws IOException
     * @throws ClassNotFoundException
     */
    public static void getFile(ServletOutputStream bos, String xAuthToken, String tenantid, String path ) throws IOException, ClassNotFoundException {
        Client client = Client.create(returnClientConfig());
        client.setChunkedEncodingSize(16384);
        WebResource tokens = client.resource(protocol+internalOpenstackIP+portSwiftMember+"/v1/AUTH_"+tenantid+"/"+path);
        ClientResponse response = tokens.header("X-Auth-Token", xAuthToken).get(ClientResponse.class);
        InputStream bis = response.getEntityInputStream();
        int i;
        while ((i = bis.read()) != -1) {
            bos.write(i);
        }
        bis.close();
        bos.flush();
        bos.close();
        //return Response.ok().type(response.getType());
    }
    /**
     * Gets a file from a given container of a service/tenant in Swift and wraps
     * it as a lazily streamed JAX-RS Response (the Swift input stream is only
     * consumed when the container writes the entity to the client).
     *
     * @param xAuthToken auth token of the caller
     * @param tenantid   tenant owning the Swift account
     * @param path       container/object path inside the tenant's Swift account
     * @return Response with Swift's status code; on 2xx/226 the entity streams the file
     * @throws IOException
     * @throws ClassNotFoundException
     */
    public static Response getFile2(String xAuthToken, String tenantid, String path ) throws IOException, ClassNotFoundException {
        StreamingOutput clientOS = null;
        HttpURLConnection urlconnection = (HttpURLConnection) new URL(protocol+internalOpenstackIP+portSwiftMember+"/v1/AUTH_"+tenantid+"/"+path).openConnection();
        urlconnection.addRequestProperty("X-Auth-Token", xAuthToken);
        urlconnection.setDoOutput(true);
        urlconnection.setRequestMethod("GET");
        int responseCode = urlconnection.getResponseCode();
        System.out.println(responseCode);
        System.out.println(protocol+internalOpenstackIP+portSwiftMember+"/v1/AUTH_"+tenantid+"/"+path);
        if((responseCode >= 200 && responseCode <= 208 )|| responseCode == 226){
            final InputStream serviceIS = urlconnection.getInputStream();
            clientOS = new StreamingOutput() {
                @Override
                public void write(OutputStream clientOS) throws IOException, WebApplicationException{
                    int i;
                    while ((i = serviceIS.read()) != -1) {
                        clientOS.write(i);
                    }
                    serviceIS.close();
                    clientOS.flush();
                    clientOS.close();
                }
            };
        }
        return Response.status(responseCode).entity(clientOS).type(urlconnection.getContentType()).build();
    }
    // REQ: curl -i http://137.226.58.142:35357/v2.0/users -X POST -H "User-Agent: python-keystoneclient" -H "Content-Type: application/json" -H "X-Auth-Token: f1895418260b4c549969d8f4c58e14e9"
    // REQ BODY: {"user": {"email": null, "password": "TTest", "enabled": true, "name": "TestUser", "tenantId": null}}
    //
    // RESP: [200] {'date': 'Wed, 11 Dec 2013 16:19:11 GMT', 'content-type': 'application/json', 'content-length': '122', 'vary': 'X-Auth-Token'}
    // RESP BODY: {"user": {"name": "TestUser", "id": "9da4e5a0ef6f411287290da982373838", "tenantId": null, "enabled": true, "email": null}}
    /**
     * Creates a new Keystone user (POST /v2.0/users on the admin port), see the
     * curl example above for the wire format.
     *
     * @param xAuthToken admin auth token
     * @param name       user name
     * @param password   password
     * @param email      e-mail address (may be null)
     * @param tenantId   tenant to assign the user to (may be null)
     * @param enabled    whether the account is active
     * @return JsonObject Keystone response, or null when the status is not 200 OK
     */
    public static JsonObject createNewUser(String xAuthToken, String name, String password, String email, String tenantId, Boolean enabled ) {
        // NOTE(review): r is never used; candidate for removal.
        Response.ResponseBuilder r = null;
        Client client = Client.create(returnClientConfig());
        WebResource tokens = client.resource(protocol+internalOpenstackIP+portKeystoneAdmin+"/v2.0/users");
        JsonObject jsonUserData = new JsonObject();
        JsonObject jsonUser = new JsonObject();
        jsonUserData.addProperty("name", name);
        jsonUserData.addProperty("password", password);
        jsonUserData.addProperty("email", email);
        jsonUserData.addProperty("tenantId", tenantId);
        jsonUserData.addProperty("enabled", enabled);
        jsonUser.add("user", jsonUserData);
        // NOTE(review): the body is set twice (entity(...) and the post(...) argument) — harmless but redundant.
        ClientResponse response = tokens.accept(MediaType.APPLICATION_JSON).entity(jsonUser).header("X-Auth-Token", xAuthToken).post(ClientResponse.class,jsonUser);
        JsonObject output = null;
        if(response.getClientResponseStatus()==Status.OK) {
            output = response.getEntity(JsonObject.class);
        }
        return output;
    }
    // REQ: curl -i http://137.226.58.142:35357/v2.0/tenants -X POST -H "User-Agent: python-keystoneclient" -H "Content-Type: application/json" -H "X-Auth-Token: 01a935ea36884711a4f30e06b9bcca30"
    // REQ BODY: {"tenant": {"enabled": true, "name": "TestUser", "description": null}}
    /**
     * Creates a new Keystone tenant ("service") via POST /v2.0/tenants on the
     * admin port; the tenant is created enabled.
     *
     * @param service     tenant name
     * @param description free-text description (may be null)
     * @param xAuthToken  admin auth token
     * @return JsonObject Keystone response, or null when the status is not 200 OK
     */
    public static JsonObject createNewService(String service, String description,String xAuthToken) {
        Client client = Client.create(returnClientConfig());
        WebResource tokens = client.resource(protocol+internalOpenstackIP+portKeystoneAdmin+"/v2.0/tenants");
        JsonObject jsonTenantData = new JsonObject();
        JsonObject jsonTenant = new JsonObject();
        jsonTenantData.addProperty("name", service);
        jsonTenantData.addProperty("description", description);
        jsonTenantData.addProperty("enabled", true);
        jsonTenant.add("tenant", jsonTenantData);
        ClientResponse response = tokens.accept(MediaType.APPLICATION_JSON).header("X-Auth-Token", xAuthToken).post(ClientResponse.class,jsonTenant);
        JsonObject output = null;
        if(response.getClientResponseStatus()==Status.OK) {
            output = response.getEntity(JsonObject.class);
        }
        return output;
    }
    // curl -X PUT -H 'X-Auth-Token:<token>' https://localhost:35357/v2.0/tenants/<tenantid>/users/<userid>/roles/OS-KSADM/<role-id>
    /**
     * Grants the given role to a user within a tenant (Keystone OS-KSADM PUT).
     *
     * @param tenantid   tenant the role applies to
     * @param userid     user receiving the role
     * @param roleid     role to grant
     * @param xAuthToken admin auth token
     * @return JsonObject Keystone response, or null when the status is not 200 OK
     */
    public static JsonObject addUserRole(String tenantid, String userid, String roleid, String xAuthToken) {
        Client client = Client.create(returnClientConfig());
        WebResource tokens = client.resource(protocol+internalOpenstackIP+portKeystoneAdmin+"/v2.0/tenants/"+tenantid+"/users/"+userid+"/roles/OS-KSADM/"+roleid);
        ClientResponse response = tokens.accept(MediaType.APPLICATION_JSON).header("X-Auth-Token", xAuthToken).put(ClientResponse.class);
        JsonObject output = null;
        if(response.getClientResponseStatus()==Status.OK) {
            output = response.getEntity(JsonObject.class);
        }
        return output;
    }
    // curl -i http://137.226.58.2:35357/v2.0/OS-KSADM/roles -X GET -H "X-Auth-Token: "
    /**
     * Lists all roles known to Keystone (OS-KSADM extension, admin port).
     *
     * @param xAuthToken admin auth token
     * @return JsonObject role listing, or null when the status is not 200 OK
     */
    public static JsonObject getRoles(String xAuthToken) {
        Client client = Client.create(returnClientConfig());
        WebResource tokens = client.resource(protocol+internalOpenstackIP+portKeystoneAdmin+"/v2.0/OS-KSADM/roles");
        ClientResponse response = tokens.accept(MediaType.APPLICATION_JSON).header("X-Auth-Token", xAuthToken).get(ClientResponse.class);
        JsonObject output = null;
        if(response.getClientResponseStatus()==Status.OK) {
            output = response.getEntity(JsonObject.class);
        }
        return output;
    }
    // curl -i http://137.226.58.142:35357/v2.0/users -X GET -H "X-Auth-Token: "
    /**
     * Lists all Keystone users (admin port).
     *
     * @param xAuthToken admin auth token
     * @return JsonObject user listing, or null when the status is not 200 OK
     */
    public static JsonObject getUsers(String xAuthToken) {
        Client client = Client.create(returnClientConfig());
        WebResource tokens = client.resource(protocol+internalOpenstackIP+portKeystoneAdmin+"/v2.0/users");
        ClientResponse response = tokens.accept(MediaType.APPLICATION_JSON).header("X-Auth-Token", xAuthToken).get(ClientResponse.class);
        JsonObject output = null;
        if(response.getClientResponseStatus()==Status.OK) {
            output = response.getEntity(JsonObject.class);
        }
        return output;
    }
    //curl -i http://137.226.58.142:8774/v2/d34a0c1691fd4bf6b89214e2731c0b33/images/detail -X GET -H "X-Auth-Token: 4ffb1aa188804dd4bce98e4ce11d8839"
    /**
     * Lists the images visible to a tenant with details (Nova /images/detail).
     *
     * @param xAuthToken auth token of the caller
     * @param tenantId   tenant to list images for
     * @return JsonObject image listing, or null when the status is not 200 OK
     */
    public static JsonObject getImages(String xAuthToken, String tenantId) {
        Client client = Client.create(returnClientConfig());
        WebResource tokens = client.resource(protocol+internalOpenstackIP+portNovaMember+"/v2/"+tenantId+"/images/detail");
        ClientResponse response = tokens.accept(MediaType.APPLICATION_JSON).header("X-Auth-Token", xAuthToken).get(ClientResponse.class);
        JsonObject output = null;
        if(response.getClientResponseStatus()==Status.OK) {
            output = response.getEntity(JsonObject.class);
        }
        return output;
    }
    //curl -i http://137.226.58.142:8774/v2/d34a0c1691fd4bf6b89214e2731c0b33/servers/detail -X GET -H "X-Auth-Token: e8e4949e56ab4072be08287d3fd52d3d"
    /**
     * Lists a tenant's compute instances with details (Nova /servers/detail).
     *
     * @param xAuthToken auth token of the caller
     * @param tenantId   tenant to list instances for
     * @return JsonObject server listing, or null when the status is not 200 OK
     */
    public static JsonObject getInstances(String xAuthToken, String tenantId) {
        Client client = Client.create(returnClientConfig());
        WebResource tokens = client.resource(protocol+internalOpenstackIP+portNovaMember+"/v2/"+tenantId+"/servers/detail");
        ClientResponse response = tokens.accept(MediaType.APPLICATION_JSON).header("X-Auth-Token", xAuthToken).get(ClientResponse.class);
        JsonObject output = null;
        if(response.getClientResponseStatus()==Status.OK) {
            output = response.getEntity(JsonObject.class);
        }
        return output;
    }
    //curl -i http://137.226.58.142:8774/v2/d34a0c1691fd4bf6b89214e2731c0b33/servers/detail -X GET -H "X-Auth-Token: e8e4949e56ab4072be08287d3fd52d3d"
    //curl -i http://137.226.58.142:8774/v2/d34a0c1691fd4bf6b89214e2731c0b33/servers/44268c11-64d9-4b7b-95ea-d63f28c6db5f/action -X POST -H "Content-Type: application/json" -H "Accept: application/json" -H "X-Auth-Token: 67bdf0dc06f04d8fb75dfe27ba946ca6" -d '{"os-start": null}'
    /**
     * Runs a Nova server action (e.g. {"os-start": null}) on an instance.
     * NOTE(review): many Nova actions answer 202 ACCEPTED with an empty body,
     * in which case this returns null even on success — confirm callers expect that.
     *
     * @param xAuthToken auth token of the caller
     * @param tenantId   tenant owning the instance
     * @param instanceId id of the instance to act on
     * @param action     Nova action body, e.g. {"os-start": null}
     * @return JsonObject Nova response, or null when the status is not 200 OK
     */
    public static JsonObject doActionOnInstance(String xAuthToken, String tenantId, String instanceId, JsonObject action) {
        Client client = Client.create(returnClientConfig());
        WebResource tokens = client.resource(protocol+internalOpenstackIP+portNovaMember+"/v2/"+tenantId+"/servers/"+instanceId+"/action");
        ClientResponse response = tokens.entity(action).type(MediaType.APPLICATION_JSON).accept(MediaType.APPLICATION_JSON).header("X-Auth-Token", xAuthToken).post(ClientResponse.class);
        JsonObject output = null;
        if(response.getClientResponseStatus()==Status.OK) {
            output = response.getEntity(JsonObject.class);
        }
        return output;
    }
}
| {
"content_hash": "8050f062ae1c6f3cd25c5281b839591a",
"timestamp": "",
"source": "github",
"line_count": 587,
"max_line_length": 275,
"avg_line_length": 36.51107325383305,
"alnum_prop": 0.7288167226577081,
"repo_name": "learning-layers/Tethys",
"id": "0b63e1ac6b3be936497d2a22f1b1c0cd14963724",
"size": "21432",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/de/dbis/acis/cloud/Tethys/client/OpenstackClient.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "92510"
}
],
"symlink_target": ""
} |
<!-- Navigation -->
<nav class="navbar navbar-default navbar-fixed-top">
<div class="container">
<!-- Brand and toggle get grouped for better mobile display -->
<div class="navbar-header page-scroll">
<button type="button" class="navbar-toggle" data-toggle="collapse" data-target="#bs-example-navbar-collapse-1">
<span class="sr-only">Toggle navigation</span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<a class="navbar-brand page-scroll" href="#page-top">{{site.title}}</a>
<!-- <a class="navbar-brand page-scroll" href="#page-top"><img src="img/logos/logo.png"/></a>-->
</div>
<!-- Collect the nav links, forms, and other content for toggling -->
<div class="collapse navbar-collapse" id="bs-example-navbar-collapse-1">
<ul class="nav navbar-nav navbar-right">
<li class="hidden">
<a href="#page-top"></a>
</li>
<li>
<a class="page-scroll" href="#portfolio">Portfolio</a>
</li>
<li>
<a class="page-scroll" href="#blog">Blog</a>
</li>
<li>
<a class="page-scroll" href="#stories">Stories</a>
</li>
<li>
<a class="page-scroll" href="#about">About</a>
</li>
<li>
<a class="page-scroll" href="#contact">Contact</a>
</li>
</ul>
</div>
<!-- /.navbar-collapse -->
</div>
<!-- /.container-fluid -->
</nav>
<!-- Header -->
<header>
<div class="container">
<div class="intro-text">
<div class="intro-lead-in">Connect to Nature</div>
<div class="intro-heading">It's Full of Surprises!!</div>
<!-- <a href="#services" class="page-scroll btn btn-xl">More</a>-->
</div>
</div>
</header>
<!--
{% for page in site.pages %}
{% if page.title %}<a class="page-link" href="{{ page.url | prepend: site.baseurl }}">{{ page.title }}</a>{% endif %}
{% endfor %}
-->
| {
"content_hash": "80bfa1db64b9bca599023c7400028891",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 127,
"avg_line_length": 41.81666666666667,
"alnum_prop": 0.43961737744121165,
"repo_name": "idbytes/idbytes.github.io",
"id": "3974407659d3abca96500ac910700f12a62eda4e",
"size": "2509",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "_includes/header.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "17879"
},
{
"name": "HTML",
"bytes": "22575"
},
{
"name": "JavaScript",
"bytes": "42756"
},
{
"name": "PHP",
"bytes": "1092"
},
{
"name": "Ruby",
"bytes": "715"
}
],
"symlink_target": ""
} |
Tsoftware\Captcha is a captcha generator for Laravel 5!
## Usage
First:
```bash
composer require tsoftware/captcha
```
In `config/app.php` add
```php
'providers' => [
Tsoftware\Captcha\CaptchaProvider::class,
]
'aliases' => [
'Captcha' => Tsoftware\Captcha\CaptchaFacade::class,
]
```
For example, in the `app/Http/Controllers/Auth/AuthController.php` file:
```php
use Captcha;
public function getCaptcha()
{
return Captcha::output('_captcha', 100, 40, 4);
}
protected function validator(array $data)
{
$validator = Validator::make($data, [
'name' => 'required|min:5|max:20',
'email' => 'required|email|max:255|unique:users',
'password' => 'required|confirmed|min:5',
'captcha' => 'required',
]);
$validator->after(function($validator) use ($data){
if (!Captcha::check($data['captcha']))
{
$validator->errors()->add('captcha', 'Wrong captcha code!');
}
});
return $validator;
}
```
## Feedback
* Mail(admin@yantao.info)
* QQ: 1065317290
* Blog: [Yantao.Info](http://www.yantao.info)
* GitHub: [tsoftware-org/captcha](https://github.com/tsoftware-org/captcha)
| {
"content_hash": "62e31d2f7f699f0170abe96ef7574879",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 75,
"avg_line_length": 20.16949152542373,
"alnum_prop": 0.611764705882353,
"repo_name": "tsoftware-org/captcha",
"id": "6e925f615e682b4e838da40ac4c71f4a47ed2470",
"size": "1206",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "readme.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "5023"
}
],
"symlink_target": ""
} |
<?php
namespace Tmdb\Api;
/**
 * Class Collections
 * @package Tmdb\Api
 * @see http://docs.themoviedb.apiary.io/#collections
 */
class Collections extends AbstractApi
{
    /**
     * Get the basic collection information for a specific collection id.
     *
     * You can get the ID needed for this method by making a /movie/{id} request
     * and paying attention to the belongs_to_collection hash.
     *
     * Movie parts are not sorted in any particular order.
     * If you would like to sort them yourself you can use the provided release_date.
     *
     * @param $collection_id
     * @param array $parameters query string parameters passed through to TMDB
     * @param array $headers    extra HTTP headers for the request
     * @return mixed decoded API response
     */
    public function getCollection($collection_id, array $parameters = [], array $headers = [])
    {
        return $this->get('collection/' . $collection_id, $parameters, $headers);
    }
    /**
     * Get all of the images for a particular collection by collection id.
     *
     * @param $collection_id
     * @param array $parameters query string parameters passed through to TMDB
     * @param array $headers    extra HTTP headers for the request
     * @return mixed decoded API response
     */
    public function getImages($collection_id, array $parameters = [], array $headers = [])
    {
        return $this->get('collection/' . $collection_id . '/images', $parameters, $headers);
    }
    /**
     * Get the list of translations that exist for a collection by collection id.
     *
     * @param $collection_id
     * @param array $parameters query string parameters passed through to TMDB
     * @param array $headers    extra HTTP headers for the request
     * @return mixed decoded API response
     */
    public function getTranslations($collection_id, array $parameters = [], array $headers = [])
    {
        return $this->get('collection/' . $collection_id . '/translations', $parameters, $headers);
    }
}
| {
"content_hash": "78892b96ac576740a8abf6dac3ef5f5d",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 99,
"avg_line_length": 29.20689655172414,
"alnum_prop": 0.6210153482880756,
"repo_name": "php-tmdb/api",
"id": "15e1df3b208a2af4b0f930aa516be226d4e06055",
"size": "2027",
"binary": false,
"copies": "1",
"ref": "refs/heads/4.1",
"path": "lib/Tmdb/Api/Collections.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "984412"
}
],
"symlink_target": ""
} |
<?php
namespace cascade\modules\core\TypeFile\models;
use cascade\models\Storage;
use canis\helpers\Html;
/**
* ObjectFile is the model class for table "object_file".
*
* @property string $id
* @property string $storage_id
* @property string $name
* @property string $created
* @property string $modified
* @property Registry $registry
* @property Storage $storage
*
* @author Jacob Morrison <email@ofjacob.com>
*/
class ObjectFile extends \cascade\components\types\ActiveRecord
{
/**
* @var [[@doctodo var_type:_labelName]] [[@doctodo var_description:_labelName]]
*/
protected $_labelName;
/**
* @inheritdoc
*/
public $descriptorField = 'labelName';
/**
* Get label name.
*
* @return [[@doctodo return_type:getLabelName]] [[@doctodo return_description:getLabelName]]
*/
public function getLabelName()
{
if (is_null($this->_labelName)) {
$this->_labelName = $this->name;
$storage = $this->storage;
if (!empty($storage)) {
if (empty($this->_labelName)) {
$this->_labelName = $storage->file_name;
} else {
$this->_labelName .= " ({$storage->file_name})";
}
}
}
return $this->_labelName;
}
/**
* @inheritdoc
*/
public static function searchFields()
{
$modelClass = get_called_class();
$model = new $modelClass();
$fields = [];
$fields[] = ['name'];
$fields[] = ['{{storage}}.[[file_name]]'];
return $fields;
}
/**
* Set label name.
*
* @param [[@doctodo param_type:value]] $value [[@doctodo param_description:value]]
*/
public function setLabelName($value)
{
if (!empty($this->name)) {
$this->_labelName = $this->name . ' (' . $value . ')';
} else {
$this->_labelName = $value;
}
}
/**
* @inheritdoc
*/
public static function find()
{
$query = parent::find();
$alias = $query->primaryAlias;
$query->select(['`' . $alias . '`.*', '`storage`.`file_name` as `labelName`']);
$query->join('INNER JOIN', Storage::tableName() . ' storage', '`storage`.`id` = `' . $alias . '`.`storage_id`');
return $query;
}
/* public function getDescriptor()
{
$label = $this->name;
$storage = $this->storage;
if (!empty($storage)) {
if (empty($label)) {
$label = $storage->file_name;
} else {
$label .= " ({$storage->file_name})";
}
}
return $label;
}*/
/**
* @inheritdoc
*/
public static function tableName()
{
return 'object_file';
}
/**
* @inheritdoc
*/
public function behaviors()
{
return array_merge(parent::behaviors(), [
'Storage' => [
'class' => 'cascade\components\storageHandlers\StorageBehavior',
],
]);
}
/**
* @inheritdoc
*/
public function rules()
{
return [
[['storage_id'], 'required', 'on' => 'create'],
[['labelName'], 'safe'],
[['id'], 'string', 'max' => 36],
[['name'], 'string', 'max' => 255],
];
}
/**
* @inheritdoc
*/
public function fieldSettings()
{
return [
'name' => [],
'storage_id' => ['formField' => ['type' => 'file']],
];
}
/**
* @inheritdoc
*/
public function formSettings($name, $settings = [])
{
if (!isset($settings['fields'])) {
$settings['fields'] = [];
}
$settings['fields'][] = ['name'];
$settings['fields'][] = ['storage_id'];
// $settings['fields'][] = [];
return $settings;
}
/**
* @inheritdoc
*/
public function attributeLabels()
{
return [
'id' => 'ID',
'storage_id' => 'File',
'name' => 'Name',
'created' => 'Created',
'modified' => 'Modified',
];
}
/**
* Get registry.
*
* @return \yii\db\ActiveRelation
*/
public function getRegistry()
{
return $this->hasOne(Registry::className(), ['id' => 'id']);
}
/**
* Get storage.
*
* @return \yii\db\ActiveRelation
*/
public function getStorage()
{
    // One-to-one relation to the stored-file record referenced by storage_id.
    return $this->hasOne(Storage::className(), ['id' => 'storage_id']);
}
/**
 * Get a download link for this file.
 *
 * @param string|null $label link text; defaults to this object's descriptor [optional]
 * @param array $htmlAttributes HTML attributes for the generated anchor tag [optional]
 *
 * @return string the rendered HTML anchor tag
 */
public function getDownloadLink($label = null, $htmlAttributes = [])
{
    // Default the link text to the object's descriptor.
    if (is_null($label)) {
        $label = $this->descriptor;
    }

    // Fix: forward $htmlAttributes to the anchor tag — previously the
    // parameter was accepted but silently ignored.
    return Html::a($label, ['/object/view', 'subaction' => 'download', 'id' => $this->id], $htmlAttributes);
}
}
| {
"content_hash": "070b3fde605a3f7618d1a792b5b43de5",
"timestamp": "",
"source": "github",
"line_count": 218,
"max_line_length": 120,
"avg_line_length": 24.325688073394495,
"alnum_prop": 0.48161418065246087,
"repo_name": "canis-io/cascade-core-types",
"id": "e3613e65e1a74a5e679f9a6e3715ae24d309bbd9",
"size": "5427",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "TypeFile/models/ObjectFile.php",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "270"
},
{
"name": "JavaScript",
"bytes": "250"
},
{
"name": "PHP",
"bytes": "197910"
}
],
"symlink_target": ""
} |
require 'coveralls'
Coveralls.wear!
require "test/unit"
require "#{ENV["project_path"]}/test/test_helper"
require "#{ENV["project_path"]}/lib/Trick"
require "#{ENV["project_path"]}/lib/Invoker"
# Exercises the "bypass parent private" trick: a well-behaved child class
# cannot reach the parent's private API, while the naughty child can.
class RubyTricksTest < Test::Unit::TestCase
  attr_accessor :invoker

  def initialize(*args)
    super(*args)
    @invoker = Invoker.new
  end

  def test_intermediate_bypass_parent_private
    invoker.current_trick = Trick.new("intermediate/bypass_parent_private.rb")
    response = invoker.invoke

    # First three entries are checked individually; the object-bearing lines
    # carry the expected class name alongside the message.
    expected = [
      "Nice child:",
      ["Oops! private method `foo' called for CLASSNAME", "NiceChild"],
      ["Oops! private method `dangerous_operation' called for CLASSNAME", "NiceChild"],
      "My variables: ",
      "",
      "Naughty child:",
      "I'm private",
      'My variables: ["kill-signal", ["Malicious", "Data"]]'
    ]

    assert_expected(response[0], expected[0])
    assert_object_line(response[1], *expected[1])
    assert_object_line(response[2], *expected[2])
    assert_expected(response[3..-1], expected[3..-1])
  end
end
# class InvokerTest < Test::Unit::TestCase
# invoker = Invoker.new(:test)
# def test_method_one
# invoker.InputManager = InputManager.new(["begin","repeat","quit"])
# invoker.menu
# ...
# end
# end
| {
"content_hash": "251df2a94cd4d7abc8dbfa10f39850b2",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 87,
"avg_line_length": 27.630434782608695,
"alnum_prop": 0.6490952006294256,
"repo_name": "devonparsons/ruby-tricks",
"id": "84116c8a87631607ccdcd6f8f977a1b9ebb8bcda",
"size": "1312",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/intermediate_test.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "17666"
}
],
"symlink_target": ""
} |
#include "catalog/PartitionScheme.hpp"
#include <cstddef>
#include <limits>
#include <unordered_set>
#include <utility>
#include <vector>
#include "catalog/Catalog.pb.h"
#include "catalog/PartitionSchemeHeader.hpp"
#include "storage/StorageBlockInfo.hpp"
#include "glog/logging.h"
using std::move;
using std::unordered_set;
using std::vector;
namespace quickstep {
bool PartitionScheme::ProtoIsValid(
    const serialization::PartitionScheme &proto) {
  // A proto is usable only when it is fully initialized and carries a valid
  // header whose declared partition count matches the serialized partitions.
  if (!proto.IsInitialized() ||
      !PartitionSchemeHeader::ProtoIsValid(proto.header())) {
    return false;
  }
  return static_cast<std::size_t>(proto.partitions_size()) ==
         proto.header().num_partitions();
}
PartitionScheme* PartitionScheme::ReconstructFromProto(const serialization::PartitionScheme &proto) {
  DCHECK(ProtoIsValid(proto))
      << "Attempted to create PartitionScheme from an invalid proto description:\n"
      << proto.DebugString();

  // Rebuild every partition's block set from its serialized counterpart.
  vector<unordered_set<block_id>> blocks_in_partition;
  blocks_in_partition.reserve(proto.partitions_size());
  for (int partition_num = 0; partition_num < proto.partitions_size(); ++partition_num) {
    const serialization::Partition &partition_proto = proto.partitions(partition_num);
    unordered_set<block_id> partition_blocks;
    for (int block_num = 0; block_num < partition_proto.blocks_size(); ++block_num) {
      partition_blocks.insert(partition_proto.blocks(block_num));
    }
    blocks_in_partition.push_back(move(partition_blocks));
  }

  return new PartitionScheme(
      PartitionSchemeHeader::ReconstructFromProto(proto.header()),
      move(blocks_in_partition));
}
serialization::PartitionScheme PartitionScheme::getProto() const {
  serialization::PartitionScheme proto;
  proto.mutable_header()->MergeFrom(header_->getProto());

  // Serialize each partition's block set while holding that partition's
  // shared lock, so concurrent block additions don't corrupt the snapshot.
  for (std::size_t part = 0; part < blocks_in_partition_.size(); ++part) {
    serialization::Partition *partition_proto = proto.add_partitions();
    SpinSharedMutexSharedLock<false> lock(blocks_in_partition_mutexes_[part]);
    for (const block_id block : blocks_in_partition_[part]) {
      partition_proto->add_blocks(block);
    }
  }
  return proto;
}
partition_id PartitionScheme::getPartitionForBlock(const block_id block) const {
  // Linear scan over the partitions, taking each partition's shared lock
  // only for the duration of its membership test.
  for (partition_id part = 0; part < header_->getNumPartitions(); ++part) {
    SpinSharedMutexSharedLock<false> lock(blocks_in_partition_mutexes_[part]);
    if (blocks_in_partition_[part].count(block) != 0) {
      return part;
    }
  }
  // Sentinel: the block belongs to no known partition.
  return std::numeric_limits<std::size_t>::max();
}
} // namespace quickstep
| {
"content_hash": "4f55f4c26594adf640517db7f18693d6",
"timestamp": "",
"source": "github",
"line_count": 97,
"max_line_length": 101,
"avg_line_length": 28.75257731958763,
"alnum_prop": 0.6970240229472929,
"repo_name": "cramja/incubator-quickstep",
"id": "1d7dce0bc6cf68ee9ef85f4cac3f595334d40a8d",
"size": "3598",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "catalog/PartitionScheme.cpp",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "8868819"
},
{
"name": "CMake",
"bytes": "635006"
},
{
"name": "Protocol Buffer",
"bytes": "51411"
},
{
"name": "Python",
"bytes": "33257"
},
{
"name": "Ruby",
"bytes": "5352"
},
{
"name": "Shell",
"bytes": "9617"
}
],
"symlink_target": ""
} |
namespace Microsoft.Azure.CognitiveServices.Search.EntitySearch.Models
{
using Newtonsoft.Json;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
/// <summary>
/// Model describing a creative work entity returned by the Bing Entity
/// Search API. Extends <see cref="Thing"/> with a thumbnail, providers,
/// and text content. (AutoRest-generated; edit with care.)
/// </summary>
public partial class CreativeWork : Thing
{
    /// <summary>
    /// Initializes a new instance of the CreativeWork class.
    /// </summary>
    public CreativeWork()
    {
        CustomInit();
    }

    /// <summary>
    /// Initializes a new instance of the CreativeWork class.
    /// </summary>
    /// <param name="id">A String identifier.</param>
    /// <param name="contractualRules">A list of rules that you must adhere
    /// to if you display the item.</param>
    /// <param name="webSearchUrl">The URL To Bing's search result for this
    /// item.</param>
    /// <param name="name">The name of the thing represented by this
    /// object.</param>
    /// <param name="url">The URL to get more information about the thing
    /// represented by this object.</param>
    /// <param name="image">An image of the item.</param>
    /// <param name="description">A short description of the item.</param>
    /// <param name="entityPresentationInfo">Additional information about
    /// the entity such as hints that you can use to determine the entity's
    /// type. To determine the entity's type, use the entityScenario and
    /// entityTypeHint fields.</param>
    /// <param name="bingId">An ID that uniquely identifies this
    /// item.</param>
    /// <param name="thumbnailUrl">The URL to a thumbnail of the
    /// item.</param>
    /// <param name="provider">The source of the creative work.</param>
    /// <param name="text">Text content of the creative work.</param>
    public CreativeWork(string id = default(string), IList<ContractualRulesContractualRule> contractualRules = default(IList<ContractualRulesContractualRule>), string webSearchUrl = default(string), string name = default(string), string url = default(string), ImageObject image = default(ImageObject), string description = default(string), EntitiesEntityPresentationInfo entityPresentationInfo = default(EntitiesEntityPresentationInfo), string bingId = default(string), string thumbnailUrl = default(string), IList<Thing> provider = default(IList<Thing>), string text = default(string))
        : base(id, contractualRules, webSearchUrl, name, url, image, description, entityPresentationInfo, bingId)
    {
        ThumbnailUrl = thumbnailUrl;
        Provider = provider;
        Text = text;
        CustomInit();
    }

    /// <summary>
    /// An initialization method that performs custom operations like setting defaults
    /// </summary>
    partial void CustomInit();

    /// <summary>
    /// Gets the URL to a thumbnail of the item.
    /// </summary>
    [JsonProperty(PropertyName = "thumbnailUrl")]
    public string ThumbnailUrl { get; private set; }

    /// <summary>
    /// Gets the source of the creative work.
    /// </summary>
    [JsonProperty(PropertyName = "provider")]
    public IList<Thing> Provider { get; private set; }

    /// <summary>
    /// Gets the text content of the creative work.
    /// </summary>
    [JsonProperty(PropertyName = "text")]
    public string Text { get; private set; }

    /// <summary>
    /// Validate the object.
    /// </summary>
    /// <exception cref="Rest.ValidationException">
    /// Thrown if validation fails
    /// </exception>
    public override void Validate()
    {
        base.Validate();
        // Recursively validate each provider entry; null entries are skipped.
        if (Provider != null)
        {
            foreach (var element in Provider)
            {
                if (element != null)
                {
                    element.Validate();
                }
            }
        }
    }
}
}
| {
"content_hash": "50ef0063eba58010f98f24e7a0ccf442",
"timestamp": "",
"source": "github",
"line_count": 91,
"max_line_length": 590,
"avg_line_length": 41.56043956043956,
"alnum_prop": 0.5909571655208884,
"repo_name": "peshen/azure-sdk-for-net",
"id": "c89051a7c2e63f4612293e9c92ea4b4dcfd47da3",
"size": "3972",
"binary": false,
"copies": "2",
"ref": "refs/heads/psSdkJson6",
"path": "src/SDKs/CognitiveServices/dataPlane/Search/Search/Generated/EntitySearch/Models/CreativeWork.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "118"
},
{
"name": "Batchfile",
"bytes": "16234"
},
{
"name": "C#",
"bytes": "75870978"
},
{
"name": "CSS",
"bytes": "685"
},
{
"name": "JavaScript",
"bytes": "7875"
},
{
"name": "PowerShell",
"bytes": "21530"
},
{
"name": "Shell",
"bytes": "9959"
},
{
"name": "XSLT",
"bytes": "6114"
}
],
"symlink_target": ""
} |
package org.apache.jsp.WEB_002dINF.views.layouts;
import javax.servlet.*;
import javax.servlet.http.*;
import javax.servlet.jsp.*;
public final class blank_jsp extends org.apache.jasper.runtime.HttpJspBase
implements org.apache.jasper.runtime.JspSourceDependent {
static private org.apache.jasper.runtime.ProtectedFunctionMapper _jspx_fnmap_0;
static {
_jspx_fnmap_0= org.apache.jasper.runtime.ProtectedFunctionMapper.getMapForFunction("fns:getAdminPath", com.dc.smarteam.common.config.Global.class, "getAdminPath", new Class[] {});
}
private static final JspFactory _jspxFactory = JspFactory.getDefaultFactory();
private static java.util.List _jspx_dependants;
static {
_jspx_dependants = new java.util.ArrayList(5);
_jspx_dependants.add("/WEB-INF/views/include/taglib.jsp");
_jspx_dependants.add("/WEB-INF/views/include/head.jsp");
_jspx_dependants.add("/WEB-INF/tlds/shiros.tld");
_jspx_dependants.add("/WEB-INF/tlds/fns.tld");
_jspx_dependants.add("/WEB-INF/tlds/fnc.tld");
}
private org.apache.jasper.runtime.TagHandlerPool _005fjspx_005ftagPool_005fc_005fset_0026_005fvar_005fvalue_005fnobody;
private org.apache.jasper.runtime.TagHandlerPool _005fjspx_005ftagPool_005fsitemesh_005ftitle_005fnobody;
private org.apache.jasper.runtime.TagHandlerPool _005fjspx_005ftagPool_005fsitemesh_005fhead_005fnobody;
private org.apache.jasper.runtime.TagHandlerPool _005fjspx_005ftagPool_005fsitemesh_005fbody_005fnobody;
private javax.el.ExpressionFactory _el_expressionfactory;
private org.apache.AnnotationProcessor _jsp_annotationprocessor;
/** Returns the list of source files this compiled page depends on. */
public Object getDependants() {
    return _jspx_dependants;
}
/**
 * Container lifecycle hook: acquires the shared tag-handler pools and EL
 * support objects once, before the page services any request.
 */
public void _jspInit() {
    _005fjspx_005ftagPool_005fc_005fset_0026_005fvar_005fvalue_005fnobody = org.apache.jasper.runtime.TagHandlerPool.getTagHandlerPool(getServletConfig());
    _005fjspx_005ftagPool_005fsitemesh_005ftitle_005fnobody = org.apache.jasper.runtime.TagHandlerPool.getTagHandlerPool(getServletConfig());
    _005fjspx_005ftagPool_005fsitemesh_005fhead_005fnobody = org.apache.jasper.runtime.TagHandlerPool.getTagHandlerPool(getServletConfig());
    _005fjspx_005ftagPool_005fsitemesh_005fbody_005fnobody = org.apache.jasper.runtime.TagHandlerPool.getTagHandlerPool(getServletConfig());
    _el_expressionfactory = _jspxFactory.getJspApplicationContext(getServletConfig().getServletContext()).getExpressionFactory();
    _jsp_annotationprocessor = (org.apache.AnnotationProcessor) getServletConfig().getServletContext().getAttribute(org.apache.AnnotationProcessor.class.getName());
}
/** Container lifecycle hook: releases the pooled tag handlers on shutdown. */
public void _jspDestroy() {
    _005fjspx_005ftagPool_005fc_005fset_0026_005fvar_005fvalue_005fnobody.release();
    _005fjspx_005ftagPool_005fsitemesh_005ftitle_005fnobody.release();
    _005fjspx_005ftagPool_005fsitemesh_005fhead_005fnobody.release();
    _005fjspx_005ftagPool_005fsitemesh_005fbody_005fnobody.release();
}
/**
 * Renders the "blank" decorator layout: writes the HTML skeleton and the
 * static asset includes, and delegates title/head/body content to the
 * SiteMesh decorator tags. Jasper-generated from blank.jsp — the exact
 * statement order mirrors the JSP template; do not edit by hand.
 *
 * @param request  the current HTTP request
 * @param response the response the rendered page is written to
 * @throws java.io.IOException if writing to the response fails
 * @throws ServletException   on JSP processing errors
 */
public void _jspService(HttpServletRequest request, HttpServletResponse response)
        throws java.io.IOException, ServletException {

    PageContext pageContext = null;
    HttpSession session = null;
    ServletContext application = null;
    ServletConfig config = null;
    JspWriter out = null;
    Object page = this;
    JspWriter _jspx_out = null;
    PageContext _jspx_page_context = null;

    try {
        // Standard Jasper preamble: obtain a buffered page context.
        response.setContentType("text/html;charset=UTF-8");
        pageContext = _jspxFactory.getPageContext(this, request, response,
                null, true, 8192, true);
        _jspx_page_context = pageContext;
        application = pageContext.getServletContext();
        config = pageContext.getServletConfig();
        session = pageContext.getSession();
        out = pageContext.getOut();
        _jspx_out = out;

        out.write('\n');
        out.write("\n");
        out.write("\n");
        out.write("\n");
        out.write("\n");
        out.write("\n");
        out.write("\n");
        out.write("\n");
        out.write("\n");
        out.write("\n");
        out.write("\n");
        // <c:set var="ctx"> and <c:set var="ctxStatic"> from taglib.jsp.
        if (_jspx_meth_c_005fset_005f0(_jspx_page_context))
            return;
        out.write('\n');
        if (_jspx_meth_c_005fset_005f1(_jspx_page_context))
            return;
        out.write("\n");
        out.write("\n");
        out.write("<!DOCTYPE html>\n");
        out.write("<html style=\"overflow-x:auto;overflow-y:auto;\">\n");
        out.write("<head>\n");
        out.write("\t<title>");
        // Title supplied by the decorated page via SiteMesh.
        if (_jspx_meth_sitemesh_005ftitle_005f0(_jspx_page_context))
            return;
        out.write("</title><!-- - Powered By JeeSite -->\n");
        out.write("\t");
        // Static asset includes from head.jsp (jQuery, Bootstrap, plugins).
        out.write("<meta http-equiv=\"Content-Type\" content=\"text/html;charset=utf-8\" /><meta name=\"author\" content=\"http://jeesite.com/\"/>\n");
        out.write("<meta name=\"renderer\" content=\"webkit\"><meta http-equiv=\"X-UA-Compatible\" content=\"IE=8,IE=9,IE=10\" />\n");
        out.write("<meta http-equiv=\"Expires\" content=\"0\"><meta http-equiv=\"Cache-Control\" content=\"no-cache\"><meta http-equiv=\"Cache-Control\" content=\"no-store\">\n");
        out.write("<script src=\"");
        out.write((java.lang.String) org.apache.jasper.runtime.PageContextImpl.proprietaryEvaluate("${ctxStatic}", java.lang.String.class, (PageContext)_jspx_page_context, null, false));
        out.write("/jquery/jquery-1.8.3.min.js\" type=\"text/javascript\"></script>\n");
        out.write("<link href=\"");
        out.write((java.lang.String) org.apache.jasper.runtime.PageContextImpl.proprietaryEvaluate("${ctxStatic}", java.lang.String.class, (PageContext)_jspx_page_context, null, false));
        out.write("/bootstrap/2.3.1/css_");
        out.write((java.lang.String) org.apache.jasper.runtime.PageContextImpl.proprietaryEvaluate("${not empty cookie.theme.value ? cookie.theme.value : 'cerulean'}", java.lang.String.class, (PageContext)_jspx_page_context, null, false));
        out.write("/bootstrap.min.css\" type=\"text/css\" rel=\"stylesheet\" />\n");
        out.write("<script src=\"");
        out.write((java.lang.String) org.apache.jasper.runtime.PageContextImpl.proprietaryEvaluate("${ctxStatic}", java.lang.String.class, (PageContext)_jspx_page_context, null, false));
        out.write("/bootstrap/2.3.1/js/bootstrap.min.js\" type=\"text/javascript\"></script>\n");
        out.write("<link href=\"");
        out.write((java.lang.String) org.apache.jasper.runtime.PageContextImpl.proprietaryEvaluate("${ctxStatic}", java.lang.String.class, (PageContext)_jspx_page_context, null, false));
        out.write("/bootstrap/2.3.1/awesome/font-awesome.min.css\" type=\"text/css\" rel=\"stylesheet\" />\n");
        out.write("<!--[if lte IE 7]><link href=\"");
        out.write((java.lang.String) org.apache.jasper.runtime.PageContextImpl.proprietaryEvaluate("${ctxStatic}", java.lang.String.class, (PageContext)_jspx_page_context, null, false));
        out.write("/bootstrap/2.3.1/awesome/font-awesome-ie7.min.css\" type=\"text/css\" rel=\"stylesheet\" /><![endif]-->\n");
        out.write("<!--[if lte IE 6]><link href=\"");
        out.write((java.lang.String) org.apache.jasper.runtime.PageContextImpl.proprietaryEvaluate("${ctxStatic}", java.lang.String.class, (PageContext)_jspx_page_context, null, false));
        out.write("/bootstrap/bsie/css/bootstrap-ie6.min.css\" type=\"text/css\" rel=\"stylesheet\" />\n");
        out.write("<script src=\"");
        out.write((java.lang.String) org.apache.jasper.runtime.PageContextImpl.proprietaryEvaluate("${ctxStatic}", java.lang.String.class, (PageContext)_jspx_page_context, null, false));
        out.write("/bootstrap/bsie/js/bootstrap-ie.min.js\" type=\"text/javascript\"></script><![endif]-->\n");
        out.write("<link href=\"");
        out.write((java.lang.String) org.apache.jasper.runtime.PageContextImpl.proprietaryEvaluate("${ctxStatic}", java.lang.String.class, (PageContext)_jspx_page_context, null, false));
        out.write("/jquery-select2/3.4/select2.min.css\" rel=\"stylesheet\" />\n");
        out.write("<script src=\"");
        out.write((java.lang.String) org.apache.jasper.runtime.PageContextImpl.proprietaryEvaluate("${ctxStatic}", java.lang.String.class, (PageContext)_jspx_page_context, null, false));
        out.write("/jquery-select2/3.4/select2.min.js\" type=\"text/javascript\"></script>\n");
        out.write("<link href=\"");
        out.write((java.lang.String) org.apache.jasper.runtime.PageContextImpl.proprietaryEvaluate("${ctxStatic}", java.lang.String.class, (PageContext)_jspx_page_context, null, false));
        out.write("/jquery-validation/1.11.0/jquery.validate.min.css\" type=\"text/css\" rel=\"stylesheet\" />\n");
        out.write("<script src=\"");
        out.write((java.lang.String) org.apache.jasper.runtime.PageContextImpl.proprietaryEvaluate("${ctxStatic}", java.lang.String.class, (PageContext)_jspx_page_context, null, false));
        out.write("/jquery-validation/1.11.0/jquery.validate.min.js\" type=\"text/javascript\"></script>\n");
        out.write("<link href=\"");
        out.write((java.lang.String) org.apache.jasper.runtime.PageContextImpl.proprietaryEvaluate("${ctxStatic}", java.lang.String.class, (PageContext)_jspx_page_context, null, false));
        out.write("/jquery-jbox/2.3/Skins/Bootstrap/jbox.min.css\" rel=\"stylesheet\" />\n");
        out.write("<script src=\"");
        out.write((java.lang.String) org.apache.jasper.runtime.PageContextImpl.proprietaryEvaluate("${ctxStatic}", java.lang.String.class, (PageContext)_jspx_page_context, null, false));
        out.write("/jquery-jbox/2.3/jquery.jBox-2.3.min.js\" type=\"text/javascript\"></script>\n");
        out.write("<script src=\"");
        out.write((java.lang.String) org.apache.jasper.runtime.PageContextImpl.proprietaryEvaluate("${ctxStatic}", java.lang.String.class, (PageContext)_jspx_page_context, null, false));
        out.write("/My97DatePicker/WdatePicker.js\" type=\"text/javascript\"></script>\n");
        out.write("<script src=\"");
        out.write((java.lang.String) org.apache.jasper.runtime.PageContextImpl.proprietaryEvaluate("${ctxStatic}", java.lang.String.class, (PageContext)_jspx_page_context, null, false));
        out.write("/common/mustache.min.js\" type=\"text/javascript\"></script>\n");
        out.write("<link href=\"");
        out.write((java.lang.String) org.apache.jasper.runtime.PageContextImpl.proprietaryEvaluate("${ctxStatic}", java.lang.String.class, (PageContext)_jspx_page_context, null, false));
        out.write("/common/jeesite.min.css\" type=\"text/css\" rel=\"stylesheet\" />\n");
        out.write("<script src=\"");
        out.write((java.lang.String) org.apache.jasper.runtime.PageContextImpl.proprietaryEvaluate("${ctxStatic}", java.lang.String.class, (PageContext)_jspx_page_context, null, false));
        out.write("/common/jeesite.min.js\" type=\"text/javascript\"></script>\n");
        out.write("<script type=\"text/javascript\">var ctx = '");
        out.write((java.lang.String) org.apache.jasper.runtime.PageContextImpl.proprietaryEvaluate("${ctx}", java.lang.String.class, (PageContext)_jspx_page_context, null, false));
        out.write("', ctxStatic='");
        out.write((java.lang.String) org.apache.jasper.runtime.PageContextImpl.proprietaryEvaluate("${ctxStatic}", java.lang.String.class, (PageContext)_jspx_page_context, null, false));
        out.write("';</script>");
        out.write("\n");
        out.write("\t<!-- Baidu tongji analytics --><script>var _hmt=_hmt||[];(function(){var hm=document.createElement(\"script\");hm.src=\"//hm.baidu.com/hm.js?82116c626a8d504a5c0675073362ef6f\";var s=document.getElementsByTagName(\"script\")[0];s.parentNode.insertBefore(hm,s);})();</script>\n");
        out.write("\t");
        // Decorated page's <head> contribution, then its body.
        if (_jspx_meth_sitemesh_005fhead_005f0(_jspx_page_context))
            return;
        out.write("\n");
        out.write("</head>\n");
        out.write("<body>\n");
        out.write("\t");
        if (_jspx_meth_sitemesh_005fbody_005f0(_jspx_page_context))
            return;
        out.write("\n");
        out.write("</body>\n");
        out.write("</html>");
    } catch (Throwable t) {
        // Route any rendering failure through the JSP error machinery,
        // discarding buffered partial output first.
        if (!(t instanceof SkipPageException)){
            out = _jspx_out;
            if (out != null && out.getBufferSize() != 0)
                try { out.clearBuffer(); } catch (java.io.IOException e) {}
            if (_jspx_page_context != null) _jspx_page_context.handlePageException(t);
            else log(t.getMessage(), t);
        }
    } finally {
        _jspxFactory.releasePageContext(_jspx_page_context);
    }
}
/**
 * Executes the <c:set var="ctx"> tag from taglib.jsp, defining the
 * application context path. Returns true if the page must be skipped.
 */
private boolean _jspx_meth_c_005fset_005f0(PageContext _jspx_page_context)
        throws Throwable {
    PageContext pageContext = _jspx_page_context;
    JspWriter out = _jspx_page_context.getOut();
    //  c:set
    org.apache.taglibs.standard.tag.rt.core.SetTag _jspx_th_c_005fset_005f0 = (org.apache.taglibs.standard.tag.rt.core.SetTag) _005fjspx_005ftagPool_005fc_005fset_0026_005fvar_005fvalue_005fnobody.get(org.apache.taglibs.standard.tag.rt.core.SetTag.class);
    _jspx_th_c_005fset_005f0.setPageContext(_jspx_page_context);
    _jspx_th_c_005fset_005f0.setParent(null);
    // /WEB-INF/views/include/taglib.jsp(11,0) name = var type = java.lang.String reqTime = false required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null
    _jspx_th_c_005fset_005f0.setVar("ctx");
    // /WEB-INF/views/include/taglib.jsp(11,0) name = value type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null
    _jspx_th_c_005fset_005f0.setValue((java.lang.Object) org.apache.jasper.runtime.PageContextImpl.proprietaryEvaluate("${pageContext.request.contextPath}${fns:getAdminPath()}", java.lang.Object.class, (PageContext)_jspx_page_context, _jspx_fnmap_0, false));
    int _jspx_eval_c_005fset_005f0 = _jspx_th_c_005fset_005f0.doStartTag();
    if (_jspx_th_c_005fset_005f0.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
        _005fjspx_005ftagPool_005fc_005fset_0026_005fvar_005fvalue_005fnobody.reuse(_jspx_th_c_005fset_005f0);
        return true;
    }
    _005fjspx_005ftagPool_005fc_005fset_0026_005fvar_005fvalue_005fnobody.reuse(_jspx_th_c_005fset_005f0);
    return false;
}
/**
 * Executes the <c:set var="ctxStatic"> tag from taglib.jsp, defining the
 * static-resources base path. Returns true if the page must be skipped.
 */
private boolean _jspx_meth_c_005fset_005f1(PageContext _jspx_page_context)
        throws Throwable {
    PageContext pageContext = _jspx_page_context;
    JspWriter out = _jspx_page_context.getOut();
    //  c:set
    org.apache.taglibs.standard.tag.rt.core.SetTag _jspx_th_c_005fset_005f1 = (org.apache.taglibs.standard.tag.rt.core.SetTag) _005fjspx_005ftagPool_005fc_005fset_0026_005fvar_005fvalue_005fnobody.get(org.apache.taglibs.standard.tag.rt.core.SetTag.class);
    _jspx_th_c_005fset_005f1.setPageContext(_jspx_page_context);
    _jspx_th_c_005fset_005f1.setParent(null);
    // /WEB-INF/views/include/taglib.jsp(12,0) name = var type = java.lang.String reqTime = false required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null
    _jspx_th_c_005fset_005f1.setVar("ctxStatic");
    // /WEB-INF/views/include/taglib.jsp(12,0) name = value type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null
    _jspx_th_c_005fset_005f1.setValue((java.lang.Object) org.apache.jasper.runtime.PageContextImpl.proprietaryEvaluate("${pageContext.request.contextPath}/static", java.lang.Object.class, (PageContext)_jspx_page_context, null, false));
    int _jspx_eval_c_005fset_005f1 = _jspx_th_c_005fset_005f1.doStartTag();
    if (_jspx_th_c_005fset_005f1.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
        _005fjspx_005ftagPool_005fc_005fset_0026_005fvar_005fvalue_005fnobody.reuse(_jspx_th_c_005fset_005f1);
        return true;
    }
    _005fjspx_005ftagPool_005fc_005fset_0026_005fvar_005fvalue_005fnobody.reuse(_jspx_th_c_005fset_005f1);
    return false;
}
/**
 * Emits the decorated page's title via the SiteMesh <decorator:title>
 * tag. Returns true if the page must be skipped.
 */
private boolean _jspx_meth_sitemesh_005ftitle_005f0(PageContext _jspx_page_context)
        throws Throwable {
    PageContext pageContext = _jspx_page_context;
    JspWriter out = _jspx_page_context.getOut();
    //  sitemesh:title
    com.opensymphony.module.sitemesh.taglib.decorator.TitleTag _jspx_th_sitemesh_005ftitle_005f0 = (com.opensymphony.module.sitemesh.taglib.decorator.TitleTag) _005fjspx_005ftagPool_005fsitemesh_005ftitle_005fnobody.get(com.opensymphony.module.sitemesh.taglib.decorator.TitleTag.class);
    _jspx_th_sitemesh_005ftitle_005f0.setPageContext(_jspx_page_context);
    _jspx_th_sitemesh_005ftitle_005f0.setParent(null);
    int _jspx_eval_sitemesh_005ftitle_005f0 = _jspx_th_sitemesh_005ftitle_005f0.doStartTag();
    if (_jspx_th_sitemesh_005ftitle_005f0.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
        _005fjspx_005ftagPool_005fsitemesh_005ftitle_005fnobody.reuse(_jspx_th_sitemesh_005ftitle_005f0);
        return true;
    }
    _005fjspx_005ftagPool_005fsitemesh_005ftitle_005fnobody.reuse(_jspx_th_sitemesh_005ftitle_005f0);
    return false;
}
/**
 * Emits the decorated page's head content via the SiteMesh
 * <decorator:head> tag. Returns true if the page must be skipped.
 */
private boolean _jspx_meth_sitemesh_005fhead_005f0(PageContext _jspx_page_context)
        throws Throwable {
    PageContext pageContext = _jspx_page_context;
    JspWriter out = _jspx_page_context.getOut();
    //  sitemesh:head
    com.opensymphony.module.sitemesh.taglib.decorator.HeadTag _jspx_th_sitemesh_005fhead_005f0 = (com.opensymphony.module.sitemesh.taglib.decorator.HeadTag) _005fjspx_005ftagPool_005fsitemesh_005fhead_005fnobody.get(com.opensymphony.module.sitemesh.taglib.decorator.HeadTag.class);
    _jspx_th_sitemesh_005fhead_005f0.setPageContext(_jspx_page_context);
    _jspx_th_sitemesh_005fhead_005f0.setParent(null);
    int _jspx_eval_sitemesh_005fhead_005f0 = _jspx_th_sitemesh_005fhead_005f0.doStartTag();
    if (_jspx_th_sitemesh_005fhead_005f0.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
        _005fjspx_005ftagPool_005fsitemesh_005fhead_005fnobody.reuse(_jspx_th_sitemesh_005fhead_005f0);
        return true;
    }
    _005fjspx_005ftagPool_005fsitemesh_005fhead_005fnobody.reuse(_jspx_th_sitemesh_005fhead_005f0);
    return false;
}
/**
 * Emits the decorated page's body via the SiteMesh <decorator:body>
 * tag. Returns true if the page must be skipped.
 */
private boolean _jspx_meth_sitemesh_005fbody_005f0(PageContext _jspx_page_context)
        throws Throwable {
    PageContext pageContext = _jspx_page_context;
    JspWriter out = _jspx_page_context.getOut();
    //  sitemesh:body
    com.opensymphony.module.sitemesh.taglib.decorator.BodyTag _jspx_th_sitemesh_005fbody_005f0 = (com.opensymphony.module.sitemesh.taglib.decorator.BodyTag) _005fjspx_005ftagPool_005fsitemesh_005fbody_005fnobody.get(com.opensymphony.module.sitemesh.taglib.decorator.BodyTag.class);
    _jspx_th_sitemesh_005fbody_005f0.setPageContext(_jspx_page_context);
    _jspx_th_sitemesh_005fbody_005f0.setParent(null);
    int _jspx_eval_sitemesh_005fbody_005f0 = _jspx_th_sitemesh_005fbody_005f0.doStartTag();
    if (_jspx_th_sitemesh_005fbody_005f0.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
        _005fjspx_005ftagPool_005fsitemesh_005fbody_005fnobody.reuse(_jspx_th_sitemesh_005fbody_005f0);
        return true;
    }
    _005fjspx_005ftagPool_005fsitemesh_005fbody_005fnobody.reuse(_jspx_th_sitemesh_005fbody_005f0);
    return false;
}
}
| {
"content_hash": "d28b3cd7649ce18fa6b0d832698e74ff",
"timestamp": "",
"source": "github",
"line_count": 287,
"max_line_length": 297,
"avg_line_length": 66.47038327526133,
"alnum_prop": 0.7099124600304031,
"repo_name": "VincentFxz/EAM",
"id": "245d43db20e26b28fd3cee6a7a52f593eeeea9de",
"size": "19077",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "target/tomcat/work/localEngine-8181/localhost/smarteam/org/apache/jsp/WEB_002dINF/views/layouts/blank_jsp.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ASP",
"bytes": "3753"
},
{
"name": "ApacheConf",
"bytes": "1536"
},
{
"name": "Batchfile",
"bytes": "5759"
},
{
"name": "CSS",
"bytes": "2231708"
},
{
"name": "HTML",
"bytes": "3619034"
},
{
"name": "Java",
"bytes": "3326237"
},
{
"name": "JavaScript",
"bytes": "14365592"
},
{
"name": "PHP",
"bytes": "8060"
},
{
"name": "PLSQL",
"bytes": "66284"
}
],
"symlink_target": ""
} |
const fs = require('fs')
const { createCanvas } = require('canvas')
const width = 64
const height = 48
const weight = 300
// Maps glyph-name components (used in ligature file names such as
// `less_equal.liga.xml`) to the literal characters they represent when
// rendering the preview text.
const map = {
  ampersand: '&',
  bar: '|',
  colon: ':',
  equal: '=',
  exclam: '!',
  greater: '>',
  hyphen: '-',
  less: '<',
  plus: '+',
  question: '?',
  slash: '/',
  underscore: '_',
  w: 'w',
}
/**
 * Renders a 64x48 PNG preview (white glyph text on black) for one ligature
 * glyph file and writes it to ./images/preview/<style>/<ligature>.png.
 *
 * @param {string} style - Font style, 'normal' or 'italic'.
 * @param {string} filename - Glyph file name, e.g. 'less_equal.liga.xml'.
 * @returns {{text: string, ligature: string, filename: string}} preview info
 */
function generateLigaturePreview(style, filename) {
  // 'less_equal.liga.xml' -> ligature 'less_equal' -> text '<='
  const ligature = filename.split('.').slice(0, 1)[0]
  const chars = ligature.split('_')
  const text = chars.reduce((t, c) => t + map[c], '')
  const canvas = createCanvas(width, height)
  const context = canvas.getContext('2d')
  context.fillStyle = '#000'
  context.fillRect(0, 0, width, height)
  context.font = `${weight} ${style} 18pt "Operator Mono SSm Lig"`
  context.textAlign = 'center'
  context.fillStyle = '#fff'
  // Fix: dropped `context.text = text` — the 2D context has no such
  // property, so the assignment was a meaningless expando; fillText below
  // is what actually draws the string.
  context.fillText(text, 32, 32)
  const buffer = canvas.toBuffer('image/png')
  console.log(ligature)
  fs.writeFileSync(`./images/preview/${style}/${ligature}.png`, buffer)
  return { text, ligature, filename: `${ligature}.png` }
}
/**
 * Renders a preview PNG for every ligature glyph file in one font style and
 * writes an index.html gallery linking the images.
 *
 * @param {string} style - Font style, 'normal' or 'italic'.
 * @param {string[]} filenames - Ligature glyph file names to render.
 */
function generate(style, filenames) {
  let html = `<html><head>
  <style>
  body { background-color: #000; color: #fff; font-family: sans-serif; margin: 16px;}
  a { color: #fff; text-decoration: none; padding: 4px;}
  a.active {color: #000; background-color: #fff;}
  .container { display: grid; grid-template-columns: repeat(auto-fill, minmax(200px, 1fr)); }
  .ligature { display: flex; flex-direction: column; align-items: center; margin: 8px; }
  </style>
  </head>
  <body>
  <h1>Operator Mono Ligatures</h1>
  <a href="https://htmlpreview.github.io/?https://github.com/kiliman/operator-mono-lig/blob/master/images/preview/normal/index.html" class="${
    style === 'normal' ? 'active' : ''
  }">Normal</a> |
  <a href="https://htmlpreview.github.io/?https://github.com/kiliman/operator-mono-lig/blob/master/images/preview/italic/index.html" class="${
    style === 'italic' ? 'active' : ''
  }">Italic</a>
  <div class="container">
  `
  filenames.forEach(f => {
    const { ligature, filename } = generateLigaturePreview(style, f)
    // Fix: the image src interpolation was broken (emitted a literal
    // placeholder); use the PNG filename returned by the renderer, which
    // was previously destructured but never used.
    html += `<div class="ligature"><img src="${filename}"/><div>${ligature}</div></div>`
  })
  html += `</div></body></html>`
  fs.writeFileSync(`./images/preview/${style}/index.html`, html)
}
// Collect only the ligature glyph definitions (files ending in `.liga.xml`
// whose name doesn't end with a digit before the suffix, skipping numbered
// alternates), then render previews for both styles.
const filenames = fs
  .readdirSync('./ligature/OperatorMonoSSmLig-Book/glyphs')
  .filter(f => !!f.match(/[^\d]\.liga\.xml$/))
generate('normal', filenames)
generate('italic', filenames)
| {
"content_hash": "d65c1fe92fdaf9fa609751079cbe1f6b",
"timestamp": "",
"source": "github",
"line_count": 85,
"max_line_length": 144,
"avg_line_length": 29.729411764705883,
"alnum_prop": 0.6347447566284131,
"repo_name": "kiliman/operator-mono-lig",
"id": "8616fff3f6810faec547e3280eda0c4a03fa2dd2",
"size": "2527",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "genpreview.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "875"
},
{
"name": "Go",
"bytes": "744"
},
{
"name": "HTML",
"bytes": "7516"
},
{
"name": "JavaScript",
"bytes": "27250"
},
{
"name": "Shell",
"bytes": "1058"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectRootManager" version="2" languageLevel="JDK_1_6" assert-keyword="true" jdk-15="true" project-jdk-name="Android API 20 Platform" project-jdk-type="Android SDK">
<output url="file://$PROJECT_DIR$/out" />
</component>
</project>
| {
"content_hash": "779eeb92e77f1689e4251eb6dec2ac73",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 185,
"avg_line_length": 45.714285714285715,
"alnum_prop": 0.69375,
"repo_name": "teodorrupi/accountant-app",
"id": "24fc4b5f89824d5bee84138c8d09d61332f80b4c",
"size": "320",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "accountant/.idea/misc.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "1011"
}
],
"symlink_target": ""
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (version 1.7.0) on Fri Feb 01 09:13:24 EST 2013 -->
<title>Uses of Package org.drip.analytics.daycount</title>
<meta name="date" content="2013-02-01">
<link rel="stylesheet" type="text/css" href="../../../../stylesheet.css" title="Style">
</head>
<body>
<script type="text/javascript"><!--
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Package org.drip.analytics.daycount";
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar_top">
<!-- -->
</a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li>Class</li>
<li class="navBarCell1Rev">Use</li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../index-files/index-1.html">Index</a></li>
<li><a href="../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../index.html?org/drip/analytics/daycount/package-use.html" target="_top">Frames</a></li>
<li><a href="package-use.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="header">
<h1 title="Uses of Package org.drip.analytics.daycount" class="title">Uses of Package<br>org.drip.analytics.daycount</h1>
</div>
<div class="contentContainer">
<ul class="blockList">
<li class="blockList">
<table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing packages, and an explanation">
<caption><span>Packages that use <a href="../../../../org/drip/analytics/daycount/package-summary.html">org.drip.analytics.daycount</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Package</th>
<th class="colLast" scope="col">Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><a href="#org.drip.analytics.daycount">org.drip.analytics.daycount</a></td>
<td class="colLast"> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><a href="#org.drip.analytics.period">org.drip.analytics.period</a></td>
<td class="colLast"> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><a href="#org.drip.param.valuation">org.drip.param.valuation</a></td>
<td class="colLast"> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><a href="#org.drip.product.credit">org.drip.product.credit</a></td>
<td class="colLast"> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><a href="#org.drip.product.params">org.drip.product.params</a></td>
<td class="colLast"> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><a href="#org.drip.product.rates">org.drip.product.rates</a></td>
<td class="colLast"> </td>
</tr>
</tbody>
</table>
</li>
<li class="blockList"><a name="org.drip.analytics.daycount">
<!-- -->
</a>
<table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing classes, and an explanation">
<caption><span>Classes in <a href="../../../../org/drip/analytics/daycount/package-summary.html">org.drip.analytics.daycount</a> used by <a href="../../../../org/drip/analytics/daycount/package-summary.html">org.drip.analytics.daycount</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colOne" scope="col">Class and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colOne"><a href="../../../../org/drip/analytics/daycount/class-use/ActActDCParams.html#org.drip.analytics.daycount">ActActDCParams</a>
<div class="block">Class contains parameters to represent the Act/Act day count.</div>
</td>
</tr>
<tr class="rowColor">
<td class="colOne"><a href="../../../../org/drip/analytics/daycount/class-use/DateEOMAdjustment.html#org.drip.analytics.daycount">DateEOMAdjustment</a>
<div class="block">This class holds the applicable anterior and posterior EOM adjustments for a given date pair.</div>
</td>
</tr>
<tr class="altColor">
<td class="colOne"><a href="../../../../org/drip/analytics/daycount/class-use/DCFCalculator.html#org.drip.analytics.daycount">DCFCalculator</a>
<div class="block">This interface holds the DCC name and its year-fraction and the days accrued stubs.</div>
</td>
</tr>
</tbody>
</table>
</li>
<li class="blockList"><a name="org.drip.analytics.period">
<!-- -->
</a>
<table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing classes, and an explanation">
<caption><span>Classes in <a href="../../../../org/drip/analytics/daycount/package-summary.html">org.drip.analytics.daycount</a> used by <a href="../../../../org/drip/analytics/period/package-summary.html">org.drip.analytics.period</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colOne" scope="col">Class and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colOne"><a href="../../../../org/drip/analytics/daycount/class-use/DateAdjustParams.html#org.drip.analytics.period">DateAdjustParams</a>
<div class="block">This class contains the parameters needed for adjusting dates holiday calendar and adjustment type.</div>
</td>
</tr>
</tbody>
</table>
</li>
<li class="blockList"><a name="org.drip.param.valuation">
<!-- -->
</a>
<table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing classes, and an explanation">
<caption><span>Classes in <a href="../../../../org/drip/analytics/daycount/package-summary.html">org.drip.analytics.daycount</a> used by <a href="../../../../org/drip/param/valuation/package-summary.html">org.drip.param.valuation</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colOne" scope="col">Class and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colOne"><a href="../../../../org/drip/analytics/daycount/class-use/ActActDCParams.html#org.drip.param.valuation">ActActDCParams</a>
<div class="block">Class contains parameters to represent the Act/Act day count.</div>
</td>
</tr>
</tbody>
</table>
</li>
<li class="blockList"><a name="org.drip.product.credit">
<!-- -->
</a>
<table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing classes, and an explanation">
<caption><span>Classes in <a href="../../../../org/drip/analytics/daycount/package-summary.html">org.drip.analytics.daycount</a> used by <a href="../../../../org/drip/product/credit/package-summary.html">org.drip.product.credit</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colOne" scope="col">Class and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colOne"><a href="../../../../org/drip/analytics/daycount/class-use/DateAdjustParams.html#org.drip.product.credit">DateAdjustParams</a>
<div class="block">This class contains the parameters needed for adjusting dates holiday calendar and adjustment type.</div>
</td>
</tr>
</tbody>
</table>
</li>
<li class="blockList"><a name="org.drip.product.params">
<!-- -->
</a>
<table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing classes, and an explanation">
<caption><span>Classes in <a href="../../../../org/drip/analytics/daycount/package-summary.html">org.drip.analytics.daycount</a> used by <a href="../../../../org/drip/product/params/package-summary.html">org.drip.product.params</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colOne" scope="col">Class and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colOne"><a href="../../../../org/drip/analytics/daycount/class-use/DateAdjustParams.html#org.drip.product.params">DateAdjustParams</a>
<div class="block">This class contains the parameters needed for adjusting dates holiday calendar and adjustment type.</div>
</td>
</tr>
</tbody>
</table>
</li>
<li class="blockList"><a name="org.drip.product.rates">
<!-- -->
</a>
<table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing classes, and an explanation">
<caption><span>Classes in <a href="../../../../org/drip/analytics/daycount/package-summary.html">org.drip.analytics.daycount</a> used by <a href="../../../../org/drip/product/rates/package-summary.html">org.drip.product.rates</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colOne" scope="col">Class and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colOne"><a href="../../../../org/drip/analytics/daycount/class-use/DateAdjustParams.html#org.drip.product.rates">DateAdjustParams</a>
<div class="block">This class contains the parameters needed for adjusting dates holiday calendar and adjustment type.</div>
</td>
</tr>
</tbody>
</table>
</li>
</ul>
</div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar_bottom">
<!-- -->
</a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li>Class</li>
<li class="navBarCell1Rev">Use</li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../index-files/index-1.html">Index</a></li>
<li><a href="../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../index.html?org/drip/analytics/daycount/package-use.html" target="_top">Frames</a></li>
<li><a href="package-use.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
</body>
</html>
| {
"content_hash": "cc77771b40ceedd43202ec9bd1bcb1ad",
"timestamp": "",
"source": "github",
"line_count": 265,
"max_line_length": 290,
"avg_line_length": 42.218867924528304,
"alnum_prop": 0.6511440829460136,
"repo_name": "tectronics/rootfinder",
"id": "ad1f376c85ab6c8a8de3e1712dfabfb5ec394c6e",
"size": "11188",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "2.1/docs/Javadoc/org/drip/analytics/daycount/package-use.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "34839"
},
{
"name": "HTML",
"bytes": "77000232"
},
{
"name": "Java",
"bytes": "10842587"
}
],
"symlink_target": ""
} |
# NUT MODE written to nut.conf; 'none' means NUT is not configured and no
# services are started (see nut.conf(5) for the other modes).
default['nut']['mode'] = 'none'
# The devices connected to the UPS.
default['nut']['devices'] = []
# UPS Settings
# Hash of UPS definitions (upsname => driver settings) for ups.conf
# (ups.conf sections are referenced in the MONITOR discussion below).
default['nut']['ups'] = {}
# Hash of NUT user accounts (username => settings) for upsd.users.
default['nut']['users'] = {}
# --------------------------------------------------------------------------
# MONITOR <system> <powervalue> <username> <password> ("master"|"slave")
#
# List systems you want to monitor. Not all of these may supply power
# to the system running upsmon, but if you want to watch it, it has to
# be in this section.
#
# You must have at least one of these declared.
#
# <system> is a UPS identifier in the form <upsname>@<hostname>[:<port>]
# like ups@localhost, su700@mybox, etc.
#
# Examples:
#
# - "su700@mybox" means a UPS called "su700" on a system called "mybox"
#
# - "fenton@bigbox:5678" is a UPS called "fenton" on a system called
# "bigbox" which runs upsd on port "5678".
#
# The UPS names like "su700" and "fenton" are set in your ups.conf
# in [brackets] which identify a section for a particular driver.
#
# If the ups.conf on host "doghouse" has a section called "snoopy", the
# identifier for it would be "snoopy@doghouse".
#
# <powervalue> is an integer - the number of power supplies that this UPS
# feeds on this system. Most computers only have one power supply, so this
# is normally set to 1. You need a pretty big or special box to have any
# other value here.
#
# You can also set this to 0 for a system that doesn't supply any power,
# but you still want to monitor. Use this when you want to hear about
# changes for a given UPS without shutting down when it goes critical,
# unless <powervalue> is 0.
#
# <username> and <password> must match an entry in that system's
# upsd.users. If your username is "monmaster" and your password is
# "blah", the upsd.users would look like this:
#
# [monmaster]
# password = blah
# upsmon master (or slave)
#
# "master" means this system will shutdown last, allowing the slaves
# time to shutdown first.
#
# "slave" means this system shuts down immediately when power goes critical.
#
# Examples:
#
# MONITOR myups@bigserver 1 monmaster blah master
# MONITOR su700@server.example.com 1 upsmon secretpass slave
# MONITOR myups@localhost 1 upsmon pass master (or slave)
# upsmon.conf MONITOR entries (name => settings); see the MONITOR
# discussion above and the commented-out example below.
default['nut']['monitors'] = {}
# {"cyberpower": {
# # <system> is a UPS identifier in the form <upsname>@<hostname>[:<port>]
# # like ups@localhost, su700@mybox, etc.
# "system" => "cyberpower@localhost",
#
# # <powervalue> is an integer - the number of power supplies that this UPS
# # feeds on this system. Most computers only have one power supply, so this
# # is normally set to 1. You need a pretty big or special box to have any
# # other value here.
# #
# # You can also set this to 0 for a system that doesn't supply any power,
# # but you still want to monitor. Use this when you want to hear about
# # changes for a given UPS without shutting down when it goes critical,
# # unless <powervalue> is 0.
# "power_value" => 1,
#
# "username" => "admin",
# "password" => "",
# "role" => "master"
# }}
# Give the number of power supplies that must be receiving power to keep
# this system running. Most systems have one power supply, so you would
# put "1" in this field.
#
# Large/expensive server type systems usually have more, and can run with
# a few missing. The HP NetServer LH4 can run with 2 out of 4, for example,
# so you'd set that to 2. The idea is to keep the box running as long
# as possible, right?
#
# Obviously you have to put the redundant supplies on different UPS circuits
# for this to make sense! See big-servers.txt in the docs subdirectory
# for more information and ideas on how to use this feature.
# upsmon.conf MINSUPPLIES: power supplies that must be receiving power to
# keep the system running (explained above); 1 suits typical
# single-supply machines.
default['nut']['min_supplies'] = 1
# upsmon.conf SHUTDOWNCMD: upsmon runs this command when the system
# needs to be brought down.
default['nut']['shutdown_command'] = '/sbin/shutdown -h +0'
# upsmon calls this to send messages when things happen
#
# This command is called with the full text of the message as one argument.
# The environment string NOTIFYTYPE will contain the type string of
# whatever caused this event to happen.
#
# Note that this is only called for NOTIFY events that have EXEC set with
# NOTIFYFLAG. See NOTIFYFLAG below for more details.
#
# Making this some sort of shell script might not be a bad idea. For more
# information and ideas, see pager.txt in the docs directory.
# upsmon.conf NOTIFYCMD (described above); nil = not set, so presumably
# no notify command is rendered -- confirm in the cookbook's templates.
default['nut']['notify_command'] = nil
# Polling frequency for normal activities, measured in seconds.
#
# Adjust this to keep upsmon from flooding your network, but don't make
# it too high or it may miss certain short-lived power events.
# upsmon.conf POLLFREQ, in seconds.
default['nut']['poll_frequency'] = 5
# Polling frequency in seconds while UPS on battery.
#
# You can make this number lower than POLLFREQ, which will make updates
# faster when any UPS is running on battery. This is a good way to tune
# network load if you have a lot of these things running.
#
# The default is 5 seconds for both this and POLLFREQ.
# upsmon.conf POLLFREQALERT, in seconds (polling rate while any UPS is
# on battery).
default['nut']['poll_frequency_alert'] = 5
# --------------------------------------------------------------------------
# HOSTSYNC - How long upsmon will wait before giving up on another upsmon
#
# The master upsmon process uses this number when waiting for slaves to
# disconnect once it has set the forced shutdown (FSD) flag. If they
# don't disconnect after this many seconds, it goes on without them.
#
# Similarly, upsmon slave processes wait up to this interval for the
# master upsmon to set FSD when a UPS they are monitoring goes critical -
# that is, on battery and low battery. If the master doesn't do its job,
# the slaves will shut down anyway to avoid damage to the file systems.
#
# This "wait for FSD" is done to avoid races where the status changes
# to critical and back between polls by the master.
# upsmon.conf HOSTSYNC, in seconds (see the explanation above).
default['nut']['host_sync'] = 15
# --------------------------------------------------------------------------
# DEADTIME - Interval to wait before declaring a stale ups "dead"
#
# upsmon requires a UPS to provide status information every few seconds
# (see POLLFREQ and POLLFREQALERT) to keep things updated. If the status
# fetch fails, the UPS is marked stale. If it stays stale for more than
# DEADTIME seconds, the UPS is marked dead.
#
# A dead UPS that was last known to be on battery is assumed to have gone
# to a low battery condition. This may force a shutdown if it is providing
# a critical amount of power to your system.
#
# Note: DEADTIME should be a multiple of POLLFREQ and POLLFREQALERT.
# Otherwise you'll have "dead" UPSes simply because upsmon isn't polling
# them quickly enough. Rule of thumb: take the larger of the two
# POLLFREQ values, and multiply by 3.
# upsmon.conf DEADTIME, in seconds; keep it a multiple of the two poll
# frequencies (see the note above).
default['nut']['deadtime'] = 15
# --------------------------------------------------------------------------
# POWERDOWNFLAG - Flag file for forcing UPS shutdown on the master system
#
# upsmon will create a file with this name in master mode when it's time
# to shut down the load. You should check for this file's existence in
# your shutdown scripts and run 'upsdrvctl shutdown' if it exists.
#
# See the shutdown.txt file in the docs subdirectory for more information.
# upsmon.conf POWERDOWNFLAG: flag file the master creates when it is
# time to shut down the load (see above).
default['nut']['power_down_flag'] = '/etc/killpower'
# --------------------------------------------------------------------------
# NOTIFYMSG - change messages sent by upsmon when certain events occur
#
# You can change the default messages to something else if you like.
#
# NOTIFYMSG <notify type> "message"
#
# NOTIFYMSG ONBATT "UPS %s on battery"
# NOTIFYMSG LOWBATT "UPS %s battery is low"
# NOTIFYMSG FSD "UPS %s: forced shutdown in progress"
# NOTIFYMSG COMMOK "Communications with UPS %s established"
# NOTIFYMSG COMMBAD "Communications with UPS %s lost"
# NOTIFYMSG SHUTDOWN "Auto logout and shutdown proceeding"
# NOTIFYMSG REPLBATT "UPS %s battery needs to be replaced"
# NOTIFYMSG NOCOMM "UPS %s is unavailable"
# NOTIFYMSG NOPARENT "upsmon parent process died - shutdown impossible"
#
# Note that %s is replaced with the identifier of the UPS in question.
#
# Possible values for <notify type>:
#
# ONLINE : UPS is back online
# ONBATT : UPS is on battery
# LOWBATT : UPS has a low battery (if also on battery, it's "critical")
# FSD : UPS is being shutdown by the master (FSD = "Forced Shutdown")
# COMMOK : Communications established with the UPS
# COMMBAD : Communications lost to the UPS
# SHUTDOWN : The system is being shutdown
# REPLBATT : The UPS battery is bad and needs to be replaced
# NOCOMM : A UPS is unavailable (can't be contacted for monitoring)
# NOPARENT : The process that shuts down the system has died (shutdown impossible)
# Per-event NOTIFYMSG overrides (event types are described above).
# nil keeps upsmon's built-in default message for that event.
default['nut']['notifications']['online']['message'] = nil
default['nut']['notifications']['on_battery']['message'] = nil
default['nut']['notifications']['low_battery']['message'] = nil
default['nut']['notifications']['forced_shutdown']['message'] = nil
default['nut']['notifications']['communication_ok']['message'] = nil
default['nut']['notifications']['communication_bad']['message'] = nil
default['nut']['notifications']['shutdown']['message'] = nil
default['nut']['notifications']['replace_battery']['message'] = nil
default['nut']['notifications']['no_communication']['message'] = nil
default['nut']['notifications']['no_parent']['message'] = nil
# --------------------------------------------------------------------------
# NOTIFYFLAG - change behavior of upsmon when NOTIFY events occur
#
# By default, upsmon sends walls (global messages to all logged in users)
# and writes to the syslog when things happen. You can change this.
#
# NOTIFYFLAG <notify type> <flag>[+<flag>][+<flag>] ...
#
# NOTIFYFLAG ONLINE SYSLOG+WALL
# NOTIFYFLAG ONBATT SYSLOG+WALL
# NOTIFYFLAG LOWBATT SYSLOG+WALL
# NOTIFYFLAG FSD SYSLOG+WALL
# NOTIFYFLAG COMMOK SYSLOG+WALL
# NOTIFYFLAG COMMBAD SYSLOG+WALL
# NOTIFYFLAG SHUTDOWN SYSLOG+WALL
# NOTIFYFLAG REPLBATT SYSLOG+WALL
# NOTIFYFLAG NOCOMM SYSLOG+WALL
# NOTIFYFLAG NOPARENT SYSLOG+WALL
#
# Possible values for the flags:
#
# SYSLOG - Write the message in the syslog
# WALL - Write the message to all users on the system
# EXEC - Execute NOTIFYCMD (see above) with the message
# IGNORE - Don't do anything
#
# If you use IGNORE, don't use any other flags on the same line.
# Per-event NOTIFYFLAG values: '+'-joined combinations of SYSLOG, WALL,
# EXEC and IGNORE (flag meanings are described above).
default['nut']['notifications']['online']['flags'] = 'WALL+SYSLOG'
default['nut']['notifications']['on_battery']['flags'] = 'WALL+SYSLOG'
default['nut']['notifications']['low_battery']['flags'] = 'WALL+SYSLOG'
default['nut']['notifications']['forced_shutdown']['flags'] = 'WALL+SYSLOG'
default['nut']['notifications']['communication_ok']['flags'] = 'WALL+SYSLOG'
default['nut']['notifications']['communication_bad']['flags'] = 'WALL+SYSLOG'
default['nut']['notifications']['shutdown']['flags'] = 'WALL+SYSLOG'
default['nut']['notifications']['replace_battery']['flags'] = 'WALL+SYSLOG'
default['nut']['notifications']['no_communication']['flags'] = 'WALL+SYSLOG'
default['nut']['notifications']['no_parent']['flags'] = 'WALL+SYSLOG'
# --------------------------------------------------------------------------
# RBWARNTIME - replace battery warning time in seconds
#
# upsmon will normally warn you about a battery that needs to be replaced
# every 43200 seconds, which is 12 hours. It does this by triggering a
# NOTIFY_REPLBATT which is then handled by the usual notify structure
# you've defined above.
#
# If this number is not to your liking, override it here.
# upsmon.conf RBWARNTIME: seconds between REPLBATT (replace battery)
# warnings; 43200 = 12 hours.
default['nut']['replace_battery_warning_time'] = 43_200
# --------------------------------------------------------------------------
# NOCOMMWARNTIME - no communications warning time in seconds
#
# upsmon will let you know through the usual notify system if it can't
# talk to any of the UPS entries that are defined in this file. It will
# trigger a NOTIFY_NOCOMM by default every 300 seconds unless you
# change the interval with this directive.
# upsmon.conf NOCOMMWARNTIME, in seconds.
default['nut']['no_communications_warning_time'] = 300
# --------------------------------------------------------------------------
# FINALDELAY - last sleep interval before shutting down the system
#
# On a master, upsmon will wait this long after sending the NOTIFY_SHUTDOWN
# before executing your SHUTDOWNCMD. If you need to do something in between
# those events, increase this number. Remember, at this point your UPS is
# almost depleted, so don't make this too high.
#
# Alternatively, you can set this very low so you don't wait around when
# it's time to shut down. Some UPSes don't give much warning for low
# battery and will require a value of 0 here for a safe shutdown.
#
# Note: If FINALDELAY on the slave is greater than HOSTSYNC on the master,
# the master will give up waiting for the slave to disconnect.
# upsmon.conf FINALDELAY, in seconds (see above).
default['nut']['final_delay'] = 5
# upsmon.conf RUN_AS_USER: account to run the NUT daemons as; nil = not
# set, so presumably the packaged default user applies -- confirm in the
# cookbook's templates.
default['nut']['run_as_user'] = nil
# =======================================================================
# MAXAGE <seconds>
# MAXAGE 15
#
# This defaults to 15 seconds. After a UPS driver has stopped updating
# the data for this many seconds, upsd marks it stale and stops making
# that information available to clients. After all, the only thing worse
# than no data is bad data.
#
# You should only use this if your driver has difficulties keeping
# the data fresh within the normal 15 second interval. Watch the syslog
# for notifications from upsd about staleness.
# upsd.conf MAXAGE, in seconds; nil = not set (upsd's default is 15, per
# the note above).
default['nut']['max_age'] = nil
# =======================================================================
# STATEPATH <path>
# STATEPATH /var/run/nut
#
# Tell upsd to look for the driver state sockets in 'path' rather
# than the default that was compiled into the program.
# upsd.conf STATEPATH; nil = not set (the compiled-in default path is
# used, per the note above).
default['nut']['state_path'] = nil
# =======================================================================
# LISTEN <address> [<port>]
# LISTEN 127.0.0.1 3493
# LISTEN ::1 3493
#
# This defaults to the localhost listening addresses and port 3493.
# In case of IP v4 or v6 disabled kernel, only the available one will be used.
#
# You may specify each interface you want upsd to listen on for connections,
# optionally with a port number.
#
# You may need this if you have multiple interfaces on your machine and
# you don't want upsd to listen to all interfaces (for instance on a
# firewall, you may not want to listen to the external interface).
#
# This will only be read at startup of upsd. If you make changes here,
# you'll need to restart upsd, reload will have no effect.
# upsd.conf LISTEN addresses; each entry is an address, optionally
# followed by a port (e.g. "127.0.0.1 3493").
default['nut']['listen'] = ['127.0.0.1']
# =======================================================================
# MAXCONN <connections>
# MAXCONN 1024
#
# This defaults to maximum number allowed on your system. Each UPS, each
# LISTEN address and each client count as one connection. If the server
# runs out of connections, it will no longer accept new incoming client
# connections. Only set this if you know exactly what you're doing.
# upsd.conf MAXCONN; nil = not set (the system maximum applies, per the
# note above).
default['nut']['max_connections'] = nil
# =======================================================================
# CERTFILE <certificate file>
#
# When compiled with SSL support, you can enter the certificate file here.
# The certificates must be in PEM format and must be sorted starting with
# the subject's certificate (server certificate), followed by intermediate
# CA certificates (if applicable) and the highest level (root) CA. It should
# end with the server key. See 'docs/security.txt' or the Security chapter of
# NUT user manual for more information on the SSL support in NUT.
# upsd.conf CERTFILE: PEM file holding the server certificate chain and
# key, used when upsd is built with SSL support (see above).
default['nut']['certificate_file'] = nil
| {
"content_hash": "ae284a0153179833fdbbeedbb8780819",
"timestamp": "",
"source": "github",
"line_count": 354,
"max_line_length": 82,
"avg_line_length": 43.6045197740113,
"alnum_prop": 0.6802928219746048,
"repo_name": "cvisionlabops/cookbooks-nut",
"id": "c7addcfd4dfe4fe6e1509c134e9fdda8c79722f6",
"size": "16128",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "attributes/default.rb",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "5540"
},
{
"name": "Ruby",
"bytes": "28887"
}
],
"symlink_target": ""
} |
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<html>
<head>
<META http-equiv="Content-Type" content="text/html; charset=UTF-8">
<meta content="Apache Forrest" name="Generator">
<meta name="Forrest-version" content="0.8">
<meta name="Forrest-skin-name" content="pelt">
<title>
HOD Scheduler
</title>
<link type="text/css" href="skin/basic.css" rel="stylesheet">
<link media="screen" type="text/css" href="skin/screen.css" rel="stylesheet">
<link media="print" type="text/css" href="skin/print.css" rel="stylesheet">
<link type="text/css" href="skin/profile.css" rel="stylesheet">
<script src="skin/getBlank.js" language="javascript" type="text/javascript"></script><script src="skin/getMenu.js" language="javascript" type="text/javascript"></script><script src="skin/fontsize.js" language="javascript" type="text/javascript"></script>
<link rel="shortcut icon" href="images/favicon.ico">
</head>
<body onload="init()">
<script type="text/javascript">ndeSetTextSize();</script>
<div id="top">
<!--+
|breadtrail
+-->
<div class="breadtrail">
<a href="http://www.apache.org/">Apache</a> > <a href="http://hadoop.apache.org/">Hadoop</a> > <a href="http://hadoop.apache.org/core/">Core</a><script src="skin/breadcrumbs.js" language="JavaScript" type="text/javascript"></script>
</div>
<!--+
|header
+-->
<div class="header">
<!--+
|start group logo
+-->
<div class="grouplogo">
<a href="http://hadoop.apache.org/"><img class="logoImage" alt="Hadoop" src="images/hadoop-logo.jpg" title="Apache Hadoop"></a>
</div>
<!--+
|end group logo
+-->
<!--+
|start Project Logo
+-->
<div class="projectlogo">
<a href="http://hadoop.apache.org/core/"><img class="logoImage" alt="Hadoop" src="images/hadoop-logo-2.gif" title="Scalable Computing Platform"></a>
</div>
<!--+
|end Project Logo
+-->
<!--+
|start Search
+-->
<div class="searchbox">
<form action="http://www.google.com/search" method="get" class="roundtopsmall">
<input value="hadoop.apache.org" name="sitesearch" type="hidden"><input onFocus="getBlank (this, 'Search the site with google');" size="25" name="q" id="query" type="text" value="Search the site with google">
<input name="Search" value="Search" type="submit">
</form>
</div>
<!--+
|end search
+-->
<!--+
|start Tabs
+-->
<ul id="tabs">
<li>
<a class="unselected" href="http://hadoop.apache.org/core/">Project</a>
</li>
<li>
<a class="unselected" href="http://wiki.apache.org/hadoop">Wiki</a>
</li>
<li class="current">
<a class="selected" href="index.html">Hadoop 1.0.1 Documentation</a>
</li>
</ul>
<!--+
|end Tabs
+-->
</div>
</div>
<div id="main">
<div id="publishedStrip">
<!--+
|start Subtabs
+-->
<div id="level2tabs"></div>
<!--+
|end Endtabs
+-->
<script type="text/javascript"><!--
document.write("Last Published: " + document.lastModified);
// --></script>
</div>
<!--+
|breadtrail
+-->
<div class="breadtrail">
</div>
<!--+
|start Menu, mainarea
+-->
<!--+
|start Menu
+-->
<div id="menu">
<div onclick="SwitchMenu('menu_1.1', 'skin/')" id="menu_1.1Title" class="menutitle">Getting Started</div>
<div id="menu_1.1" class="menuitemgroup">
<div class="menuitem">
<a href="index.html">Overview</a>
</div>
<div class="menuitem">
<a href="single_node_setup.html">Single Node Setup</a>
</div>
<div class="menuitem">
<a href="cluster_setup.html">Cluster Setup</a>
</div>
</div>
<div onclick="SwitchMenu('menu_1.2', 'skin/')" id="menu_1.2Title" class="menutitle">Guides</div>
<div id="menu_1.2" class="menuitemgroup">
<div class="menuitem">
<a href="HttpAuthentication.html">Authentication for Hadoop HTTP web-consoles</a>
</div>
</div>
<div onclick="SwitchMenu('menu_selected_1.3', 'skin/')" id="menu_selected_1.3Title" class="menutitle" style="background-image: url('skin/images/chapter_open.gif');">MapReduce</div>
<div id="menu_selected_1.3" class="selectedmenuitemgroup" style="display: block;">
<div class="menuitem">
<a href="mapred_tutorial.html">MapReduce Tutorial</a>
</div>
<div class="menuitem">
<a href="streaming.html">Hadoop Streaming</a>
</div>
<div class="menuitem">
<a href="commands_manual.html">Hadoop Commands</a>
</div>
<div class="menuitem">
<a href="distcp.html">DistCp</a>
</div>
<div class="menuitem">
<a href="vaidya.html">Vaidya</a>
</div>
<div class="menuitem">
<a href="hadoop_archives.html">Hadoop Archives</a>
</div>
<div class="menuitem">
<a href="gridmix.html">Gridmix</a>
</div>
<div class="menuitem">
<a href="capacity_scheduler.html">Capacity Scheduler</a>
</div>
<div class="menuitem">
<a href="fair_scheduler.html">Fair Scheduler</a>
</div>
<div class="menupage">
<div class="menupagetitle">Hod Scheduler</div>
</div>
</div>
<div onclick="SwitchMenu('menu_1.4', 'skin/')" id="menu_1.4Title" class="menutitle">HDFS</div>
<div id="menu_1.4" class="menuitemgroup">
<div class="menuitem">
<a href="hdfs_user_guide.html">HDFS Users </a>
</div>
<div class="menuitem">
<a href="hdfs_design.html">HDFS Architecture</a>
</div>
<div class="menuitem">
<a href="hdfs_permissions_guide.html">Permissions</a>
</div>
<div class="menuitem">
<a href="hdfs_quota_admin_guide.html">Quotas</a>
</div>
<div class="menuitem">
<a href="SLG_user_guide.html">Synthetic Load Generator</a>
</div>
<div class="menuitem">
<a href="webhdfs.html">WebHDFS REST API</a>
</div>
<div class="menuitem">
<a href="libhdfs.html">C API libhdfs</a>
</div>
</div>
<div onclick="SwitchMenu('menu_1.5', 'skin/')" id="menu_1.5Title" class="menutitle">Common</div>
<div id="menu_1.5" class="menuitemgroup">
<div class="menuitem">
<a href="deployment_layout.html">Deployment Layout</a>
</div>
<div class="menuitem">
<a href="file_system_shell.html">File System Shell</a>
</div>
<div class="menuitem">
<a href="service_level_auth.html">Service Level Authorization</a>
</div>
<div class="menuitem">
<a href="native_libraries.html">Native Libraries</a>
</div>
</div>
<div onclick="SwitchMenu('menu_1.6', 'skin/')" id="menu_1.6Title" class="menutitle">Miscellaneous</div>
<div id="menu_1.6" class="menuitemgroup">
<div class="menuitem">
<a href="Secure_Impersonation.html">Secure Impersonation</a>
</div>
<div class="menuitem">
<a href="api/index.html">API Docs</a>
</div>
<div class="menuitem">
<a href="jdiff/changes.html">API Changes</a>
</div>
<div class="menuitem">
<a href="http://wiki.apache.org/hadoop/">Wiki</a>
</div>
<div class="menuitem">
<a href="http://wiki.apache.org/hadoop/FAQ">FAQ</a>
</div>
<div class="menuitem">
<a href="releasenotes.html">Release Notes</a>
</div>
<div class="menuitem">
<a href="changes.html">Change Log</a>
</div>
</div>
<div id="credit"></div>
<div id="roundbottom">
<img style="display: none" class="corner" height="15" width="15" alt="" src="skin/images/rc-b-l-15-1body-2menu-3menu.png"></div>
<!--+
|alternative credits
+-->
<div id="credit2"></div>
</div>
<!--+
|end Menu
+-->
<!--+
|start content
+-->
<div id="content">
<div title="Portable Document Format" class="pdflink">
<a class="dida" href="hod_scheduler.pdf"><img alt="PDF -icon" src="skin/images/pdfdoc.gif" class="skin"><br>
PDF</a>
</div>
<h1>
HOD Scheduler
</h1>
<div id="minitoc-area">
<ul class="minitoc">
<li>
<a href="#Introduction">Introduction</a>
</li>
<li>
<a href="#HOD+Users">HOD Users</a>
<ul class="minitoc">
<li>
<a href="#Getting+Started"> Getting Started</a>
<ul class="minitoc">
<li>
<a href="#A+Typical+HOD+Session">A Typical HOD Session</a>
</li>
<li>
<a href="#Running+Hadoop+Scripts+Using+HOD">Running Hadoop Scripts Using HOD</a>
</li>
</ul>
</li>
<li>
<a href="#HOD+Features"> HOD Features </a>
<ul class="minitoc">
<li>
<a href="#Provisioning+and+Managing+Hadoop+Clusters"> Provisioning and Managing Hadoop Clusters </a>
</li>
<li>
<a href="#Using+a+Tarball+to+Distribute+Hadoop"> Using a Tarball to Distribute Hadoop </a>
</li>
<li>
<a href="#Using+an+External+HDFS"> Using an External HDFS </a>
</li>
<li>
<a href="#Options+for+Configuring+Hadoop"> Options for Configuring Hadoop </a>
</li>
<li>
<a href="#Viewing+Hadoop+Web-UIs"> Viewing Hadoop Web-UIs </a>
</li>
<li>
<a href="#Collecting+and+Viewing+Hadoop+Logs"> Collecting and Viewing Hadoop Logs </a>
</li>
<li>
<a href="#Auto-deallocation+of+Idle+Clusters"> Auto-deallocation of Idle Clusters </a>
</li>
<li>
<a href="#Specifying+Additional+Job+Attributes"> Specifying Additional Job Attributes </a>
</li>
<li>
<a href="#Capturing+HOD+Exit+Codes+in+Torque"> Capturing HOD Exit Codes in Torque </a>
</li>
<li>
<a href="#Command+Line"> Command Line</a>
</li>
<li>
<a href="#Options+Configuring+HOD"> Options Configuring HOD </a>
</li>
</ul>
</li>
<li>
<a href="#Troubleshooting-N104A6"> Troubleshooting </a>
<ul class="minitoc">
<li>
<a href="#HOD+Hangs+During+Allocation">HOD Hangs During Allocation </a>
</li>
<li>
<a href="#HOD+Hangs+During+Deallocation">HOD Hangs During Deallocation </a>
</li>
<li>
<a href="#HOD+Fails+With+an+Error+Code+and+Error+Message">HOD Fails With an Error Code and Error Message </a>
</li>
<li>
<a href="#Hadoop+DFSClient+Warns+with+a%0A++NotReplicatedYetException">Hadoop DFSClient Warns with a
NotReplicatedYetException</a>
</li>
<li>
<a href="#Hadoop+Jobs+Not+Running+on+a+Successfully+Allocated+Cluster"> Hadoop Jobs Not Running on a Successfully Allocated Cluster </a>
</li>
<li>
<a href="#My+Hadoop+Job+Got+Killed"> My Hadoop Job Got Killed </a>
</li>
<li>
<a href="#Hadoop+Job+Fails+with+Message%3A+%27Job+tracker+still+initializing%27"> Hadoop Job Fails with Message: 'Job tracker still initializing' </a>
</li>
<li>
<a href="#The+Exit+Codes+For+HOD+Are+Not+Getting+Into+Torque"> The Exit Codes For HOD Are Not Getting Into Torque </a>
</li>
<li>
<a href="#The+Hadoop+Logs+are+Not+Uploaded+to+HDFS"> The Hadoop Logs are Not Uploaded to HDFS </a>
</li>
<li>
<a href="#Locating+Ringmaster+Logs"> Locating Ringmaster Logs </a>
</li>
<li>
<a href="#Locating+Hodring+Logs"> Locating Hodring Logs </a>
</li>
</ul>
</li>
</ul>
</li>
<li>
<a href="#HOD+Administrators">HOD Administrators</a>
<ul class="minitoc">
<li>
<a href="#Getting+Started-N10781">Getting Started</a>
</li>
<li>
<a href="#Prerequisites">Prerequisites</a>
</li>
<li>
<a href="#Resource+Manager">Resource Manager</a>
</li>
<li>
<a href="#Installing+HOD">Installing HOD</a>
</li>
<li>
<a href="#Configuring+HOD">Configuring HOD</a>
<ul class="minitoc">
<li>
<a href="#Minimal+Configuration">Minimal Configuration</a>
</li>
<li>
<a href="#Advanced+Configuration">Advanced Configuration</a>
</li>
</ul>
</li>
<li>
<a href="#Running+HOD">Running HOD</a>
</li>
<li>
<a href="#Supporting+Tools+and+Utilities">Supporting Tools and Utilities</a>
<ul class="minitoc">
<li>
<a href="#logcondense.py+-+Manage+Log+Files">logcondense.py - Manage Log Files</a>
</li>
<li>
<a href="#checklimits.sh+-+Monitor+Resource+Limits">checklimits.sh - Monitor Resource Limits</a>
</li>
<li>
<a href="#verify-account+Script">verify-account Script</a>
</li>
</ul>
</li>
</ul>
</li>
<li>
<a href="#HOD+Configuration">HOD Configuration</a>
<ul class="minitoc">
<li>
<a href="#Getting+Started-N109E3">Getting Started</a>
</li>
<li>
<a href="#Configuation+Options">Configuration Options</a>
<ul class="minitoc">
<li>
<a href="#common+options">common options</a>
</li>
<li>
<a href="#hod+options">hod options</a>
</li>
<li>
<a href="#resource_manager+options">resource_manager options</a>
</li>
<li>
<a href="#ringmaster+options">ringmaster options</a>
</li>
<li>
<a href="#gridservice-hdfs+options">gridservice-hdfs options</a>
</li>
<li>
<a href="#gridservice-mapred+options">gridservice-mapred options</a>
</li>
<li>
<a href="#hodring+options">hodring options</a>
</li>
</ul>
</li>
</ul>
</li>
</ul>
</div>
<a name="N1000F"></a><a name="Introduction"></a>
<h2 class="h3">Introduction</h2>
<div class="section">
<p>Hadoop On Demand (HOD) is a system for provisioning and managing independent Hadoop MapReduce and
Hadoop Distributed File System (HDFS) instances on a shared cluster of nodes. HOD is a tool that makes it easy
for administrators and users to quickly setup and use Hadoop. HOD is also a very useful tool for Hadoop developers
and testers who need to share a physical cluster for testing their own Hadoop versions. </p>
<p>HOD uses the Torque resource manager to do node allocation. On the allocated nodes, it can start Hadoop
MapReduce and HDFS daemons. It automatically generates the appropriate configuration files (hadoop-site.xml)
for the Hadoop daemons and client. HOD also has the capability to distribute Hadoop to the nodes in the virtual
cluster that it allocates. HOD supports Hadoop from version 0.15 onwards.</p>
</div>
<a name="N1001C"></a><a name="HOD+Users"></a>
<h2 class="h3">HOD Users</h2>
<div class="section">
<p>This section shows users how to get started using HOD, reviews various HOD features and command line options,
and provides detailed troubleshooting help.</p>
<a name="N10025"></a><a name="Getting+Started"></a>
<h3 class="h4"> Getting Started</h3>
<a name="Getting_Started_Using_HOD_0_4" id="Getting_Started_Using_HOD_0_4"></a>
<p>In this section, we shall see a step-by-step introduction on how to use HOD for the most basic operations. Before
following these steps, it is assumed that HOD and its dependent hardware and software components are setup and
configured correctly. This is a step that is generally performed by system administrators of the cluster.</p>
<p>The HOD user interface is a command line utility called <span class="codefrag">hod</span>. It is driven by a configuration file,
that is typically setup for users by system administrators. Users can override this configuration when using
the <span class="codefrag">hod</span>, which is described later in this documentation. The configuration file can be specified in
two ways when using <span class="codefrag">hod</span>, as described below: </p>
<ul>
<li> Specify it on command line, using the -c option. Such as
<span class="codefrag">hod <operation> <required-args> -c path-to-the-configuration-file [other-options]</span>
</li>
<li> Set up an environment variable <em>HOD_CONF_DIR</em> where <span class="codefrag">hod</span> will be run.
This should be pointed to a directory on the local file system, containing a file called <em>hodrc</em>.
Note that this is analogous to the <em>HADOOP_CONF_DIR</em> and <em>hadoop-site.xml</em> file for Hadoop.
If no configuration file is specified on the command line, <span class="codefrag">hod</span> shall look for the <em>HOD_CONF_DIR</em>
environment variable and a <em>hodrc</em> file under that.</li>
</ul>
<p>In examples listed below, we shall not explicitly point to the configuration option, assuming it is correctly specified.</p>
<a name="N10062"></a><a name="A+Typical+HOD+Session"></a>
<h4>A Typical HOD Session</h4>
<a name="HOD_Session" id="HOD_Session"></a>
<p>A typical session of HOD will involve at least three steps: allocate, run hadoop jobs, deallocate. In order to do this,
perform the following steps.</p>
<p>
<strong> Create a Cluster Directory </strong>
</p>
<a name="Create_a_Cluster_Directory" id="Create_a_Cluster_Directory"></a>
<p>The <em>cluster directory</em> is a directory on the local file system where <span class="codefrag">hod</span> will generate the
Hadoop configuration, <em>hadoop-site.xml</em>, corresponding to the cluster it allocates. Pass this directory to the
<span class="codefrag">hod</span> operations as stated below. If the cluster directory passed doesn't already exist, HOD will automatically
try to create it and use it. Once a cluster is allocated, a user can utilize it to run Hadoop jobs by specifying the cluster
directory as the Hadoop --config option. </p>
<p>
<strong>Operation allocate</strong>
</p>
<a name="Operation_allocate" id="Operation_allocate"></a>
<p>The <em>allocate</em> operation is used to allocate a set of nodes and install and provision Hadoop on them.
It has the following syntax. Note that it requires a cluster_dir ( -d, --hod.clusterdir) and the number of nodes
(-n, --hod.nodecount) needed to be allocated:</p>
<pre class="code">$ hod allocate -d cluster_dir -n number_of_nodes [OPTIONS]</pre>
<p>If the command completes successfully, then <span class="codefrag">cluster_dir/hadoop-site.xml</span> will be generated and
will contain information about the allocated cluster. It will also print out the information about the Hadoop web UIs.</p>
<p>An example run of this command produces the following output. Note in this example that <span class="codefrag">~/hod-clusters/test</span>
is the cluster directory, and we are allocating 5 nodes:</p>
<pre class="code">
$ hod allocate -d ~/hod-clusters/test -n 5
INFO - HDFS UI on http://foo1.bar.com:53422
INFO - Mapred UI on http://foo2.bar.com:55380</pre>
<p>
<strong> Running Hadoop jobs using the allocated cluster </strong>
</p>
<a name="Running_Hadoop_jobs_using_the_al" id="Running_Hadoop_jobs_using_the_al"></a>
<p>Now, one can run Hadoop jobs using the allocated cluster in the usual manner. This assumes variables like <em>JAVA_HOME</em>
and path to the Hadoop installation are set up correctly:</p>
<pre class="code">$ hadoop --config cluster_dir hadoop_command hadoop_command_args</pre>
<p>or</p>
<pre class="code">
$ export HADOOP_CONF_DIR=cluster_dir
$ hadoop hadoop_command hadoop_command_args</pre>
<p>Continuing our example, the following command will run a wordcount example on the allocated cluster:</p>
<pre class="code">$ hadoop --config ~/hod-clusters/test jar /path/to/hadoop/hadoop-examples.jar wordcount /path/to/input /path/to/output</pre>
<p>or</p>
<pre class="code">
$ export HADOOP_CONF_DIR=~/hod-clusters/test
$ hadoop jar /path/to/hadoop/hadoop-examples.jar wordcount /path/to/input /path/to/output</pre>
<p>
<strong> Operation deallocate</strong>
</p>
<a name="Operation_deallocate" id="Operation_deallocate"></a>
<p>The <em>deallocate</em> operation is used to release an allocated cluster. When finished with a cluster, deallocate must be
run so that the nodes become free for others to use. The <em>deallocate</em> operation has the following syntax. Note that it
requires the cluster_dir (-d, --hod.clusterdir) argument:</p>
<pre class="code">$ hod deallocate -d cluster_dir</pre>
<p>Continuing our example, the following command will deallocate the cluster:</p>
<pre class="code">$ hod deallocate -d ~/hod-clusters/test</pre>
<p>As can be seen, HOD allows the users to allocate a cluster, and use it flexibly for running Hadoop jobs. For example, users
can run multiple jobs in parallel on the same cluster, by running hadoop from multiple shells pointing to the same configuration.</p>
<a name="N100E4"></a><a name="Running+Hadoop+Scripts+Using+HOD"></a>
<h4>Running Hadoop Scripts Using HOD</h4>
<a name="HOD_Script_Mode" id="HOD_Script_Mode"></a>
<p>The HOD <em>script operation</em> combines the operations of allocating, using and deallocating a cluster into a single operation.
This is very useful for users who want to run a script of hadoop jobs and let HOD handle the cleanup automatically once the script completes.
In order to run hadoop scripts using <span class="codefrag">hod</span>, do the following:</p>
<p>
<strong> Create a script file </strong>
</p>
<a name="Create_a_script_file" id="Create_a_script_file"></a>
<p>This will be a regular shell script that will typically contain hadoop commands, such as:</p>
<pre class="code">$ hadoop jar jar_file options</pre>
<p>However, the user can add any valid commands as part of the script. HOD will execute this script setting <em>HADOOP_CONF_DIR</em>
automatically to point to the allocated cluster. So users do not need to worry about this. The users however need to specify a cluster directory
just like when using the allocate operation.</p>
<p>
<strong> Running the script </strong>
</p>
<a name="Running_the_script" id="Running_the_script"></a>
<p>The syntax for the <em>script operation</em> is as follows. Note that it requires a cluster directory ( -d, --hod.clusterdir), number of
nodes (-n, --hod.nodecount) and a script file (-s, --hod.script):</p>
<pre class="code">$ hod script -d cluster_directory -n number_of_nodes -s script_file</pre>
<p>Note that HOD will deallocate the cluster as soon as the script completes, and this means that the script must not complete until the
hadoop jobs themselves are completed. Users must take care of this while writing the script. </p>
<a name="N1011C"></a><a name="HOD+Features"></a>
<h3 class="h4"> HOD Features </h3>
<a name="HOD_0_4_Features" id="HOD_0_4_Features"></a><a name="N10124"></a><a name="Provisioning+and+Managing+Hadoop+Clusters"></a>
<h4> Provisioning and Managing Hadoop Clusters </h4>
<a name="Provisioning_and_Managing_Hadoop" id="Provisioning_and_Managing_Hadoop"></a>
<p>The primary feature of HOD is to provision Hadoop MapReduce and HDFS clusters. This is described above in the Getting Started section.
Also, as long as nodes are available, and organizational policies allow, a user can use HOD to allocate multiple MapReduce clusters simultaneously.
The user would need to specify different paths for the <span class="codefrag">cluster_dir</span> parameter mentioned above for each cluster he/she allocates.
HOD provides the <em>list</em> and the <em>info</em> operations to enable managing multiple clusters.</p>
<p>
<strong> Operation list</strong>
</p>
<a name="Operation_list" id="Operation_list"></a>
<p>The list operation lists all the clusters allocated so far by a user. The cluster directory where the hadoop-site.xml is stored for the cluster,
and its status vis-a-vis connectivity with the JobTracker and/or HDFS is shown. The list operation has the following syntax:</p>
<pre class="code">$ hod list</pre>
<p>
<strong> Operation info</strong>
</p>
<a name="Operation_info" id="Operation_info"></a>
<p>The info operation shows information about a given cluster. The information shown includes the Torque job id, and locations of the important
daemons like the HOD Ringmaster process, and the Hadoop JobTracker and NameNode daemons. The info operation has the following syntax.
Note that it requires a cluster directory (-d, --hod.clusterdir):</p>
<pre class="code">$ hod info -d cluster_dir</pre>
<p>The <span class="codefrag">cluster_dir</span> should be a valid cluster directory specified in an earlier <em>allocate</em> operation.</p>
<a name="N1015B"></a><a name="Using+a+Tarball+to+Distribute+Hadoop"></a>
<h4> Using a Tarball to Distribute Hadoop </h4>
<a name="Using_a_tarball_to_distribute_Ha" id="Using_a_tarball_to_distribute_Ha"></a>
<p>When provisioning Hadoop, HOD can use either a pre-installed Hadoop on the cluster nodes or distribute and install a Hadoop tarball as part
of the provisioning operation. If the tarball option is being used, there is no need to have a pre-installed Hadoop on the cluster nodes, nor a need
to use a pre-installed one. This is especially useful in a development / QE environment where individual developers may have different versions of
Hadoop to test on a shared cluster. </p>
<p>In order to use a pre-installed Hadoop, you must specify, in the hodrc, the <span class="codefrag">pkgs</span> option in the <span class="codefrag">gridservice-hdfs</span>
and <span class="codefrag">gridservice-mapred</span> sections. This must point to the path where Hadoop is installed on all nodes of the cluster.</p>
<p>The syntax for specifying tarball is as follows:</p>
<pre class="code">$ hod allocate -d cluster_dir -n number_of_nodes -t hadoop_tarball_location</pre>
<p>For example, the following command allocates Hadoop provided by the tarball <span class="codefrag">~/share/hadoop.tar.gz</span>:</p>
<pre class="code">$ hod allocate -d ~/hadoop-cluster -n 10 -t ~/share/hadoop.tar.gz</pre>
<p>Similarly, when using hod script, the syntax is as follows:</p>
<pre class="code">$ hod script -d cluster_directory -s script_file -n number_of_nodes -t hadoop_tarball_location</pre>
<p>The hadoop_tarball specified in the syntax above should point to a path on a shared file system that is accessible from all the compute nodes.
Currently, HOD only supports NFS mounted file systems.</p>
<p>
<em>Note:</em>
</p>
<ul>
<li> For better distribution performance it is recommended that the Hadoop tarball contain only the libraries and binaries, and not the source or documentation.</li>
<li> When you want to run jobs against a cluster allocated using the tarball, you must use a compatible version of hadoop to submit your jobs.
The best would be to untar and use the version that is present in the tarball itself.</li>
<li> You need to make sure that there are no Hadoop configuration files, hadoop-env.sh and hadoop-site.xml, present in the conf directory of the
tarred distribution. The presence of these files with incorrect values could make the cluster allocation to fail.</li>
</ul>
<a name="N1019D"></a><a name="Using+an+External+HDFS"></a>
<h4> Using an External HDFS </h4>
<a name="Using_an_external_HDFS" id="Using_an_external_HDFS"></a>
<p>In typical Hadoop clusters provisioned by HOD, HDFS is already set up statically (without using HOD). This allows data to persist in HDFS after
the HOD provisioned cluster is deallocated. To use a statically configured HDFS, your hodrc must point to an external HDFS. Specifically, set the
following options to the correct values in the section <span class="codefrag">gridservice-hdfs</span> of the hodrc:</p>
<pre class="code">
external = true
host = Hostname of the HDFS NameNode
fs_port = Port number of the HDFS NameNode
info_port = Port number of the HDFS NameNode web UI
</pre>
<p>
<em>Note:</em> You can also enable this option from command line. That is, to use a static HDFS, you will need to say: <br>
</p>
<pre class="code">$ hod allocate -d cluster_dir -n number_of_nodes --gridservice-hdfs.external</pre>
<p>HOD can be used to provision an HDFS cluster as well as a MapReduce cluster, if required. To do so, set the following option in the section
<span class="codefrag">gridservice-hdfs</span> of the hodrc:</p>
<pre class="code">external = false</pre>
<a name="N101C4"></a><a name="Options+for+Configuring+Hadoop"></a>
<h4> Options for Configuring Hadoop </h4>
<a name="Options_for_Configuring_Hadoop" id="Options_for_Configuring_Hadoop"></a>
<p>HOD provides a very convenient mechanism to configure both the Hadoop daemons that it provisions and also the hadoop-site.xml that
it generates on the client side. This is done by specifying Hadoop configuration parameters in either the HOD configuration file, or from the
command line when allocating clusters.</p>
<p>
<strong> Configuring Hadoop Daemons </strong>
</p>
<a name="Configuring_Hadoop_Daemons" id="Configuring_Hadoop_Daemons"></a>
<p>For configuring the Hadoop daemons, you can do the following:</p>
<p>For MapReduce, specify the options as a comma separated list of key-value pairs to the <span class="codefrag">server-params</span> option in the
<span class="codefrag">gridservice-mapred</span> section. Likewise for a dynamically provisioned HDFS cluster, specify the options in the
<span class="codefrag">server-params</span> option in the <span class="codefrag">gridservice-hdfs</span> section. If these parameters should be marked as
<em>final</em>, then include these in the <span class="codefrag">final-server-params</span> option of the appropriate section.</p>
<p>For example:</p>
<pre class="code">
server-params = mapred.reduce.parallel.copies=20,io.sort.factor=100,io.sort.mb=128,io.file.buffer.size=131072
final-server-params = mapred.child.java.opts=-Xmx512m,dfs.block.size=134217728,fs.inmemory.size.mb=128
</pre>
<p>In order to provide the options from command line, you can use the following syntax:</p>
<p>For configuring the MapReduce daemons use:</p>
<pre class="code">$ hod allocate -d cluster_dir -n number_of_nodes -Mmapred.reduce.parallel.copies=20 -Mio.sort.factor=100</pre>
<p>In the example above, the <em>mapred.reduce.parallel.copies</em> parameter and the <em>io.sort.factor</em>
parameter will be appended to the other <span class="codefrag">server-params</span> or if they already exist in <span class="codefrag">server-params</span>,
will override them. In order to specify these are <em>final</em> parameters, you can use:</p>
<pre class="code">$ hod allocate -d cluster_dir -n number_of_nodes -Fmapred.reduce.parallel.copies=20 -Fio.sort.factor=100</pre>
<p>However, note that final parameters cannot be overwritten from command line. They can only be appended if not already specified.</p>
<p>Similar options exist for configuring dynamically provisioned HDFS daemons. For doing so, replace -M with -H and -F with -S.</p>
<p>
<strong> Configuring Hadoop Job Submission (Client) Programs </strong>
</p>
<a name="Configuring_Hadoop_Job_Submissio" id="Configuring_Hadoop_Job_Submissio"></a>
<p>As mentioned above, if the allocation operation completes successfully then <span class="codefrag">cluster_dir/hadoop-site.xml</span> will be generated
and will contain information about the allocated cluster's JobTracker and NameNode. This configuration is used when submitting jobs to the cluster.
HOD provides an option to include additional Hadoop configuration parameters into this file. The syntax for doing so is as follows:</p>
<pre class="code">$ hod allocate -d cluster_dir -n number_of_nodes -Cmapred.userlog.limit.kb=200 -Cmapred.child.java.opts=-Xmx512m</pre>
<p>In this example, the <em>mapred.userlog.limit.kb</em> and <em>mapred.child.java.opts</em> options will be included into
the hadoop-site.xml that is generated by HOD.</p>
<a name="N10233"></a><a name="Viewing+Hadoop+Web-UIs"></a>
<h4> Viewing Hadoop Web-UIs </h4>
<a name="Viewing_Hadoop_Web_UIs" id="Viewing_Hadoop_Web_UIs"></a>
<p>The HOD allocation operation prints the JobTracker and NameNode web UI URLs. For example:</p>
<pre class="code">
$ hod allocate -d ~/hadoop-cluster -n 10 -c ~/hod-conf-dir/hodrc
INFO - HDFS UI on http://host242.foo.com:55391
INFO - Mapred UI on http://host521.foo.com:54874
</pre>
<p>The same information is also available via the <em>info</em> operation described above.</p>
<a name="N10248"></a><a name="Collecting+and+Viewing+Hadoop+Logs"></a>
<h4> Collecting and Viewing Hadoop Logs </h4>
<a name="Collecting_and_Viewing_Hadoop_Lo" id="Collecting_and_Viewing_Hadoop_Lo"></a>
<p>To get the Hadoop logs of the daemons running on one of the allocated nodes: </p>
<ul>
<li> Log into the node of interest. If you want to look at the logs of the JobTracker or NameNode, then you can find the node running these by
using the <em>list</em> and <em>info</em> operations mentioned above.</li>
<li> Get the process information of the daemon of interest (for example, <span class="codefrag">ps ux | grep TaskTracker</span>)</li>
<li> In the process information, search for the value of the variable <span class="codefrag">-Dhadoop.log.dir</span>. Typically this will be a descendant directory
of the <span class="codefrag">hodring.temp-dir</span> value from the hod configuration file.</li>
<li> Change to the <span class="codefrag">hadoop.log.dir</span> directory to view daemon and user logs.</li>
</ul>
<p>HOD also provides a mechanism to collect logs when a cluster is being deallocated and persist them into a file system, or an externally
configured HDFS. By doing so, these logs can be viewed after the jobs are completed and the nodes are released. In order to do so, configure
the log-destination-uri to a URI as follows:</p>
<pre class="code">
log-destination-uri = hdfs://host123:45678/user/hod/logs
log-destination-uri = file://path/to/store/log/files</pre>
<p>Under the root directory specified above in the path, HOD will create a path user_name/torque_jobid and store gzipped log files for each
node that was part of the job.</p>
<p>Note that to store the files to HDFS, you may need to configure the <span class="codefrag">hodring.pkgs</span> option with the Hadoop version that
matches the HDFS mentioned. If not, HOD will try to use the Hadoop version that it is using to provision the Hadoop cluster itself.</p>
<a name="N10284"></a><a name="Auto-deallocation+of+Idle+Clusters"></a>
<h4> Auto-deallocation of Idle Clusters </h4>
<a name="Auto_deallocation_of_Idle_Cluste" id="Auto_deallocation_of_Idle_Cluste"></a>
<p>HOD automatically deallocates clusters that are not running Hadoop jobs for a given period of time. Each HOD allocation includes a
monitoring facility that constantly checks for running Hadoop jobs. If it detects no running Hadoop jobs for a given period, it will automatically
deallocate its own cluster and thus free up nodes which are not being used effectively.</p>
<p>
<em>Note:</em> While the cluster is deallocated, the <em>cluster directory</em> is not cleaned up automatically. The user must
deallocate this cluster through the regular <em>deallocate</em> operation to clean this up.</p>
<a name="N1029A"></a><a name="Specifying+Additional+Job+Attributes"></a>
<h4> Specifying Additional Job Attributes </h4>
<a name="Specifying_Additional_Job_Attrib" id="Specifying_Additional_Job_Attrib"></a>
<p>HOD allows the user to specify a wallclock time and a name (or title) for a Torque job. </p>
<p>The wallclock time is the estimated amount of time for which the Torque job will be valid. After this time has expired, Torque will
automatically delete the job and free up the nodes. Specifying the wallclock time can also help the job scheduler to better schedule
jobs, and help improve utilization of cluster resources.</p>
<p>To specify the wallclock time, use the following syntax:</p>
<pre class="code">$ hod allocate -d cluster_dir -n number_of_nodes -l time_in_seconds</pre>
<p>The name or title of a Torque job helps in user friendly identification of the job. The string specified here will show up in all information
where Torque job attributes are displayed, including the <span class="codefrag">qstat</span> command.</p>
<p>To specify the name or title, use the following syntax:</p>
<pre class="code">$ hod allocate -d cluster_dir -n number_of_nodes -N name_of_job</pre>
<p>
<em>Note:</em> Due to restriction in the underlying Torque resource manager, names which do not start with an alphabet character
or contain a 'space' will cause the job to fail. The failure message points to the problem being in the specified job name.</p>
<a name="N102C1"></a><a name="Capturing+HOD+Exit+Codes+in+Torque"></a>
<h4> Capturing HOD Exit Codes in Torque </h4>
<a name="Capturing_HOD_exit_codes_in_Torq" id="Capturing_HOD_exit_codes_in_Torq"></a>
<p>HOD exit codes are captured in the Torque exit_status field. This will help users and system administrators to distinguish successful
runs from unsuccessful runs of HOD. The exit codes are 0 if allocation succeeded and all hadoop jobs ran on the allocated cluster correctly.
They are non-zero if allocation failed or some of the hadoop jobs failed on the allocated cluster. The exit codes that are possible are
mentioned in the table below. <em>Note: Hadoop job status is captured only if the version of Hadoop used is 0.16 or above.</em>
</p>
<table class="ForrestTable" cellspacing="1" cellpadding="4">
<tr>
<th colspan="1" rowspan="1"> Exit Code </th>
<th colspan="1" rowspan="1"> Meaning </th>
</tr>
<tr>
<td colspan="1" rowspan="1"> 6 </td>
<td colspan="1" rowspan="1"> Ringmaster failure </td>
</tr>
<tr>
<td colspan="1" rowspan="1"> 7 </td>
<td colspan="1" rowspan="1"> HDFS failure </td>
</tr>
<tr>
<td colspan="1" rowspan="1"> 8 </td>
<td colspan="1" rowspan="1"> Job tracker failure </td>
</tr>
<tr>
<td colspan="1" rowspan="1"> 10 </td>
<td colspan="1" rowspan="1"> Cluster dead </td>
</tr>
<tr>
<td colspan="1" rowspan="1"> 12 </td>
<td colspan="1" rowspan="1"> Cluster already allocated </td>
</tr>
<tr>
<td colspan="1" rowspan="1"> 13 </td>
<td colspan="1" rowspan="1"> HDFS dead </td>
</tr>
<tr>
<td colspan="1" rowspan="1"> 14 </td>
<td colspan="1" rowspan="1"> Mapred dead </td>
</tr>
<tr>
<td colspan="1" rowspan="1"> 16 </td>
<td colspan="1" rowspan="1"> All MapReduce jobs that ran on the cluster failed. Refer to hadoop logs for more details. </td>
</tr>
<tr>
<td colspan="1" rowspan="1"> 17 </td>
<td colspan="1" rowspan="1"> Some of the MapReduce jobs that ran on the cluster failed. Refer to hadoop logs for more details. </td>
</tr>
</table>
<a name="N10353"></a><a name="Command+Line"></a>
<h4> Command Line</h4>
<a name="Command_Line" id="Command_Line"></a>
<p>HOD command line has the following general syntax:</p>
<pre class="code">hod &lt;operation&gt; [ARGS] [OPTIONS]</pre>
<p> Allowed operations are 'allocate', 'deallocate', 'info', 'list', 'script' and 'help'. For help with a particular operation do: </p>
<pre class="code">hod help &lt;operation&gt;</pre>
<p>To have a look at possible options do:</p>
<pre class="code">hod help options</pre>
<ul>
<li>
<em>allocate</em>
<br>
<em>Usage : hod allocate -d cluster_dir -n number_of_nodes [OPTIONS]</em>
<br>
Allocates a cluster on the given number of cluster nodes, and store the allocation information in cluster_dir for use with subsequent
<span class="codefrag">hadoop</span> commands. Note that the <span class="codefrag">cluster_dir</span> must exist before running the command.</li>
<li>
<em>list</em>
<br>
<em>Usage : hod list [OPTIONS]</em>
<br>
Lists the clusters allocated by this user. Information provided includes the Torque job id corresponding to the cluster, the cluster
directory where the allocation information is stored, and whether the MapReduce daemon is still active or not.</li>
<li>
<em>info</em>
<br>
<em>Usage : hod info -d cluster_dir [OPTIONS]</em>
<br>
Lists information about the cluster whose allocation information is stored in the specified cluster directory.</li>
<li>
<em>deallocate</em>
<br>
<em>Usage : hod deallocate -d cluster_dir [OPTIONS]</em>
<br>
Deallocates the cluster whose allocation information is stored in the specified cluster directory.</li>
<li>
<em>script</em>
<br>
<em>Usage : hod script -s script_file -d cluster_directory -n number_of_nodes [OPTIONS]</em>
<br>
Runs a hadoop script using the HOD <em>script</em> operation. Provisions Hadoop on a given number of nodes, executes the given
script from the submitting node, and deallocates the cluster when the script completes.</li>
<li>
<em>help</em>
<br>
<em>Usage : hod help [operation | 'options']</em>
<br>
When no argument is specified, <span class="codefrag">hod help</span> gives the usage and basic options, and is equivalent to
<span class="codefrag">hod --help</span> (See below). When 'options' is given as argument, hod displays only the basic options
that hod takes. When an operation is specified, it displays the usage and description corresponding to that particular
operation. For e.g, to know about allocate operation, one can do a <span class="codefrag">hod help allocate</span>
</li>
</ul>
<p>Besides the operations, HOD can take the following command line options.</p>
<ul>
<li>
<em>--help</em>
<br>
Prints out the help message to see the usage and basic options.</li>
<li>
<em>--verbose-help</em>
<br>
All configuration options provided in the hodrc file can be passed on the command line, using the syntax
<span class="codefrag">--section_name.option_name[=value]</span>. When provided this way, the value provided on command line
overrides the option provided in hodrc. The verbose-help command lists all the available options in the hodrc file.
This is also a nice way to see the meaning of the configuration options. <br></li>
</ul>
<p>See <a href="#Options_Configuring_HOD">Options Configuring HOD</a> for a description of most important hod configuration options.
For basic options do <span class="codefrag">hod help options</span> and for all options possible in hod configuration do <span class="codefrag">hod --verbose-help</span>.
See <a href="#HOD+Configuration">HOD Configuration</a> for a description of all options.</p>
<a name="N103E9"></a><a name="Options+Configuring+HOD"></a>
<h4> Options Configuring HOD </h4>
<a name="Options_Configuring_HOD" id="Options_Configuring_HOD"></a>
<p>As described above, HOD is configured using a configuration file that is usually set up by system administrators.
This is an INI-style configuration file that is divided into sections, and options inside each section. Each section relates
to one of the HOD processes: client, ringmaster, hodring, mapreduce or hdfs. The options inside a section consist
of an option name and value. </p>
<p>Users can override the configuration defined in the default configuration in two ways: </p>
<ul>
<li> Users can supply their own configuration file to HOD in each of the commands, using the <span class="codefrag">-c</span> option</li>
<li> Users can supply specific configuration options to HOD. Options provided on the command line <em>override</em>
the values provided in the configuration file being used.</li>
</ul>
<p>This section describes some of the most commonly used configuration options. These commonly used options are
provided with a <em>short</em> option for convenience of specification. All other options can be specified using
a <em>long</em> option that is also described below.</p>
<ul>
<li>
<em>-c config_file</em>
<br>
Provides the configuration file to use. Can be used with all other options of HOD. Alternatively, the
<span class="codefrag">HOD_CONF_DIR</span> environment variable can be defined to specify a directory that contains a file
named <span class="codefrag">hodrc</span>, alleviating the need to specify the configuration file in each HOD command.</li>
<li>
<em>-d cluster_dir</em>
<br>
This is required for most of the hod operations. As described under <a href="#Create_a_Cluster_Directory">Create a Cluster Directory</a>,
the <em>cluster directory</em> is a directory on the local file system where <span class="codefrag">hod</span> will generate the Hadoop configuration,
<em>hadoop-site.xml</em>, corresponding to the cluster it allocates. Pass it to the <span class="codefrag">hod</span> operations as an argument
to -d or --hod.clusterdir. If it doesn't already exist, HOD will automatically try to create it and use it. Once a cluster is allocated, a
user can utilize it to run Hadoop jobs by specifying the cluster directory as the Hadoop --config option.</li>
<li>
<em>-n number_of_nodes</em>
<br>
This is required for the hod 'allocate' operation and for the script operation. This denotes the number of nodes to be allocated.</li>
<li>
<em>-s script-file</em>
<br>
Required when using script operation, specifies the script file to execute.</li>
<li>
<em>-b 1|2|3|4</em>
<br>
Enables the given debug level. Can be used with all other options of HOD. 4 is most verbose.</li>
<li>
<em>-t hadoop_tarball</em>
<br>
Provisions Hadoop from the given tar.gz file. This option is only applicable to the <em>allocate</em> operation. For better
distribution performance it is strongly recommended that the Hadoop tarball is created <em>after</em> removing the source
or documentation.</li>
<li>
<em>-N job-name</em>
<br>
The Name to give to the resource manager job that HOD uses underneath. For e.g. in the case of Torque, this translates to
the <span class="codefrag">qsub -N</span> option, and can be seen as the job name using the <span class="codefrag">qstat</span> command.</li>
<li>
<em>-l wall-clock-time</em>
<br>
The amount of time for which the user expects to have work on the allocated cluster. This is passed to the resource manager
underneath HOD, and can be used in more efficient scheduling and utilization of the cluster. Note that in the case of Torque,
the cluster is automatically deallocated after this time expires.</li>
<li>
<em>-j java-home</em>
<br>
Path to be set to the JAVA_HOME environment variable. This is used in the <em>script</em> operation. HOD sets the
JAVA_HOME environment variable to this value and launches the user script in that environment.</li>
<li>
<em>-A account-string</em>
<br>
Accounting information to pass to underlying resource manager.</li>
<li>
<em>-Q queue-name</em>
<br>
Name of the queue in the underlying resource manager to which the job must be submitted.</li>
<li>
<em>-Mkey1=value1 -Mkey2=value2</em>
<br>
Provides configuration parameters for the provisioned MapReduce daemons (JobTracker and TaskTrackers). A
hadoop-site.xml is generated with these values on the cluster nodes. <br>
<em>Note:</em> Values which have the following characters: space, comma, equal-to, semi-colon need to be
escaped with a '\' character, and need to be enclosed within quotes. You can escape a '\' with a '\' too. </li>
<li>
<em>-Hkey1=value1 -Hkey2=value2</em>
<br>
Provides configuration parameters for the provisioned HDFS daemons (NameNode and DataNodes). A hadoop-site.xml
is generated with these values on the cluster nodes <br>
<em>Note:</em> Values which have the following characters: space, comma, equal-to, semi-colon need to be
escaped with a '\' character, and need to be enclosed within quotes. You can escape a '\' with a '\' too. </li>
<li>
<em>-Ckey1=value1 -Ckey2=value2</em>
<br>
Provides configuration parameters for the client from where jobs can be submitted. A hadoop-site.xml is generated
with these values on the submit node. <br>
<em>Note:</em> Values which have the following characters: space, comma, equal-to, semi-colon need to be
escaped with a '\' character, and need to be enclosed within quotes. You can escape a '\' with a '\' too. </li>
<li>
<em>--section-name.option-name=value</em>
<br>
This is the method to provide options using the <em>long</em> format. For e.g. you could say <em>--hod.script-wait-time=20</em>
</li>
</ul>
<a name="N104A6"></a><a name="Troubleshooting-N104A6"></a>
<h3 class="h4"> Troubleshooting </h3>
<a name="Troubleshooting" id="Troubleshooting"></a>
<p>The following section identifies some of the most likely error conditions users can run into when using HOD and ways to trouble-shoot them</p>
<a name="N104B1"></a><a name="HOD+Hangs+During+Allocation"></a>
<h4>HOD Hangs During Allocation </h4>
<a name="_hod_Hangs_During_Allocation" id="_hod_Hangs_During_Allocation"></a><a name="hod_Hangs_During_Allocation" id="hod_Hangs_During_Allocation"></a>
<p>
<em>Possible Cause:</em> One of the HOD or Hadoop components have failed to come up. In such a case, the
<span class="codefrag">hod</span> command will return after a few minutes (typically 2-3 minutes) with an error code of either 7 or 8
as defined in the Error Codes section. Refer to that section for further details. </p>
<p>
<em>Possible Cause:</em> A large allocation is fired with a tarball. Sometimes due to load in the network, or on
the allocated nodes, the tarball distribution might be significantly slow and take a couple of minutes to come back.
Wait for completion. Also check that the tarball does not have the Hadoop sources or documentation.</p>
<p>
<em>Possible Cause:</em> A Torque related problem. If the cause is Torque related, the <span class="codefrag">hod</span>
command will not return for more than 5 minutes. Running <span class="codefrag">hod</span> in debug mode may show the
<span class="codefrag">qstat</span> command being executed repeatedly. Executing the <span class="codefrag">qstat</span> command from
a separate shell may show that the job is in the <span class="codefrag">Q</span> (Queued) state. This usually indicates a
problem with Torque. Possible causes could include some nodes being down, or new nodes added that Torque
is not aware of. Generally, system administrator help is needed to resolve this problem.</p>
<a name="N104DD"></a><a name="HOD+Hangs+During+Deallocation"></a>
<h4>HOD Hangs During Deallocation </h4>
<a name="_hod_Hangs_During_Deallocation" id="_hod_Hangs_During_Deallocation"></a><a name="hod_Hangs_During_Deallocation" id="hod_Hangs_During_Deallocation"></a>
<p>
<em>Possible Cause:</em> A Torque related problem, usually load on the Torque server, or the allocation is very large.
Generally, waiting for the command to complete is the only option.</p>
<a name="N104ED"></a><a name="HOD+Fails+With+an+Error+Code+and+Error+Message"></a>
<h4>HOD Fails With an Error Code and Error Message </h4>
<a name="hod_Fails_With_an_error_code_and" id="hod_Fails_With_an_error_code_and"></a><a name="_hod_Fails_With_an_error_code_an" id="_hod_Fails_With_an_error_code_an"></a>
<p>If the exit code of the <span class="codefrag">hod</span> command is not <span class="codefrag">0</span>, then refer to the following table
of error exit codes to determine why the code may have occurred and how to debug the situation.</p>
<p>
<strong> Error Codes </strong>
</p>
<a name="Error_Codes" id="Error_Codes"></a>
<table class="ForrestTable" cellspacing="1" cellpadding="4">
<tr>
<th colspan="1" rowspan="1">Error Code</th>
<th colspan="1" rowspan="1">Meaning</th>
<th colspan="1" rowspan="1">Possible Causes and Remedial Actions</th>
</tr>
<tr>
<td colspan="1" rowspan="1"> 1 </td>
<td colspan="1" rowspan="1"> Configuration error </td>
<td colspan="1" rowspan="1"> Incorrect configuration values specified in hodrc, or other errors related to HOD configuration.
The error messages in this case must be sufficient to debug and fix the problem. </td>
</tr>
<tr>
<td colspan="1" rowspan="1"> 2 </td>
<td colspan="1" rowspan="1"> Invalid operation </td>
<td colspan="1" rowspan="1"> Do <span class="codefrag">hod help</span> for the list of valid operations. </td>
</tr>
<tr>
<td colspan="1" rowspan="1"> 3 </td>
<td colspan="1" rowspan="1"> Invalid operation arguments </td>
<td colspan="1" rowspan="1"> Do <span class="codefrag">hod help operation</span> for listing the usage of a particular operation.</td>
</tr>
<tr>
<td colspan="1" rowspan="1"> 4 </td>
<td colspan="1" rowspan="1"> Scheduler failure </td>
<td colspan="1" rowspan="1"> 1. Requested more resources than available. Run <span class="codefrag">checknodes cluster_name</span> to see if enough nodes are available. <br>
2. Requested resources exceed resource manager limits. <br>
3. Torque is misconfigured, the path to Torque binaries is misconfigured, or other Torque problems. Contact system administrator. </td>
</tr>
<tr>
<td colspan="1" rowspan="1"> 5 </td>
<td colspan="1" rowspan="1"> Job execution failure </td>
<td colspan="1" rowspan="1"> 1. Torque Job was deleted from outside. Execute the Torque <span class="codefrag">qstat</span> command to see if you have any jobs in the
<span class="codefrag">R</span> (Running) state. If none exist, try re-executing HOD. <br>
2. Torque problems such as the server momentarily going down, or becoming unresponsive. Contact system administrator. <br>
3. The system administrator might have configured account verification, and an invalid account is specified. Contact system administrator.</td>
</tr>
<tr>
<td colspan="1" rowspan="1"> 6 </td>
<td colspan="1" rowspan="1"> Ringmaster failure </td>
<td colspan="1" rowspan="1"> HOD prints the message "Cluster could not be allocated because of the following errors on the ringmaster host &lt;hostname&gt;".
The actual error message may indicate one of the following:<br>
1. Invalid configuration on the node running the ringmaster, specified by the hostname in the error message.<br>
2. Invalid configuration in the <span class="codefrag">ringmaster</span> section,<br>
3. Invalid <span class="codefrag">pkgs</span> option in <span class="codefrag">gridservice-mapred or gridservice-hdfs</span> section,<br>
4. An invalid hadoop tarball, or a tarball which has bundled an invalid configuration file in the conf directory,<br>
5. Mismatched version in Hadoop between the MapReduce and an external HDFS.<br>
The Torque <span class="codefrag">qstat</span> command will most likely show a job in the <span class="codefrag">C</span> (Completed) state. <br>
One can login to the ringmaster host as given by HOD failure message and debug the problem with the help of the error message.
If the error message doesn't give complete information, ringmaster logs should help finding out the root cause of the problem.
Refer to the section <em>Locating Ringmaster Logs</em> below for more information. </td>
</tr>
<tr>
<td colspan="1" rowspan="1"> 7 </td>
<td colspan="1" rowspan="1"> HDFS failure </td>
<td colspan="1" rowspan="1"> When HOD fails to allocate due to HDFS failures (or Job tracker failures, error code 8, see below), it prints a failure message
"Hodring at &lt;hostname&gt; failed with the following errors:" and then gives the actual error message, which may indicate one of the following:<br>
1. Problem in starting Hadoop clusters. Usually the actual cause in the error message will indicate the problem on the hostname mentioned.
Also, review the Hadoop related configuration in the HOD configuration files. Look at the Hadoop logs using information specified in
<em>Collecting and Viewing Hadoop Logs</em> section above. <br>
2. Invalid configuration on the node running the hodring, specified by the hostname in the error message <br>
3. Invalid configuration in the <span class="codefrag">hodring</span> section of hodrc. <span class="codefrag">ssh</span> to the hostname specified in the
error message and grep for <span class="codefrag">ERROR</span> or <span class="codefrag">CRITICAL</span> in hodring logs. Refer to the section
<em>Locating Hodring Logs</em> below for more information. <br>
4. Invalid tarball specified which is not packaged correctly. <br>
5. Cannot communicate with an externally configured HDFS.<br>
When such HDFS or Job tracker failure occurs, one can login into the host with hostname mentioned in HOD failure message and debug the problem.
While fixing the problem, one should also review other log messages in the ringmaster log to see which other machines also might have had problems
bringing up the jobtracker/namenode, apart from the hostname that is reported in the failure message. This possibility of other machines also having problems
occurs because HOD continues to try and launch hadoop daemons on multiple machines one after another depending upon the value of the configuration
variable <a href="hod_scheduler.html#ringmaster+options">ringmaster.max-master-failures</a>.
See <a href="hod_scheduler.html#Locating+Ringmaster+Logs">Locating Ringmaster Logs</a> for more information.</td>
</tr>
<tr>
<td colspan="1" rowspan="1"> 8 </td>
<td colspan="1" rowspan="1"> Job tracker failure </td>
<td colspan="1" rowspan="1"> Similar to the causes in the <em>HDFS failure</em> case. </td>
</tr>
<tr>
<td colspan="1" rowspan="1"> 10 </td>
<td colspan="1" rowspan="1"> Cluster dead </td>
<td colspan="1" rowspan="1"> 1. Cluster was auto-deallocated because it was idle for a long time. <br>
2. Cluster was auto-deallocated because the wallclock time specified by the system administrator or user was exceeded. <br>
3. Cannot communicate with the JobTracker and HDFS NameNode which were successfully allocated. Deallocate the cluster, and allocate again. </td>
</tr>
<tr>
<td colspan="1" rowspan="1"> 12 </td>
<td colspan="1" rowspan="1"> Cluster already allocated </td>
<td colspan="1" rowspan="1"> The cluster directory specified has been used in a previous allocate operation and is not yet deallocated.
Specify a different directory, or deallocate the previous allocation first. </td>
</tr>
<tr>
<td colspan="1" rowspan="1"> 13 </td>
<td colspan="1" rowspan="1"> HDFS dead </td>
<td colspan="1" rowspan="1"> Cannot communicate with the HDFS NameNode. HDFS NameNode went down. </td>
</tr>
<tr>
<td colspan="1" rowspan="1"> 14 </td>
<td colspan="1" rowspan="1"> Mapred dead </td>
<td colspan="1" rowspan="1"> 1. Cluster was auto-deallocated because it was idle for a long time. <br>
2. Cluster was auto-deallocated because the wallclock time specified by the system administrator or user was exceeded. <br>
3. Cannot communicate with the MapReduce JobTracker. JobTracker node went down. <br>
</td>
</tr>
<tr>
<td colspan="1" rowspan="1"> 15 </td>
<td colspan="1" rowspan="1"> Cluster not allocated </td>
<td colspan="1" rowspan="1"> An operation which requires an allocated cluster is given a cluster directory with no state information. </td>
</tr>
<tr>
<td colspan="1" rowspan="1"> Any non-zero exit code </td>
<td colspan="1" rowspan="1"> HOD script error </td>
<td colspan="1" rowspan="1"> If the hod script option was used, it is likely that the exit code is from the script. Unfortunately, this could clash with the
exit codes of the hod command itself. In order to help users differentiate these two, hod writes the script's exit code to a file
called script.exitcode in the cluster directory, if the script returned an exit code. You can cat this file to determine the script's
exit code. If it does not exist, then it is a hod command exit code.</td>
</tr>
</table>
<a name="N10682"></a><a name="Hadoop+DFSClient+Warns+with+a%0A++NotReplicatedYetException"></a>
<h4>Hadoop DFSClient Warns with a
NotReplicatedYetException</h4>
<p>Sometimes, when you try to upload a file to the HDFS immediately after
allocating a HOD cluster, DFSClient warns with a NotReplicatedYetException. It
usually shows a message something like - </p>
<pre class="code">
WARN hdfs.DFSClient: NotReplicatedYetException sleeping &lt;filename&gt; retries left 3
08/01/25 16:31:40 INFO hdfs.DFSClient: org.apache.hadoop.ipc.RemoteException: java.io.IOException:
File &lt;filename&gt; could only be replicated to 0 nodes, instead of 1</pre>
<p> This scenario arises when you try to upload a file
to the HDFS while the DataNodes are still in the process of contacting the
NameNode. This can be resolved by waiting for some time before uploading a new
file to the HDFS, so that enough DataNodes start and contact the NameNode.</p>
<a name="N10692"></a><a name="Hadoop+Jobs+Not+Running+on+a+Successfully+Allocated+Cluster"></a>
<h4> Hadoop Jobs Not Running on a Successfully Allocated Cluster </h4>
<a name="Hadoop_Jobs_Not_Running_on_a_Suc" id="Hadoop_Jobs_Not_Running_on_a_Suc"></a>
<p>This scenario generally occurs when a cluster is allocated, and is left inactive for some time, and then hadoop jobs
are attempted to be run on them. Then Hadoop jobs fail with the following exception:</p>
<pre class="code">08/01/25 16:31:40 INFO ipc.Client: Retrying connect to server: foo.bar.com/1.1.1.1:53567. Already tried 1 time(s).</pre>
<p>
<em>Possible Cause:</em> No Hadoop jobs were run for a significant portion of time. Thus the cluster would have got
deallocated as described in the section <em>Auto-deallocation of Idle Clusters</em>. Deallocate the cluster and allocate it again.</p>
<p>
<em>Possible Cause:</em> The wallclock limit specified by the Torque administrator or the <span class="codefrag">-l</span> option
defined in the section <em>Specifying Additional Job Attributes</em> was exceeded since allocation time. Thus the cluster
would have got released. Deallocate the cluster and allocate it again.</p>
<p>
<em>Possible Cause:</em> There is a version mismatch between the version of the hadoop being used in provisioning
(typically via the tarball option) and the external HDFS. Ensure compatible versions are being used.</p>
<p>
<em>Possible Cause:</em> There is a version mismatch between the version of the hadoop client being used to submit
jobs and the hadoop used in provisioning (typically via the tarball option). Ensure compatible versions are being used.</p>
<p>
<em>Possible Cause:</em> You used one of the options for specifying Hadoop configuration <span class="codefrag">-M or -H</span>,
which had special characters like space or comma that were not escaped correctly. Refer to the section
<em>Options Configuring HOD</em> for checking how to specify such options correctly.</p>
<a name="N106C9"></a><a name="My+Hadoop+Job+Got+Killed"></a>
<h4> My Hadoop Job Got Killed </h4>
<a name="My_Hadoop_Job_Got_Killed" id="My_Hadoop_Job_Got_Killed"></a>
<p>
<em>Possible Cause:</em> The wallclock limit specified by the Torque administrator or the <span class="codefrag">-l</span>
option defined in the section <em>Specifying Additional Job Attributes</em> was exceeded since allocation time.
Thus the cluster would have got released. Deallocate the cluster and allocate it again, this time with a larger wallclock time.</p>
<p>
<em>Possible Cause:</em> Problems with the JobTracker node. Refer to the section in <em>Collecting and Viewing Hadoop Logs</em> to get more information.</p>
<a name="N106E4"></a><a name="Hadoop+Job+Fails+with+Message%3A+%27Job+tracker+still+initializing%27"></a>
<h4> Hadoop Job Fails with Message: 'Job tracker still initializing' </h4>
<a name="Hadoop_Job_Fails_with_Message_Jo" id="Hadoop_Job_Fails_with_Message_Jo"></a>
<p>
<em>Possible Cause:</em> The hadoop job was being run as part of the HOD script command, and it started before the JobTracker could come up fully.
Allocate the cluster using a large value for the configuration option <span class="codefrag">--hod.script-wait-time</span>.
Typically a value of 120 should work, though it is typically unnecessary to be that large.</p>
<a name="N106F4"></a><a name="The+Exit+Codes+For+HOD+Are+Not+Getting+Into+Torque"></a>
<h4> The Exit Codes For HOD Are Not Getting Into Torque </h4>
<a name="The_Exit_Codes_For_HOD_Are_Not_G" id="The_Exit_Codes_For_HOD_Are_Not_G"></a>
<p>
<em>Possible Cause:</em> Version 0.16 of hadoop is required for this functionality to work.
The version of Hadoop used does not match. Use the required version of Hadoop.</p>
<p>
<em>Possible Cause:</em> The deallocation was done without using the <span class="codefrag">hod</span>
command; for e.g. directly using <span class="codefrag">qdel</span>. When the cluster is deallocated in this manner,
the HOD processes are terminated using signals. This results in the exit code to be based on the
signal number, rather than the exit code of the program.</p>
<a name="N1070C"></a><a name="The+Hadoop+Logs+are+Not+Uploaded+to+HDFS"></a>
<h4> The Hadoop Logs are Not Uploaded to HDFS </h4>
<a name="The_Hadoop_Logs_are_Not_Uploaded" id="The_Hadoop_Logs_are_Not_Uploaded"></a>
<p>
<em>Possible Cause:</em> There is a version mismatch between the version of the hadoop being used for uploading the logs
and the external HDFS. Ensure that the correct version is specified in the <span class="codefrag">hodring.pkgs</span> option.</p>
<a name="N1071C"></a><a name="Locating+Ringmaster+Logs"></a>
<h4> Locating Ringmaster Logs </h4>
<a name="Locating_Ringmaster_Logs" id="Locating_Ringmaster_Logs"></a>
<p>To locate the ringmaster logs, follow these steps: </p>
<ul>
<li> Execute hod in the debug mode using the -b option. This will print the Torque job id for the current run.</li>
<li> Execute <span class="codefrag">qstat -f torque_job_id</span> and look up the value of the <span class="codefrag">exec_host</span> parameter in the output.
The first host in this list is the ringmaster node.</li>
<li> Login to this node.</li>
<li> The ringmaster log location is specified by the <span class="codefrag">ringmaster.log-dir</span> option in the hodrc. The name of the log file will be
<span class="codefrag">username.torque_job_id/ringmaster-main.log</span>.</li>
<li> If you don't get enough information, you may want to set the ringmaster debug level to 4. This can be done by passing
<span class="codefrag">--ringmaster.debug 4</span> to the hod command line.</li>
</ul>
<a name="N10748"></a><a name="Locating+Hodring+Logs"></a>
<h4> Locating Hodring Logs </h4>
<a name="Locating_Hodring_Logs" id="Locating_Hodring_Logs"></a>
<p>To locate hodring logs, follow the steps below: </p>
<ul>
<li> Execute hod in the debug mode using the -b option. This will print the Torque job id for the current run.</li>
<li> Execute <span class="codefrag">qstat -f torque_job_id</span> and look up the value of the <span class="codefrag">exec_host</span> parameter in the output.
All nodes in this list should have a hodring on them.</li>
<li> Login to any of these nodes.</li>
<li> The hodring log location is specified by the <span class="codefrag">hodring.log-dir</span> option in the hodrc. The name of the log file will be
<span class="codefrag">username.torque_job_id/hodring-main.log</span>.</li>
<li> If you don't get enough information, you may want to set the hodring debug level to 4. This can be done by passing
<span class="codefrag">--hodring.debug 4</span> to the hod command line.</li>
</ul>
</div>
<!-- HOD ADMINISTRATORS -->
<a name="N10778"></a><a name="HOD+Administrators"></a>
<h2 class="h3">HOD Administrators</h2>
<div class="section">
<p>This section show administrators how to install, configure and run HOD.</p>
<a name="N10781"></a><a name="Getting+Started-N10781"></a>
<h3 class="h4">Getting Started</h3>
<p>The basic system architecture of HOD includes these components:</p>
<ul>
<li>A Resource manager, possibly together with a scheduler (see <a href="hod_scheduler.html#Prerequisites"> Prerequisites</a>) </li>
<li>Various HOD components</li>
<li>Hadoop MapReduce and HDFS daemons</li>
</ul>
<p>
HOD provisions and maintains Hadoop MapReduce and, optionally, HDFS instances
through interaction with the above components on a given cluster of nodes. A cluster of
nodes can be thought of as comprising two sets of nodes:</p>
<ul>
<li>Submit nodes: Users use the HOD client on these nodes to allocate clusters, and then
use the Hadoop client to submit Hadoop jobs. </li>
<li>Compute nodes: Using the resource manager, HOD components are run on these nodes to
provision the Hadoop daemons. After that Hadoop jobs run on them.</li>
</ul>
<p>
Here is a brief description of the sequence of operations in allocating a cluster and
running jobs on them.
</p>
<ul>
<li>The user uses the HOD client on the Submit node to allocate a desired number of
cluster nodes and to provision Hadoop on them.</li>
<li>The HOD client uses a resource manager interface (qsub, in Torque) to submit a HOD
process, called the RingMaster, as a Resource Manager job, to request the user's desired number
of nodes. This job is submitted to the central server of the resource manager (pbs_server, in Torque).</li>
<li>On the compute nodes, the resource manager slave daemons (pbs_moms in Torque) accept
and run jobs that they are assigned by the central server (pbs_server in Torque). The RingMaster
process is started on one of the compute nodes (mother superior, in Torque).</li>
<li>The RingMaster then uses another resource manager interface (pbsdsh, in Torque) to run
the second HOD component, HodRing, as distributed tasks on each of the compute
nodes allocated.</li>
<li>The HodRings, after initializing, communicate with the RingMaster to get Hadoop commands,
and run them accordingly. Once the Hadoop commands are started, they register with the RingMaster,
giving information about the daemons.</li>
<li>All the configuration files needed for Hadoop instances are generated by HOD itself,
some obtained from options given by user in its own configuration file.</li>
<li>The HOD client keeps communicating with the RingMaster to find out the location of the
JobTracker and HDFS daemons.</li>
</ul>
<a name="N107C2"></a><a name="Prerequisites"></a>
<h3 class="h4">Prerequisites</h3>
<p>To use HOD, your system should include the following components.</p>
<ul>
<li>Operating System: HOD is currently tested on RHEL4.</li>
<li>Nodes: HOD requires a minimum of three nodes configured through a resource manager.</li>
<li>Software: The following components must be installed on ALL nodes before using HOD:
<ul>
<li>
<a href="http://www.clusterresources.com/pages/products/torque-resource-manager.php">Torque: Resource manager</a>
</li>
<li>
<a href="http://www.python.org">Python</a> : HOD requires version 2.5.1 of Python.</li>
</ul>
</li>
<li>Software (optional): The following components are optional and can be installed to obtain better
functionality from HOD:
<ul>
<li>
<a href="http://twistedmatrix.com/trac/">Twisted Python</a>: This can be
used for improving the scalability of HOD. If this module is detected to be
installed, HOD uses it, else it falls back to default modules.</li>
<li>
<a href="http://hadoop.apache.org">Hadoop</a>: HOD can automatically
distribute Hadoop to all nodes in the cluster. However, it can also use a
pre-installed version of Hadoop, if it is available on all nodes in the cluster.
HOD currently supports Hadoop 0.15 and above.</li>
</ul>
</li>
</ul>
<p>Note: HOD configuration requires the location of installs of these
components to be the same on all nodes in the cluster. It will also
make the configuration simpler to have the same location on the submit
nodes.
</p>
<a name="N107F9"></a><a name="Resource+Manager"></a>
<h3 class="h4">Resource Manager</h3>
<p> Currently HOD works with the Torque resource manager, which it uses for its node
allocation and job submission. Torque is an open source resource manager from
<a href="http://www.clusterresources.com">Cluster Resources</a>, a community effort
based on the PBS project. It provides control over batch jobs and distributed compute nodes. Torque is
freely available for download from <a href="http://www.clusterresources.com/downloads/torque/">here</a>.
</p>
<p> All documentation related to torque can be seen under
the section TORQUE Resource Manager <a href="http://www.clusterresources.com/pages/resources/documentation.php">here</a>. You can
get wiki documentation from <a href="http://www.clusterresources.com/wiki/doku.php?id=torque:torque_wiki">here</a>.
Users may wish to subscribe to TORQUE’s mailing list or view the archive for questions,
comments <a href="http://www.clusterresources.com/pages/resources/mailing-lists.php">here</a>.
</p>
<p>To use HOD with Torque:</p>
<ul>
<li>Install Torque components: pbs_server on one node (head node), pbs_mom on all
compute nodes, and PBS client tools on all compute nodes and submit
nodes. Perform at least a basic configuration so that the Torque system is up and
running, that is, pbs_server knows which machines to talk to. Look <a href="http://www.clusterresources.com/wiki/doku.php?id=torque:1.2_basic_configuration">here</a>
for basic configuration.
For advanced configuration, see <a href="http://www.clusterresources.com/wiki/doku.php?id=torque:1.3_advanced_configuration">here</a>
</li>
<li>Create a queue for submitting jobs on the pbs_server. The name of the queue is the
same as the HOD configuration parameter, resource-manager.queue. The HOD client uses this queue to
submit the RingMaster process as a Torque job.</li>
<li>Specify a cluster name as a property for all nodes in the cluster.
This can be done by using the qmgr command. For example:
<span class="codefrag">qmgr -c "set node node properties=cluster-name"</span>. The name of the cluster is the same as
the HOD configuration parameter, hod.cluster. </li>
<li>Make sure that jobs can be submitted to the nodes. This can be done by
using the qsub command. For example:
<span class="codefrag">echo "sleep 30" | qsub -l nodes=3</span>
</li>
</ul>
<a name="N10838"></a><a name="Installing+HOD"></a>
<h3 class="h4">Installing HOD</h3>
<p>Once the resource manager is set up, you can obtain and
install HOD.</p>
<ul>
<li>If you are getting HOD from the Hadoop tarball, it is available under the
'contrib' section of Hadoop, under the root directory 'hod'.</li>
<li>If you are building from source, you can run ant tar from the Hadoop root
directory to generate the Hadoop tarball, and then get HOD from there,
as described above.</li>
<li>Distribute the files under this directory to all the nodes in the
cluster. Note that the location where the files are copied should be
the same on all the nodes.</li>
<li>Note that compiling hadoop would build HOD with appropriate permissions
set on all the required script files in HOD.</li>
</ul>
<a name="N10851"></a><a name="Configuring+HOD"></a>
<h3 class="h4">Configuring HOD</h3>
<p>You can configure HOD once it is installed. The minimal configuration needed
to run HOD is described below. More advanced configuration options are discussed
in the HOD Configuration.</p>
<a name="N1085A"></a><a name="Minimal+Configuration"></a>
<h4>Minimal Configuration</h4>
<p>To get started using HOD, the following minimal configuration is
required:</p>
<ul>
<li>On the node from where you want to run HOD, edit the file hodrc
located in the <install dir>/conf directory. This file
contains the minimal set of values required to run hod.</li>
<li>
<p>Specify values suitable to your environment for the following
variables defined in the configuration file. Note that some of these
variables are defined at more than one place in the file.</p>
<ul>
<li>${JAVA_HOME}: Location of Java for Hadoop. Hadoop supports Sun JDK
1.6.x and above.</li>
<li>${CLUSTER_NAME}: Name of the cluster which is specified in the
'node property' as mentioned in resource manager configuration.</li>
<li>${HADOOP_HOME}: Location of Hadoop installation on the compute and
submit nodes.</li>
<li>${RM_QUEUE}: Queue configured for submitting jobs in the resource
manager configuration.</li>
<li>${RM_HOME}: Location of the resource manager installation on the
compute and submit nodes.</li>
</ul>
</li>
<li>
<p>The following environment variables may need to be set depending on
your environment. These variables must be defined where you run the
HOD client and must also be specified in the HOD configuration file as the
value of the key resource_manager.env-vars. Multiple variables can be
specified as a comma separated list of key=value pairs.</p>
<ul>
<li>HOD_PYTHON_HOME: If you install python to a non-default location
of the compute nodes, or submit nodes, then this variable must be
defined to point to the python executable in the non-standard
location.</li>
</ul>
</li>
</ul>
<a name="N1088E"></a><a name="Advanced+Configuration"></a>
<h4>Advanced Configuration</h4>
<p> You can review and modify other configuration options to suit
your specific needs. See <a href="#HOD+Configuration">HOD Configuration</a> for more information.</p>
<a name="N1089D"></a><a name="Running+HOD"></a>
<h3 class="h4">Running HOD</h3>
<p>You can run HOD once it is configured. Refer to <a href="#HOD+Users"> HOD Users</a> for more information.</p>
<a name="N108AB"></a><a name="Supporting+Tools+and+Utilities"></a>
<h3 class="h4">Supporting Tools and Utilities</h3>
<p>This section describes supporting tools and utilities that can be used to
manage HOD deployments.</p>
<a name="N108B4"></a><a name="logcondense.py+-+Manage+Log+Files"></a>
<h4>logcondense.py - Manage Log Files</h4>
<p>As mentioned under
<a href="hod_scheduler.html#Collecting+and+Viewing+Hadoop+Logs">Collecting and Viewing Hadoop Logs</a>,
HOD can be configured to upload
Hadoop logs to a statically configured HDFS. Over time, the number of logs uploaded
to HDFS could increase. logcondense.py is a tool that helps
administrators to remove log files uploaded to HDFS. </p>
<a name="N108C1"></a><a name="Running+logcondense.py"></a>
<h5>Running logcondense.py</h5>
<p>logcondense.py is available under hod_install_location/support folder. You can either
run it using python, for example, <em>python logcondense.py</em>, or give execute permissions
to the file, and directly run it as <em>logcondense.py</em>. logcondense.py needs to be
run by a user who has sufficient permissions to remove files from locations where log
files are uploaded in the HDFS, if permissions are enabled. For example as mentioned under
<a href="hod_scheduler.html#hodring+options">hodring options</a>, the logs could
be configured to come under the user's home directory in HDFS. In that case, the user
running logcondense.py should have super user privileges to remove the files from under
all user home directories.</p>
<a name="N108D5"></a><a name="Command+Line+Options+for+logcondense.py"></a>
<h5>Command Line Options for logcondense.py</h5>
<p>The following command line options are supported for logcondense.py.</p>
<table class="ForrestTable" cellspacing="1" cellpadding="4">
<tr>
<th colspan="1" rowspan="1">Short Option</th>
<th colspan="1" rowspan="1">Long option</th>
<th colspan="1" rowspan="1">Meaning</th>
<th colspan="1" rowspan="1">Example</th>
</tr>
<tr>
<td colspan="1" rowspan="1">-p</td>
<td colspan="1" rowspan="1">--package</td>
<td colspan="1" rowspan="1">Complete path to the hadoop script. The version of hadoop must be the same as the
one running HDFS.</td>
<td colspan="1" rowspan="1">/usr/bin/hadoop</td>
</tr>
<tr>
<td colspan="1" rowspan="1">-d</td>
<td colspan="1" rowspan="1">--days</td>
<td colspan="1" rowspan="1">Delete log files older than the specified number of days</td>
<td colspan="1" rowspan="1">7</td>
</tr>
<tr>
<td colspan="1" rowspan="1">-c</td>
<td colspan="1" rowspan="1">--config</td>
<td colspan="1" rowspan="1">Path to the Hadoop configuration directory, under which hadoop-site.xml resides.
The hadoop-site.xml must point to the HDFS NameNode from where logs are to be removed.</td>
<td colspan="1" rowspan="1">/home/foo/hadoop/conf</td>
</tr>
<tr>
<td colspan="1" rowspan="1">-l</td>
<td colspan="1" rowspan="1">--logs</td>
<td colspan="1" rowspan="1">A HDFS path, this must be the same HDFS path as specified for the log-destination-uri,
as mentioned under <a href="hod_scheduler.html#hodring+options">hodring options</a>,
without the hdfs:// URI string</td>
<td colspan="1" rowspan="1">/user</td>
</tr>
<tr>
<td colspan="1" rowspan="1">-n</td>
<td colspan="1" rowspan="1">--dynamicdfs</td>
<td colspan="1" rowspan="1">If true, this will indicate that the logcondense.py script should delete HDFS logs
in addition to MapReduce logs. Otherwise, it only deletes MapReduce logs, which is also the
default if this option is not specified. This option is useful if
dynamic HDFS installations
are being provisioned by HOD, and the static HDFS installation is being used only to collect
logs - a scenario that may be common in test clusters.</td>
<td colspan="1" rowspan="1">false</td>
</tr>
<tr>
<td colspan="1" rowspan="1">-r</td>
<td colspan="1" rowspan="1">--retain-master-logs</td>
<td colspan="1" rowspan="1">If true, this will keep the JobTracker logs of job in hod-logs inside HDFS and it
will delete only the TaskTracker logs. Also, this will keep the Namenode logs along with
JobTracker logs and will only delete the Datanode logs if 'dynamicdfs' options is set
to true. Otherwise, it will delete the complete job directory from hod-logs inside
HDFS. By default it is set to false.</td>
<td colspan="1" rowspan="1">false</td>
</tr>
</table>
<p>So, for example, to delete all log files older than 7 days using a hadoop-site.xml stored in
~/hadoop-conf, using the hadoop installation under ~/hadoop-0.17.0, you could say:</p>
<p>
<em>python logcondense.py -p ~/hadoop-0.17.0/bin/hadoop -d 7 -c ~/hadoop-conf -l /user</em>
</p>
<a name="N1098F"></a><a name="checklimits.sh+-+Monitor+Resource+Limits"></a>
<h4>checklimits.sh - Monitor Resource Limits</h4>
<p>checklimits.sh is a HOD tool specific to the Torque/Maui environment
(<a href="http://www.clusterresources.com/pages/products/maui-cluster-scheduler.php">Maui Cluster Scheduler</a> is an open source job
scheduler for clusters and supercomputers, from clusterresources). The
checklimits.sh script
updates the torque comment field when newly submitted job(s) violate or
exceed
over user limits set up in Maui scheduler. It uses qstat, does one pass
over the torque job-list to determine queued or unfinished jobs, runs Maui
tool checkjob on each job to see if user limits are violated and then
runs torque's qalter utility to update job attribute 'comment'. Currently
it updates the comment as <em>User-limits exceeded. Requested:([0-9]*)
Used:([0-9]*) MaxLimit:([0-9]*)</em> for those jobs that violate limits.
This comment field is then used by HOD to behave accordingly depending on
the type of violation.</p>
<a name="N1099F"></a><a name="Running+checklimits.sh"></a>
<h5>Running checklimits.sh</h5>
<p>checklimits.sh is available under the hod_install_location/support
folder. This shell script can be run directly as <em>sh
checklimits.sh </em>or as <em>./checklimits.sh</em> after enabling
execute permissions. Torque and Maui binaries should be available
on the machine where the tool is run and should be in the path
of the shell script process. To update the
comment field of jobs from different users, this tool must be run with
torque administrative privileges. This tool must be run repeatedly
after specific intervals of time to frequently update jobs violating
constraints, for example via cron. Please note that the resource manager
and scheduler commands used in this script can be expensive and so
it is better not to run this inside a tight loop without sleeping.</p>
<a name="N109B0"></a><a name="verify-account+Script"></a>
<h4>verify-account Script</h4>
<p>Production systems use accounting packages to charge users for using
shared compute resources. HOD supports a parameter
<em>resource_manager.pbs-account</em> to allow users to identify the
account under which they would like to submit jobs. It may be necessary
to verify that this account is a valid one configured in an accounting
system. The <em>hod-install-dir/bin/verify-account</em> script
provides a mechanism to plug-in a custom script that can do this
verification.</p>
<a name="N109BF"></a><a name="Integrating+the+verify-account+script+with+HOD"></a>
<h5>Integrating the verify-account script with HOD</h5>
<p>HOD runs the <em>verify-account</em> script passing in the
<em>resource_manager.pbs-account</em> value as argument to the script,
before allocating a cluster. Sites can write a script that verify this
account against their accounting systems. Returning a non-zero exit
code from this script will cause HOD to fail allocation. Also, in
case of an error, HOD will print the output of script to the user.
Any descriptive error message can be passed to the user from the
script in this manner.</p>
<p>The default script that comes with the HOD installation does not
do any validation, and returns a zero exit code.</p>
<p>If the verify-account script is not found, then HOD will treat
that verification is disabled, and continue allocation as is.</p>
</div>
<!-- HOD CONFIGURATION -->
<a name="N109DA"></a><a name="HOD+Configuration"></a>
<h2 class="h3">HOD Configuration</h2>
<div class="section">
<p>This section discusses how to work with the HOD configuration options.</p>
<a name="N109E3"></a><a name="Getting+Started-N109E3"></a>
<h3 class="h4">Getting Started</h3>
<p>Configuration options can be specified in two ways: as a configuration file
in the INI format and as command line options to the HOD shell,
specified in the format --section.option[=value]. If the same option is
specified in both places, the value specified on the command line
overrides the value in the configuration file.</p>
<p>To get a simple description of all configuration options use:</p>
<pre class="code">$ hod --verbose-help</pre>
<a name="N109F4"></a><a name="Configuation+Options"></a>
<h3 class="h4">Configuation Options</h3>
<p>HOD organizes configuration options into these sections:</p>
<ul>
<li> common: Options that appear in more than one section. Options defined in a section are used by the
process for which that section applies. Common options have the same meaning, but can have different values in each section.</li>
<li> hod: Options for the HOD client</li>
<li> resource_manager: Options for specifying which resource manager to use, and other parameters for using that resource manager</li>
<li> ringmaster: Options for the RingMaster process, </li>
<li> hodring: Options for the HodRing processes</li>
<li> gridservice-mapred: Options for the MapReduce daemons</li>
<li> gridservice-hdfs: Options for the HDFS daemons.</li>
</ul>
<a name="N10A15"></a><a name="common+options"></a>
<h4>common options</h4>
<ul>
<li>temp-dir: Temporary directory for usage by the HOD processes. Make
sure that the users who will run hod have rights to create
directories under the directory specified here. If you
wish to make this directory vary across allocations,
you can make use of the environmental variables which will
be made available by the resource manager to the HOD
processes. For example, in a Torque setup, having
--ringmaster.temp-dir=/tmp/hod-temp-dir.$PBS_JOBID would
let ringmaster use different temp-dir for each
allocation; Torque expands this variable before starting
the ringmaster.</li>
<li>debug: Numeric value from 1-4. 4 produces the most log information,
and 1 the least.</li>
<li>log-dir: Directory where log files are stored. By default, this is
<install-location>/logs/. The restrictions and notes for the
temp-dir variable apply here too.
</li>
<li>xrs-port-range: Range of ports, among which an available port shall
be picked for use to run an XML-RPC server.</li>
<li>http-port-range: Range of ports, among which an available port shall
be picked for use to run an HTTP server.</li>
<li>java-home: Location of Java to be used by Hadoop.</li>
<li>syslog-address: Address to which a syslog daemon is bound to. The format
of the value is host:port. If configured, HOD log messages
will be logged to syslog using this value.</li>
</ul>
<a name="N10A34"></a><a name="hod+options"></a>
<h4>hod options</h4>
<ul>
<li>cluster: Descriptive name given to the cluster. For Torque, this is specified as a 'Node property' for every node in the cluster.
HOD uses this value to compute the number of available nodes.</li>
<li>client-params: Comma-separated list of hadoop config parameters specified as key-value pairs.
These will be used to generate a hadoop-site.xml on the submit node that should be used for running MapReduce jobs.</li>
<li>job-feasibility-attr: Regular expression string that specifies whether and how to check job feasibility - resource
manager or scheduler limits. The current implementation corresponds to the torque job attribute 'comment' and by default is disabled.
When set, HOD uses it to decide what type of limit violation is triggered and either deallocates the cluster or stays in queued state
according as the request is beyond maximum limits or the cumulative usage has crossed maximum limits. The torque comment attribute may be updated
periodically by an external mechanism. For example, comment attribute can be updated by running
<a href="hod_scheduler.html#checklimits.sh+-+Monitor+Resource+Limits">checklimits.sh</a> script in hod/support directory,
and then setting job-feasibility-attr equal to the value TORQUE_USER_LIMITS_COMMENT_FIELD, "User-limits exceeded. Requested:([0-9]*)
Used:([0-9]*) MaxLimit:([0-9]*)", will make HOD behave accordingly.</li>
</ul>
<a name="N10A4B"></a><a name="resource_manager+options"></a>
<h4>resource_manager options</h4>
<ul>
<li>queue: Name of the queue configured in the resource manager to which
jobs are to be submitted.</li>
<li>batch-home: Install directory to which 'bin' is appended and under
which the executables of the resource manager can be
found.</li>
<li>env-vars: Comma-separated list of key-value pairs,
expressed as key=value, which would be passed to the jobs
launched on the compute nodes.
For example, if the python installation is
in a non-standard location, one can set the environment
variable 'HOD_PYTHON_HOME' to the path to the python
executable. The HOD processes launched on the compute nodes
can then use this variable.</li>
<li>options: Comma-separated list of key-value pairs,
expressed as
<option>:<sub-option>=<value>. When
passing to the job submission program, these are expanded
as -<option> <sub-option>=<value>. These
are generally used for specifying additional resource
                  constraints for scheduling. For instance, with a Torque
setup, one can specify
--resource_manager.options='l:arch=x86_64' for
constraining the nodes being allocated to a particular
architecture; this option will be passed to Torque's qsub
command as "-l arch=x86_64".</li>
</ul>
<a name="N10A61"></a><a name="ringmaster+options"></a>
<h4>ringmaster options</h4>
<ul>
<li>work-dirs: Comma-separated list of paths that will serve
as the root for directories that HOD generates and passes
to Hadoop for use to store DFS and MapReduce data. For
example,
this is where DFS data blocks will be stored. Typically,
as many paths are specified as there are disks available
to ensure all disks are being utilized. The restrictions
and notes for the temp-dir variable apply here too.</li>
<li>max-master-failures: Number of times a hadoop master
daemon can fail to launch, beyond which HOD will fail
the cluster allocation altogether. In HOD clusters,
sometimes there might be a single or few "bad" nodes due
to issues like missing java, missing or incorrect version
of Hadoop etc. When this configuration variable is set
to a positive integer, the RingMaster returns an error
to the client only when the number of times a hadoop
master (JobTracker or NameNode) fails to start on these
bad nodes because of above issues, exceeds the specified
value. If the number is not exceeded, the next HodRing
which requests for a command to launch is given the same
hadoop master again. This way, HOD tries its best for a
successful allocation even in the presence of a few bad
nodes in the cluster.
</li>
<li>workers_per_ring: Number of workers per service per HodRing.
By default this is set to 1. If this configuration
variable is set to a value 'n', the HodRing will run
'n' instances of the workers (TaskTrackers or DataNodes)
on each node acting as a slave. This can be used to run
multiple workers per HodRing, so that the total number of
workers in a HOD cluster is not limited by the total
number of nodes requested during allocation. However, note
that this will mean each worker should be configured to use
only a proportional fraction of the capacity of the
resources on the node. In general, this feature is only
useful for testing and simulation purposes, and not for
production use.</li>
</ul>
<a name="N10A74"></a><a name="gridservice-hdfs+options"></a>
<h4>gridservice-hdfs options</h4>
<ul>
<li>external: If false, indicates that a HDFS cluster must be
                  brought up by the HOD system, on the nodes which it
allocates via the allocate command. Note that in that case,
when the cluster is de-allocated, it will bring down the
HDFS cluster, and all the data will be lost.
If true, it will try and connect to an externally configured
HDFS system.
Typically, because input for jobs are placed into HDFS
before jobs are run, and also the output from jobs in HDFS
is required to be persistent, an internal HDFS cluster is
of little value in a production system. However, it allows
for quick testing.</li>
<li>host: Hostname of the externally configured NameNode, if any</li>
<li>fs_port: Port to which NameNode RPC server is bound.</li>
<li>info_port: Port to which the NameNode web UI server is bound.</li>
<li>pkgs: Installation directory, under which bin/hadoop executable is
located. This can be used to use a pre-installed version of
Hadoop on the cluster.</li>
<li>server-params: Comma-separated list of hadoop config parameters
specified key-value pairs. These will be used to
generate a hadoop-site.xml that will be used by the
NameNode and DataNodes.</li>
<li>final-server-params: Same as above, except they will be marked final.</li>
</ul>
<a name="N10A93"></a><a name="gridservice-mapred+options"></a>
<h4>gridservice-mapred options</h4>
<ul>
<li>external: If false, indicates that a MapReduce cluster must be
                  brought up by the HOD system on the nodes which it allocates
via the allocate command.
                  If true, it will try and connect to an externally
configured MapReduce system.</li>
<li>host: Hostname of the externally configured JobTracker, if any</li>
<li>tracker_port: Port to which the JobTracker RPC server is bound</li>
<li>info_port: Port to which the JobTracker web UI server is bound.</li>
<li>pkgs: Installation directory, under which bin/hadoop executable is
located</li>
<li>server-params: Comma-separated list of hadoop config parameters
specified key-value pairs. These will be used to
generate a hadoop-site.xml that will be used by the
JobTracker and TaskTrackers</li>
<li>final-server-params: Same as above, except they will be marked final.</li>
</ul>
<a name="N10AB2"></a><a name="hodring+options"></a>
<h4>hodring options</h4>
<ul>
<li>mapred-system-dir-root: Directory in the DFS under which HOD will
generate sub-directory names and pass the full path
as the value of the 'mapred.system.dir' configuration
parameter to Hadoop daemons. The format of the full
path will be value-of-this-option/userid/mapredsystem/cluster-id.
Note that the directory specified here should be such
that all users can create directories under this, if
permissions are enabled in HDFS. Setting the value of
this option to /user will make HOD use the user's
home directory to generate the mapred.system.dir value.</li>
<li>log-destination-uri: URL describing a path in an external, static DFS or the
cluster node's local file system where HOD will upload
Hadoop logs when a cluster is deallocated. To specify a
DFS path, use the format 'hdfs://path'. To specify a
cluster node's local file path, use the format 'file://path'.
When clusters are deallocated by HOD, the hadoop logs will
be deleted as part of HOD's cleanup process. To ensure these
logs persist, you can use this configuration option.
The format of the path is
value-of-this-option/userid/hod-logs/cluster-id
Note that the directory you specify here must be such that all
users can create sub-directories under this. Setting this value
to hdfs://user will make the logs come in the user's home directory
in DFS.</li>
<li>pkgs: Installation directory, under which bin/hadoop executable is located. This will
be used by HOD to upload logs if a HDFS URL is specified in log-destination-uri
option. Note that this is useful if the users are using a tarball whose version
may differ from the external, static HDFS version.</li>
<li>hadoop-port-range: Range of ports, among which an available port shall
be picked for use to run a Hadoop Service, like JobTracker or TaskTracker. </li>
</ul>
</div>
</div>
<!--+
|end content
+-->
<div class="clearboth"> </div>
</div>
<div id="footer">
<!--+
|start bottomstrip
+-->
<div class="lastmodified">
<script type="text/javascript"><!--
document.write("Last Published: " + document.lastModified);
// --></script>
</div>
<div class="copyright">
Copyright ©
2008 <a href="http://www.apache.org/licenses/">The Apache Software Foundation.</a>
</div>
<!--+
|end bottomstrip
+-->
</div>
</body>
</html>
| {
"content_hash": "a35e83f3a9a33d159b405322841b3497",
"timestamp": "",
"source": "github",
"line_count": 2109,
"max_line_length": 254,
"avg_line_length": 48.559981033665245,
"alnum_prop": 0.684883754992042,
"repo_name": "Jun1113/MapReduce-Example",
"id": "758d4a122b1da4600ef02e8c61fa6e47df0899b6",
"size": "102413",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "docs/hod_scheduler.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "390517"
},
{
"name": "C++",
"bytes": "401532"
},
{
"name": "Java",
"bytes": "15731099"
},
{
"name": "JavaScript",
"bytes": "112012"
},
{
"name": "Objective-C",
"bytes": "119767"
},
{
"name": "PHP",
"bytes": "152555"
},
{
"name": "Perl",
"bytes": "149888"
},
{
"name": "Python",
"bytes": "1129211"
},
{
"name": "Ruby",
"bytes": "28485"
},
{
"name": "Shell",
"bytes": "1279010"
},
{
"name": "Smalltalk",
"bytes": "56562"
},
{
"name": "XML",
"bytes": "235129"
}
],
"symlink_target": ""
} |
package org.apache.asterix.dataflow.data.nontagged.serde;
import java.io.DataInput;
import java.io.DataOutput;
import org.apache.asterix.om.base.ATime;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
/**
 * Serializer/deserializer for {@link ATime} values. An ATime is backed by a
 * single 32-bit chronon (time-of-day) value, so (de)serialization is a plain
 * 4-byte integer read/write.
 */
public class ATimeSerializerDeserializer implements ISerializerDeserializer<ATime> {

    private static final long serialVersionUID = 1L;

    /** Shared stateless singleton; use this instead of constructing new instances. */
    public static final ATimeSerializerDeserializer INSTANCE = new ATimeSerializerDeserializer();

    private ATimeSerializerDeserializer() {
        // Singleton: instantiation restricted to INSTANCE above.
    }

    @Override
    public ATime deserialize(DataInput in) throws HyracksDataException {
        final int chronon = IntegerSerializerDeserializer.read(in);
        return new ATime(chronon);
    }

    @Override
    public void serialize(ATime instance, DataOutput out) throws HyracksDataException {
        final int chronon = instance.getChrononTime();
        IntegerSerializerDeserializer.write(chronon, out);
    }

    /**
     * Reads the chronon value directly from a serialized byte array without
     * materializing an {@link ATime} object.
     *
     * @param byteArray buffer holding the serialized value
     * @param offset    start offset of the 4-byte chronon within the buffer
     * @return the chronon time as an int
     */
    public static int getChronon(byte[] byteArray, int offset) {
        return AInt32SerializerDeserializer.getInt(byteArray, offset);
    }
}
| {
"content_hash": "40c983bd09a0eca9baeb229dbe03e019",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 97,
"avg_line_length": 34.088235294117645,
"alnum_prop": 0.7842968075927523,
"repo_name": "ty1er/incubator-asterixdb",
"id": "3830e2ab94f1652febe301d13471780d60e7f0da",
"size": "1966",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/ATimeSerializerDeserializer.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "13546"
},
{
"name": "CSS",
"bytes": "22087"
},
{
"name": "Crystal",
"bytes": "453"
},
{
"name": "FreeMarker",
"bytes": "63839"
},
{
"name": "HTML",
"bytes": "128395"
},
{
"name": "Java",
"bytes": "21035886"
},
{
"name": "JavaScript",
"bytes": "600616"
},
{
"name": "PostScript",
"bytes": "224941"
},
{
"name": "Python",
"bytes": "270538"
},
{
"name": "Ruby",
"bytes": "3078"
},
{
"name": "Scheme",
"bytes": "1105"
},
{
"name": "Shell",
"bytes": "236685"
},
{
"name": "Smarty",
"bytes": "31412"
},
{
"name": "TeX",
"bytes": "1848"
}
],
"symlink_target": ""
} |
title: "Electric Light Orchestra - Mr Blue Sky"
release_year: 1977
track_name: "Mr Blue Sky"
artist: "Electric Light Orchestra"
album: "Out of the Blue"
youtube_embed_id: bjPqsDU0j2I
---
| {
"content_hash": "80d361741edd22afc54240ff7e581d6a",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 47,
"avg_line_length": 26.714285714285715,
"alnum_prop": 0.7433155080213903,
"repo_name": "resir014/resir014-tilde",
"id": "bb515f726912f81b9a71383b89900ea6243b3307",
"size": "191",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "content/music/1977/electric-light-orchestra-mr-blue-sky.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "23192"
},
{
"name": "HTML",
"bytes": "13866"
},
{
"name": "JavaScript",
"bytes": "96"
},
{
"name": "Shell",
"bytes": "1011"
}
],
"symlink_target": ""
} |
<?php
session_start();
require_once 'conexao.php';

// Already authenticated? Go straight to the admin dashboard.
if (isset($_SESSION['auth']) && $_SESSION['auth']) {
    header("location:dashboardADM.php");
}

// Handle a login attempt (the form below posts back to ?login=go).
if (isset($_GET['login']) && $_GET['login'] == 'go') {
    $login = $_POST['login'];
    $email = $login;
    $senha = $_POST['senha'];
    // NOTE(review): md5() is a weak, unsalted password hash — migrate to
    // password_hash()/password_verify() when the stored hashes can be rotated.
    $login = md5($login);
    $senha = md5($senha);

    // Verify the reCAPTCHA answer with Google before touching the database.
    $secretKey = "6LfA4TkUAAAAAPWG23mIr5EAD3F8-EBEnM2_uas8";
    @$responseKey = $_POST['g-recaptcha-response'];
    $userIP = $_SERVER['REMOTE_ADDR'];
    $url = "https://www.google.com/recaptcha/api/siteverify?secret=$secretKey&response=$responseKey&remoteip=$userIP";
    @$response = file_get_contents($url);
    @$response = json_decode($response);

    if ($response === null || $response === false) {
        // The verification request itself failed (network / decode problem).
        echo "<script>alert('Desculpe, ocorreu um problema com sua conexão')</script>";
    } elseif (empty($response->success)) {
        // Captcha was not solved. (Previously this message fired on bad
        // credentials — the three alerts were rotated one branch off.)
        echo "<script>alert('Verificação não efetuada!')</script>";
    } else {
        // NOTE(review): mysql_* is deprecated (removed in PHP 7) — migrate to
        // mysqli/PDO with prepared statements; the md5() above is currently the
        // only thing keeping this interpolated query injection-safe.
        $query = mysql_query("SELECT * FROM `administradores` WHERE `Login`= '$login' AND `Senha`= '$senha'");
        if (mysql_num_rows($query) == 1) {
            // Credentials match exactly one administrator: open the session.
            $_SESSION['auth'] = true;
            $_SESSION['email'] = $email;
            header("location:dashboardADM.php");
        } else {
            echo "<script>alert('Login ou senha inválido!')</script>";
        }
    }
}
?>
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta name="description" content="">
<meta name="author" content="">
<title>CCECOMP UEFS</title>
<link href='../css/bootstrap-dropdownhover.min.css' rel="stylesheet">
<!-- Bootstrap Core CSS -->
<link href="../css/bootstrap.min.css" rel="stylesheet">
<!-- Custom CSS -->
<link href="../css/modern-business.css" rel="stylesheet">
<!-- Custom Fonts -->
<link href="../font-awesome/css/font-awesome.min.css" rel="stylesheet" type="text/css">
<!-- Favicon -->
<link rel="icon" type="images/png" sizes="32x32" href="../images/favicon.ico">
<!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries -->
<!-- WARNING: Respond.js doesn't work if you view the page via file:// -->
<!--[if lt IE 9]><script src="https://oss.maxcdn.com/libs/html5shiv/3.7.0/html5shiv.js"></script><script src="https://oss.maxcdn.com/libs/respond.js/1.4.2/respond.min.js"></script><![endif]-->
</head>
<?php require_once 'header.php';
?>
<!-- Page-local styles for the centered login card and captcha widget -->
<style>
.jumbotron {
width: 500px;
text-align: center;
margin-left: auto;
margin-right: auto;
margin-top: 20px;
border-radius: 20px;
}
.table {
size: 20px;
}
.captcha {
left: 20%;
top: 20%;
margin-left: 90px;
/* Half of the width value */
margin-top: -10px;
/* Half of the height value */
}
</style>
<body>
<!-- Login form posts back to this same page (?login=go), handled by the PHP above -->
<div class="jumbotron container">
<form method="POST" action="?login=go">
<h2>Login</h2>
<input name="login" class="form-control" type="text" placeholder="Usuário" />
</br>
<input name="senha" class="form-control" type="password" placeholder="Senha" />
</br>
<div class="captcha">
<div class="g-recaptcha" data-sitekey="6LfA4TkUAAAAAEPyUZEAXbNTGehnUvx2yR-LKo-h"></div>
</div>
<br>
<input class="btn btn-warning btn-lg" type="submit" value="Entrar">
</form>
</div>
<!-- jQuery -->
<script src="../js/jquery.js"></script>
<!-- Bootstrap Core JavaScript -->
<script src="../js/bootstrap.min.js"></script>
<script src='../js/bootstrap-dropdownhover.min.js'></script>
<!-- Google reCAPTCHA client library renders the .g-recaptcha widget above -->
<script src='https://www.google.com/recaptcha/api.js'></script>
<br>
<br>
<br><br><br><br>
<?php require_once 'footer.php';
?>
</body>
</html>
"content_hash": "84f600e0add67b6d02414849d699de39",
"timestamp": "",
"source": "github",
"line_count": 128,
"max_line_length": 196,
"avg_line_length": 33.2578125,
"alnum_prop": 0.5485083392060136,
"repo_name": "EcompJr/CCECOMP",
"id": "c244384d99ce116d75d17edad3331ea1a4bb1cf4",
"size": "4264",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "public_html/login.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "65777"
},
{
"name": "HTML",
"bytes": "234942"
},
{
"name": "Hack",
"bytes": "2246"
},
{
"name": "JavaScript",
"bytes": "44573"
},
{
"name": "PHP",
"bytes": "314498"
}
],
"symlink_target": ""
} |
describe('Add a new todo', function() {
    // Locator helper for the form's submit button.
    var submitButton = function() {
        return element(by.css('[type="submit"]'));
    };
    // Types the given values into both form fields.
    var fillForm = function(title, description) {
        element(by.model("formData.title")).sendKeys(title);
        element(by.model("formData.description")).sendKeys(description);
    };

    beforeEach(function() {
        browser.get('#!/add');
    });

    it('should have the save button disabled', function() {
        // An empty form must not be submittable.
        expect(submitButton().isEnabled()).toEqual(false);
    });

    it('should have the save button enabled after filling in the fields', function() {
        fillForm("Create E2E spec", "for the entire app");
        expect(submitButton().isEnabled()).toEqual(true);
    });

    it('should save new todo', function() {
        fillForm("Create E2E spec", "for the entire app");
        submitButton().click();
        // The new todo shows up as the first row of the list.
        expect(element(by.repeater("todo in todos").row(0).column('title')).getText()).toEqual("Create E2E spec");
        expect(element(by.repeater("todo in todos").row(0).column('description')).getText()).toEqual("for the entire app");
    });
});
describe('Edit the saved todo', function() {
    // Locator helper for the title cell of the first listed todo.
    var firstRowTitle = function() {
        return element(by.repeater("todo in todos").row(0).column("title"));
    };

    beforeEach(function() {
        browser.get("#!/");
    });

    it('should open the saved todo', function() {
        firstRowTitle().click();
        // The edit form is pre-populated with the values saved by the previous spec.
        expect(element(by.model("formData.title")).getAttribute("value")).toEqual("Create E2E spec");
        expect(element(by.model("formData.description")).getAttribute("value")).toEqual("for the entire app");
    });

    it('should edit the saved todo', function() {
        firstRowTitle().click();
        var titleField = element(by.model("formData.title"));
        var descriptionField = element(by.model("formData.description"));
        // Replace both field values, then save.
        titleField.clear();
        titleField.sendKeys("Create Protractor E2E spec");
        descriptionField.clear();
        descriptionField.sendKeys("for the entire app really easy");
        element(by.css('[type="submit"]')).click();
        expect(firstRowTitle().getText()).toEqual("Create Protractor E2E spec");
        expect(element(by.repeater("todo in todos").row(0).column('description')).getText()).toEqual("for the entire app really easy");
    });
});
describe('Delete the saved todo', function() {
    it('should delete the todo', function() {
        browser.get("#!/");
        // Click the delete button in the last cell of the first row.
        element(by.css('table tr:first-child td:last-child button')).click();
        // After deletion the todo list must be empty again.
        element.all(by.repeater("todo in todos")).then(function(rows) {
            expect(rows.length).toEqual(0);
        });
    });
});
"content_hash": "e273d49a8904a7c48e4edf5df8111be4",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 135,
"avg_line_length": 35.44303797468354,
"alnum_prop": 0.6196428571428572,
"repo_name": "lucassp/e2e-testing",
"id": "79388565ffac2530254da101cfaafbe2cef01848",
"size": "2800",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/e2e_spec.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "11221"
}
],
"symlink_target": ""
} |
using System;
using System.Diagnostics;
using HANDLE = System.IntPtr;
using System.Text;
namespace System.Data.SQLite
{
internal partial class Sqlite3
{
#if !SQLITE_CORE
//#define SQLITE_CORE 1 /* Disable the API redefinition in sqlite3ext.h */
const int SQLITE_CORE = 1;
#endif
//#include "sqlite3ext.h"
//#include "sqliteInt.h"
//#include <string.h>
#if !SQLITE_OMIT_LOAD_EXTENSION
/*
** Some API routines are omitted when various features are
** excluded from a build of SQLite. Substitute a NULL pointer
** for any missing APIs.
*/
#if !SQLITE_ENABLE_COLUMN_METADATA
//# define sqlite3_column_database_name 0
//# define sqlite3_column_database_name16 0
//# define sqlite3_column_table_name 0
//# define sqlite3_column_table_name16 0
//# define sqlite3_column_origin_name 0
//# define sqlite3_column_origin_name16 0
//# define sqlite3_table_column_metadata 0
#endif
#if SQLITE_OMIT_AUTHORIZATION
//# define sqlite3_set_authorizer 0
#endif
#if SQLITE_OMIT_UTF16
//# define sqlite3_bind_text16 0
//# define sqlite3_collation_needed16 0
//# define sqlite3_column_decltype16 0
//# define sqlite3_column_name16 0
//# define sqlite3_column_text16 0
//# define sqlite3_complete16 0
//# define sqlite3_create_collation16 0
//# define sqlite3_create_function16 0
//# define sqlite3_errmsg16 0
// UTF-16 error-message stub compiled in when SQLITE_OMIT_UTF16 is defined:
// the real implementation is excluded, so callers always receive "".
static string sqlite3_errmsg16(sqlite3 db)
{
return "";
}
//# define sqlite3_open16 0
//# define sqlite3_prepare16 0
//# define sqlite3_prepare16_v2 0
//# define sqlite3_result_error16 0
//# define sqlite3_result_text16 0
// No-op stub for the UTF-16 result setter when SQLITE_OMIT_UTF16 is defined;
// all arguments are intentionally ignored.
static void sqlite3_result_text16(sqlite3_context pCtx, string z, int n, dxDel xDel)
{
}
//# define sqlite3_result_text16be 0
//# define sqlite3_result_text16le 0
//# define sqlite3_value_text16 0
//# define sqlite3_value_text16be 0
//# define sqlite3_value_text16le 0
//# define sqlite3_column_database_name16 0
//# define sqlite3_column_table_name16 0
//# define sqlite3_column_origin_name16 0
#endif
#if SQLITE_OMIT_COMPLETE
//# define sqlite3_complete 0
//# define sqlite3_complete16 0
#endif
#if SQLITE_OMIT_DECLTYPE
//# define sqlite3_column_decltype16 0
//# define sqlite3_column_decltype 0
#endif
#if SQLITE_OMIT_PROGRESS_CALLBACK
//# define sqlite3_progress_handler 0
static void sqlite3_progress_handler (sqlite3 db, int nOps, dxProgress xProgress, object pArg){}
#endif
#if SQLITE_OMIT_VIRTUALTABLE
//# define sqlite3_create_module 0
//# define sqlite3_create_module_v2 0
//# define sqlite3_declare_vtab 0
#endif
#if SQLITE_OMIT_SHARED_CACHE
//# define sqlite3_enable_shared_cache 0
#endif
#if SQLITE_OMIT_TRACE
//# define sqlite3_profile 0
//# define sqlite3_trace 0
#endif
#if SQLITE_OMIT_GET_TABLE
//# define //sqlite3_free_table 0
//# define sqlite3_get_table 0
// Stub for sqlite3_get_table when SQLITE_OMIT_GET_TABLE is defined: performs
// no query, leaves every ref parameter untouched, and returns 0 (SQLITE_OK).
static public int sqlite3_get_table(
sqlite3 db, /* An open database */
string zSql, /* SQL to be evaluated */
ref string[] pazResult, /* Results of the query */
ref int pnRow, /* Number of result rows written here */
ref int pnColumn, /* Number of result columns written here */
ref string pzErrmsg /* Error msg written here */
)
{
return 0;
}
#endif
#if SQLITE_OMIT_INCRBLOB
//#define sqlite3_bind_zeroblob 0
//#define sqlite3_blob_bytes 0
//#define sqlite3_blob_close 0
//#define sqlite3_blob_open 0
//#define sqlite3_blob_read 0
//#define sqlite3_blob_write 0
#endif
/*
** The following structure contains pointers to all SQLite API routines.
** A pointer to this structure is passed into extensions when they are
** loaded so that the extension can make calls back into the SQLite
** library.
**
** When adding new APIs, add them to the bottom of this structure
** in order to preserve backwards compatibility.
**
** Extensions that use newer APIs should first call the
** sqlite3_libversion_number() to make sure that the API they
** intend to use is supported by the library. Extensions should
** also check to make sure that the pointer to the function is
** not NULL before calling it.
*/
// Managed stand-in for the C extension-API jump table. In this port the
// function-pointer list is not populated (see the commented-out table below);
// only the database handle exposed to extension callbacks is retained.
public class sqlite3_api_routines
{
public sqlite3 context_db_handle; // db handle available to loaded-extension code
};
static sqlite3_api_routines sqlite3Apis = new sqlite3_api_routines();
//{
// sqlite3_aggregate_context,
#if !SQLITE_OMIT_DEPRECATED
/ sqlite3_aggregate_count,
#else
// 0,
#endif
// sqlite3_bind_blob,
// sqlite3_bind_double,
// sqlite3_bind_int,
// sqlite3_bind_int64,
// sqlite3_bind_null,
// sqlite3_bind_parameter_count,
// sqlite3_bind_parameter_index,
// sqlite3_bind_parameter_name,
// sqlite3_bind_text,
// sqlite3_bind_text16,
// sqlite3_bind_value,
// sqlite3_busy_handler,
// sqlite3_busy_timeout,
// sqlite3_changes,
// sqlite3_close,
// sqlite3_collation_needed,
// sqlite3_collation_needed16,
// sqlite3_column_blob,
// sqlite3_column_bytes,
// sqlite3_column_bytes16,
// sqlite3_column_count,
// sqlite3_column_database_name,
// sqlite3_column_database_name16,
// sqlite3_column_decltype,
// sqlite3_column_decltype16,
// sqlite3_column_double,
// sqlite3_column_int,
// sqlite3_column_int64,
// sqlite3_column_name,
// sqlite3_column_name16,
// sqlite3_column_origin_name,
// sqlite3_column_origin_name16,
// sqlite3_column_table_name,
// sqlite3_column_table_name16,
// sqlite3_column_text,
// sqlite3_column_text16,
// sqlite3_column_type,
// sqlite3_column_value,
// sqlite3_commit_hook,
// sqlite3_complete,
// sqlite3_complete16,
// sqlite3_create_collation,
// sqlite3_create_collation16,
// sqlite3_create_function,
// sqlite3_create_function16,
// sqlite3_create_module,
// sqlite3_data_count,
// sqlite3_db_handle,
// sqlite3_declare_vtab,
// sqlite3_enable_shared_cache,
// sqlite3_errcode,
// sqlite3_errmsg,
// sqlite3_errmsg16,
// sqlite3_exec,
#if !SQLITE_OMIT_DEPRECATED
//sqlite3_expired,
#else
//0,
#endif
// sqlite3_finalize,
// //sqlite3_free,
// //sqlite3_free_table,
// sqlite3_get_autocommit,
// sqlite3_get_auxdata,
// sqlite3_get_table,
// 0, /* Was sqlite3_global_recover(), but that function is deprecated */
// sqlite3_interrupt,
// sqlite3_last_insert_rowid,
// sqlite3_libversion,
// sqlite3_libversion_number,
// sqlite3_malloc,
// sqlite3_mprintf,
// sqlite3_open,
// sqlite3_open16,
// sqlite3_prepare,
// sqlite3_prepare16,
// sqlite3_profile,
// sqlite3_progress_handler,
// sqlite3_realloc,
// sqlite3_reset,
// sqlite3_result_blob,
// sqlite3_result_double,
// sqlite3_result_error,
// sqlite3_result_error16,
// sqlite3_result_int,
// sqlite3_result_int64,
// sqlite3_result_null,
// sqlite3_result_text,
// sqlite3_result_text16,
// sqlite3_result_text16be,
// sqlite3_result_text16le,
// sqlite3_result_value,
// sqlite3_rollback_hook,
// sqlite3_set_authorizer,
// sqlite3_set_auxdata,
// sqlite3_snprintf,
// sqlite3_step,
// sqlite3_table_column_metadata,
#if !SQLITE_OMIT_DEPRECATED
//sqlite3_thread_cleanup,
#else
// 0,
#endif
// sqlite3_total_changes,
// sqlite3_trace,
#if !SQLITE_OMIT_DEPRECATED
//sqlite3_transfer_bindings,
#else
// 0,
#endif
// sqlite3_update_hook,
// sqlite3_user_data,
// sqlite3_value_blob,
// sqlite3_value_bytes,
// sqlite3_value_bytes16,
// sqlite3_value_double,
// sqlite3_value_int,
// sqlite3_value_int64,
// sqlite3_value_numeric_type,
// sqlite3_value_text,
// sqlite3_value_text16,
// sqlite3_value_text16be,
// sqlite3_value_text16le,
// sqlite3_value_type,
// sqlite3_vmprintf,
// /*
// ** The original API set ends here. All extensions can call any
// ** of the APIs above provided that the pointer is not NULL. But
// ** before calling APIs that follow, extension should check the
// ** sqlite3_libversion_number() to make sure they are dealing with
// ** a library that is new enough to support that API.
// *************************************************************************
// */
// sqlite3_overload_function,
// /*
// ** Added after 3.3.13
// */
// sqlite3_prepare_v2,
// sqlite3_prepare16_v2,
// sqlite3_clear_bindings,
// /*
// ** Added for 3.4.1
// */
// sqlite3_create_module_v2,
// /*
// ** Added for 3.5.0
// */
// sqlite3_bind_zeroblob,
// sqlite3_blob_bytes,
// sqlite3_blob_close,
// sqlite3_blob_open,
// sqlite3_blob_read,
// sqlite3_blob_write,
// sqlite3_create_collation_v2,
// sqlite3_file_control,
// sqlite3_memory_highwater,
// sqlite3_memory_used,
#if SQLITE_MUTEX_OMIT
// 0,
// 0,
// 0,
// 0,
// 0,
#else
// sqlite3MutexAlloc,
// sqlite3_mutex_enter,
// sqlite3_mutex_free,
// sqlite3_mutex_leave,
// sqlite3_mutex_try,
#endif
// sqlite3_open_v2,
// sqlite3_release_memory,
// sqlite3_result_error_nomem,
// sqlite3_result_error_toobig,
// sqlite3_sleep,
// sqlite3_soft_heap_limit,
// sqlite3_vfs_find,
// sqlite3_vfs_register,
// sqlite3_vfs_unregister,
// /*
// ** Added for 3.5.8
// */
// sqlite3_threadsafe,
// sqlite3_result_zeroblob,
// sqlite3_result_error_code,
// sqlite3_test_control,
// sqlite3_randomness,
// sqlite3_context_db_handle,
// /*
// ** Added for 3.6.0
// */
// sqlite3_extended_result_codes,
// sqlite3_limit,
// sqlite3_next_stmt,
// sqlite3_sql,
// sqlite3_status,
// /*
// ** Added for 3.7.4
// */
// sqlite3_backup_finish,
// sqlite3_backup_init,
// sqlite3_backup_pagecount,
// sqlite3_backup_remaining,
// sqlite3_backup_step,
//#if !SQLITE_OMIT_COMPILEOPTION_DIAGS
// sqlite3_compileoption_get,
// sqlite3_compileoption_used,
//#else
// 0,
// 0,
//#endif
// sqlite3_create_function_v2,
// sqlite3_db_config,
// sqlite3_db_mutex,
// sqlite3_db_status,
// sqlite3_extended_errcode,
// sqlite3_log,
// sqlite3_soft_heap_limit64,
// sqlite3_sourceid,
// sqlite3_stmt_status,
// sqlite3_strnicmp,
//#if SQLITE_ENABLE_UNLOCK_NOTIFY
// sqlite3_unlock_notify,
//#else
// 0,
//#endif
//#if !SQLITE_OMIT_WAL
// sqlite3_wal_autocheckpoint,
// sqlite3_wal_checkpoint,
// sqlite3_wal_hook,
//#else
// 0,
// 0,
// 0,
//#endif
//};
/*
** Attempt to load an SQLite extension library contained in the file
** zFile. The entry point is zProc. zProc may be 0 in which case a
** default entry point name (sqlite3_extension_init) is used. Use
** of the default name is recommended.
**
** Return SQLITE_OK on success and SQLITE_ERROR if something goes wrong.
**
** If an error occurs and pzErrMsg is not 0, then fill pzErrMsg with
** error message text. The calling function should free this memory
** by calling sqlite3DbFree(db, ).
*/
// Core implementation of extension loading for this managed port. Returns
// SQLITE_OK on success or SQLITE_ERROR with a message in pzErrMsg.
// NOTE(review): the symbol-lookup/initialization tail of the C original is
// still commented out below and the code hits Debugger.Break() after the
// dlsym call, so this port appears to never invoke the extension entry
// point — confirm before relying on load_extension at runtime.
static int sqlite3LoadExtension(
sqlite3 db, /* Load the extension into this database connection */
string zFile, /* Name of the shared library containing extension */
string zProc, /* Entry point. Use "sqlite3_extension_init" if 0 */
ref string pzErrMsg /* Put error message here if not 0 */
)
{
sqlite3_vfs pVfs = db.pVfs;
HANDLE handle;
dxInit xInit; //int (*xInit)(sqlite3*,char**,const sqlite3_api_routines);
StringBuilder zErrmsg = new StringBuilder(100);
//object aHandle;
const int nMsg = 300;
// Clear the caller's error slot up front.
if (pzErrMsg != null)
pzErrMsg = null;
/* Ticket #1863. To avoid a creating security problems for older
** applications that relink against newer versions of SQLite, the
** ability to run load_extension is turned off by default. One
** must call sqlite3_enable_load_extension() to turn on extension
** loading. Otherwise you get the following error.
*/
if ((db.flags & SQLITE_LoadExtension) == 0)
{
//if( pzErrMsg != null){
pzErrMsg = sqlite3_mprintf("not authorized");
//}
return SQLITE_ERROR;
}
// Fall back to the conventional entry-point name when none was given.
if (zProc == null || zProc == "")
{
zProc = "sqlite3_extension_init";
}
// Open the shared library through the VFS dynamic-loading hooks.
handle = sqlite3OsDlOpen(pVfs, zFile);
if (handle == IntPtr.Zero)
{
// if( pzErrMsg ){
pzErrMsg = "";//*pzErrMsg = zErrmsg = sqlite3_malloc(nMsg);
//if( zErrmsg !=null){
sqlite3_snprintf(nMsg, zErrmsg,
"unable to open shared library [%s]", zFile);
sqlite3OsDlError(pVfs, nMsg - 1, zErrmsg.ToString());
return SQLITE_ERROR;
}
// Resolve the entry point; the remainder of the upstream logic
// (invoking xInit and recording the handle) is still un-ported below.
//xInit = (int()(sqlite3*,char**,const sqlite3_api_routines))
// sqlite3OsDlSym(pVfs, handle, zProc);
xInit = (dxInit)sqlite3OsDlSym(pVfs, handle, ref zProc);
Debugger.Break(); // TODO --
//if( xInit==0 ){
// if( pzErrMsg ){
// *pzErrMsg = zErrmsg = sqlite3_malloc(nMsg);
// if( zErrmsg ){
// sqlite3_snprintf(nMsg, zErrmsg,
// "no entry point [%s] in shared library [%s]", zProc,zFile);
// sqlite3OsDlError(pVfs, nMsg-1, zErrmsg);
// }
// sqlite3OsDlClose(pVfs, handle);
// }
// return SQLITE_ERROR;
// }else if( xInit(db, ref zErrmsg, sqlite3Apis) ){
//// if( pzErrMsg !=null){
// pzErrMsg = sqlite3_mprintf("error during initialization: %s", zErrmsg);
// //}
// sqlite3DbFree(db,ref zErrmsg);
// sqlite3OsDlClose(pVfs, ref handle);
// return SQLITE_ERROR;
// }
// /* Append the new shared library handle to the db.aExtension array. */
// aHandle = sqlite3DbMallocZero(db, sizeof(handle)*db.nExtension+1);
// if( aHandle==null ){
// return SQLITE_NOMEM;
// }
// if( db.nExtension>0 ){
// memcpy(aHandle, db.aExtension, sizeof(handle)*(db.nExtension));
// }
// sqlite3DbFree(db,ref db.aExtension);
// db.aExtension = aHandle;
// db.aExtension[db.nExtension++] = handle;
return SQLITE_OK;
}
// Public entry point: load a shared-library extension into the given
// connection. Serializes on the connection mutex and routes the result
// through sqlite3ApiExit() like every other public API.
static public int sqlite3_load_extension(
sqlite3 db, /* Load the extension into this database connection */
string zFile, /* Name of the shared library containing extension */
string zProc, /* Entry point. Use "sqlite3_extension_init" if 0 */
ref string pzErrMsg /* Put error message here if not 0 */
)
{
sqlite3_mutex_enter(db.mutex);
int result = sqlite3ApiExit(db, sqlite3LoadExtension(db, zFile, zProc, ref pzErrMsg));
sqlite3_mutex_leave(db.mutex);
return result;
}
/*
** Call this routine when the database connection is closing in order
** to clean up loaded extensions
*/
// Called while a database connection is closing: release every dynamically
// loaded extension handle owned by the connection, then free the array.
static void sqlite3CloseExtensions(sqlite3 db)
{
Debug.Assert(sqlite3_mutex_held(db.mutex));
// Only the first nExtension slots hold live handles.
for (int ext = 0; ext < db.nExtension; ext++)
{
sqlite3OsDlClose(db.pVfs, (HANDLE)db.aExtension[ext]);
}
sqlite3DbFree(db, ref db.aExtension);
}
/*
** Enable or disable extension loading. Extension loading is disabled by
** default so as not to open security holes in older applications.
*/
// Turn the load_extension() mechanism on (onoff != 0) or off (onoff == 0)
// for this connection. Extension loading ships disabled so older
// applications do not inherit a new attack surface; always returns SQLITE_OK.
static public int sqlite3_enable_load_extension(sqlite3 db, int onoff)
{
sqlite3_mutex_enter(db.mutex);
if (onoff == 0)
{
db.flags &= ~SQLITE_LoadExtension;
}
else
{
db.flags |= SQLITE_LoadExtension;
}
sqlite3_mutex_leave(db.mutex);
return SQLITE_OK;
}
#endif //* SQLITE_OMIT_LOAD_EXTENSION */
/*
** The auto-extension code added regardless of whether or not extension
** loading is supported. We need a dummy sqlite3Apis pointer for that
** code if regular extension loading is not available. This is that
** dummy pointer.
*/
#if SQLITE_OMIT_LOAD_EXTENSION
const sqlite3_api_routines sqlite3Apis = null;
#endif
/*
** The following object holds the list of automatically loaded
** extensions.
**
** This list is shared across threads. The SQLITE_MUTEX_STATIC_MASTER
** mutex must be held while accessing this list.
*/
//typedef struct sqlite3AutoExtList sqlite3AutoExtList;
// Holds the list of automatically loaded extension entry points. The single
// shared instance (sqlite3Autoext) is guarded by the STATIC_MASTER mutex.
public class sqlite3AutoExtList
{
public int nExt = 0; /* Number of entries in aExt[] */
public dxInit[] aExt = null; /* Pointers to the extension init functions */
public sqlite3AutoExtList(int nExt, dxInit[] aExt)
{
this.nExt = nExt;
this.aExt = aExt;
}
}
static sqlite3AutoExtList sqlite3Autoext = new sqlite3AutoExtList(0, null);
/* The "wsdAutoext" macro will resolve to the autoextension
** state vector. If writable static data is unsupported on the target,
** we have to locate the state vector at run-time. In the more common
** case where writable static data is supported, wsdStat can refer directly
** to the "sqlite3Autoext" state vector declared above.
*/
#if SQLITE_OMIT_WSD
//# define wsdAutoextInit \
sqlite3AutoExtList *x = &GLOBAL(sqlite3AutoExtList,sqlite3Autoext)
//# define wsdAutoext x[0]
#else
//# define wsdAutoextInit
// No-op when writable static data is supported: wsdAutoext aliases the
// sqlite3Autoext global directly, so there is nothing to locate at run time.
static void wsdAutoextInit()
{
}
//# define wsdAutoext sqlite3Autoext
static sqlite3AutoExtList wsdAutoext = sqlite3Autoext;
#endif
/*
** Register a statically linked extension that is automatically
** loaded by every new database connection.
*/
// Register a statically linked extension init function that will be invoked
// for every new database connection. Registering the same entry point more
// than once is a harmless no-op (matching the documented C API behavior).
// Returns SQLITE_OK, or the error from sqlite3_initialize().
static int sqlite3_auto_extension(dxInit xInit)
{
int rc = SQLITE_OK;
#if !SQLITE_OMIT_AUTOINIT
rc = sqlite3_initialize();
if (rc != 0)
{
return rc;
}
else
#endif
{
int i;
#if SQLITE_THREADSAFE
sqlite3_mutex mutex = sqlite3MutexAlloc( SQLITE_MUTEX_STATIC_MASTER );
#else
sqlite3_mutex mutex = sqlite3MutexAlloc(SQLITE_MUTEX_STATIC_MASTER);
#endif
wsdAutoextInit();
sqlite3_mutex_enter(mutex);
// Scan the current list; i stops short of nExt when xInit is already there.
for (i = 0; i < wsdAutoext.nExt; i++)
{
if (wsdAutoext.aExt[i] == xInit)
break;
}
// BUG FIX: the upstream C code appends only when the entry point was not
// found (i == wsdAutoext.nExt); the port appended unconditionally, so
// re-registering the same extension grew the list with duplicates and ran
// its init function multiple times per connection.
if (i == wsdAutoext.nExt)
{
Array.Resize(ref wsdAutoext.aExt, wsdAutoext.nExt + 1);
wsdAutoext.aExt[wsdAutoext.nExt] = xInit;
wsdAutoext.nExt++;
}
sqlite3_mutex_leave(mutex);
Debug.Assert((rc & 0xff) == rc);
return rc;
}
}
/*
** Reset the automatic extension loading mechanism.
*/
// Empty the auto-extension list under the STATIC_MASTER mutex. Which state
// vector is cleared depends on SQLITE_OMIT_WSD (run-time-located vs. the
// static sqlite3Autoext); both branches drop the array and zero the count.
static void sqlite3_reset_auto_extension()
{
#if !SQLITE_OMIT_AUTOINIT
if (sqlite3_initialize() == SQLITE_OK)
#endif
{
#if SQLITE_THREADSAFE
sqlite3_mutex mutex = sqlite3MutexAlloc( SQLITE_MUTEX_STATIC_MASTER );
#else
sqlite3_mutex mutex = sqlite3MutexAlloc(SQLITE_MUTEX_STATIC_MASTER);
#endif
wsdAutoextInit();
sqlite3_mutex_enter(mutex);
#if SQLITE_OMIT_WSD
//sqlite3_free( ref wsdAutoext.aExt );
wsdAutoext.aExt = null;
wsdAutoext.nExt = 0;
#else
//sqlite3_free( ref sqlite3Autoext.aExt );
sqlite3Autoext.aExt = null;
sqlite3Autoext.nExt = 0;
#endif
sqlite3_mutex_leave(mutex);
}
}
/*
** Load all automatic extensions.
**
** If anything goes wrong, set an error in the database connection.
*/
// Run every registered auto-extension init function against a newly opened
// connection. The list mutex is held only while fetching each entry (not
// while running it), so the loop re-checks the bound on every iteration.
// On the first failing init the loop stops and an error is set on db.
static void sqlite3AutoLoadExtensions(sqlite3 db)
{
int i;
bool go = true;
dxInit xInit;//)(sqlite3*,char**,const sqlite3_api_routines);
wsdAutoextInit();
#if SQLITE_OMIT_WSD
if ( wsdAutoext.nExt == 0 )
#else
if (sqlite3Autoext.nExt == 0)
#endif
{
/* Common case: early out without every having to acquire a mutex */
return;
}
for (i = 0; go; i++)
{
string zErrmsg = "";
#if SQLITE_THREADSAFE
sqlite3_mutex mutex = sqlite3MutexAlloc( SQLITE_MUTEX_STATIC_MASTER );
#else
sqlite3_mutex mutex = sqlite3MutexAlloc(SQLITE_MUTEX_STATIC_MASTER);
#endif
// Fetch entry i under the mutex; stop when the list is exhausted.
sqlite3_mutex_enter(mutex);
if (i >= wsdAutoext.nExt)
{
xInit = null;
go = false;
}
else
{
xInit = (dxInit)
wsdAutoext.aExt[i];
}
sqlite3_mutex_leave(mutex);
zErrmsg = "";
// Invoke the init function outside the mutex; nonzero means failure.
if (xInit != null && xInit(db, ref zErrmsg, (sqlite3_api_routines)sqlite3Apis) != 0)
{
sqlite3Error(db, SQLITE_ERROR,
"automatic extension loading failed: %s", zErrmsg);
go = false;
}
sqlite3DbFree(db, ref zErrmsg);
}
}
}
}
| {
"content_hash": "8d27bbea77114ac72257a5446b632c19",
"timestamp": "",
"source": "github",
"line_count": 724,
"max_line_length": 102,
"avg_line_length": 28.009668508287294,
"alnum_prop": 0.6428324868090143,
"repo_name": "jcwmoore/athena",
"id": "34565bd6009fcbe897e7233c0c6c65444fc750bb",
"size": "21159",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Athena.SQLite/src/loadext_c.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "5066643"
}
],
"symlink_target": ""
} |
package org.apache.jackrabbit.oak.security.user;
import java.security.PrivilegedExceptionAction;
import java.util.HashMap;
import java.util.Map;
import javax.jcr.GuestCredentials;
import javax.jcr.SimpleCredentials;
import javax.security.auth.Subject;
import javax.security.auth.login.LoginException;
import com.google.common.collect.Iterables;
import org.apache.jackrabbit.api.security.user.Authorizable;
import org.apache.jackrabbit.api.security.user.User;
import org.apache.jackrabbit.api.security.user.UserManager;
import org.apache.jackrabbit.oak.AbstractSecurityTest;
import org.apache.jackrabbit.oak.Oak;
import org.apache.jackrabbit.oak.api.ContentRepository;
import org.apache.jackrabbit.oak.api.ContentSession;
import org.apache.jackrabbit.oak.api.Root;
import org.apache.jackrabbit.oak.api.Tree;
import org.apache.jackrabbit.oak.namepath.NamePathMapper;
import org.apache.jackrabbit.oak.plugins.index.IndexConstants;
import org.apache.jackrabbit.oak.plugins.index.property.PropertyIndexEditorProvider;
import org.apache.jackrabbit.oak.plugins.index.property.PropertyIndexProvider;
import org.apache.jackrabbit.oak.plugins.nodetype.TypeEditorProvider;
import org.apache.jackrabbit.oak.plugins.nodetype.write.InitialContent;
import org.apache.jackrabbit.oak.security.SecurityProviderImpl;
import org.apache.jackrabbit.oak.spi.security.ConfigurationParameters;
import org.apache.jackrabbit.oak.spi.security.SecurityProvider;
import org.apache.jackrabbit.oak.spi.security.authentication.SystemSubject;
import org.apache.jackrabbit.oak.spi.security.principal.AdminPrincipal;
import org.apache.jackrabbit.oak.spi.security.user.UserConfiguration;
import org.apache.jackrabbit.oak.spi.security.user.UserConstants;
import org.apache.jackrabbit.oak.spi.security.user.util.UserUtil;
import org.apache.jackrabbit.oak.util.TreeUtil;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/**
* @since OAK 1.0
*/
public class UserInitializerTest extends AbstractSecurityTest {
private UserManager userMgr;
private ConfigurationParameters config;
@Override
@Before
public void before() throws Exception {
// Set up the base repository/session first.
super.before();
// Cache the user manager and user-configuration options used by every test.
userMgr = getUserManager(root);
config = getUserConfiguration().getParameters();
}
@Test
public void testBuildInUserExist() throws Exception {
    // Both reserved accounts must have been created by the repository initializer.
    String adminId = UserUtil.getAdminId(config);
    String anonymousId = UserUtil.getAnonymousId(config);
    assertNotNull(userMgr.getAuthorizable(adminId));
    assertNotNull(userMgr.getAuthorizable(anonymousId));
}
@Test
public void testAdminUser() throws Exception {
    // The admin authorizable is a user (not a group) flagged as admin.
    Authorizable authorizable = userMgr.getAuthorizable(UserUtil.getAdminId(config));
    assertFalse(authorizable.isGroup());

    User admin = (User) authorizable;
    assertTrue(admin.isAdmin());
    // Its principal is both an AdminPrincipal and tree-based, named after the id.
    assertTrue(admin.getPrincipal() instanceof AdminPrincipal);
    assertTrue(admin.getPrincipal() instanceof TreeBasedPrincipal);
    assertEquals(admin.getID(), admin.getPrincipal().getName());
}
@Test
public void testAnonymous() throws Exception {
    // The anonymous authorizable is a plain user without admin status.
    Authorizable authorizable = userMgr.getAuthorizable(UserUtil.getAnonymousId(config));
    assertFalse(authorizable.isGroup());

    User anonymous = (User) authorizable;
    assertFalse(anonymous.isAdmin());
    // Its principal must NOT be an AdminPrincipal but is still tree-based.
    assertFalse(anonymous.getPrincipal() instanceof AdminPrincipal);
    assertTrue(anonymous.getPrincipal() instanceof TreeBasedPrincipal);
    assertEquals(anonymous.getID(), anonymous.getPrincipal().getName());
}
@Test
public void testUserContent() throws Exception {
    // The home node of each built-in user must exist in the content tree.
    String[] builtInIds = new String[] {UserUtil.getAdminId(config), UserUtil.getAnonymousId(config)};
    for (String id : builtInIds) {
        Authorizable authorizable = userMgr.getAuthorizable(id);
        assertTrue(root.getTree(authorizable.getPath()).exists());
    }
}
@Test
public void testUserIndexDefinitions() throws Exception {
    Tree indexRoot = root.getTree('/' + IndexConstants.INDEX_DEFINITIONS_NAME);
    assertTrue(indexRoot.exists());

    // rep:authorizableId and rep:principalName must both be indexed as unique.
    assertIndexDefinition(indexRoot.getChild("authorizableId"), UserConstants.REP_AUTHORIZABLE_ID, true);

    Tree principalNameIndex = indexRoot.getChild("principalName");
    assertIndexDefinition(principalNameIndex, UserConstants.REP_PRINCIPAL_NAME, true);
    // The principalName index is restricted to rep:Authorizable nodes.
    Iterable<String> declaringTypes = TreeUtil.getStrings(principalNameIndex, IndexConstants.DECLARING_NODE_TYPES);
    assertArrayEquals(
            new String[]{UserConstants.NT_REP_AUTHORIZABLE},
            Iterables.toArray(declaringTypes, String.class));
}
/**
* @since OAK 1.0 The configuration defines if the password of the
* admin user is being set.
*/
@Test
public void testAdminConfiguration() throws Exception {
Map<String,Object> userParams = new HashMap();
userParams.put(UserConstants.PARAM_ADMIN_ID, "admin");
userParams.put(UserConstants.PARAM_OMIT_ADMIN_PW, true);
ConfigurationParameters params = ConfigurationParameters.of(UserConfiguration.NAME, ConfigurationParameters.of(userParams));
SecurityProvider sp = new SecurityProviderImpl(params);
final ContentRepository repo = new Oak().with(new InitialContent())
.with(new PropertyIndexEditorProvider())
.with(new PropertyIndexProvider())
.with(new TypeEditorProvider())
.with(sp)
.createContentRepository();
ContentSession cs = Subject.doAs(SystemSubject.INSTANCE, new PrivilegedExceptionAction<ContentSession>() {
@Override
public ContentSession run() throws Exception {
return repo.login(null, null);
}
});
try {
Root root = cs.getLatestRoot();
UserConfiguration uc = sp.getConfiguration(UserConfiguration.class);
UserManager umgr = uc.getUserManager(root, NamePathMapper.DEFAULT);
Authorizable adminUser = umgr.getAuthorizable("admin");
assertNotNull(adminUser);
Tree adminTree = root.getTree(adminUser.getPath());
assertTrue(adminTree.exists());
assertNull(adminTree.getProperty(UserConstants.REP_PASSWORD));
} finally {
cs.close();
}
// login as admin should fail
ContentSession adminSession = null;
try {
adminSession = repo.login(new SimpleCredentials("admin", new char[0]), null);
fail();
} catch (LoginException e) {
//success
} finally {
if (adminSession != null) {
adminSession.close();
}
}
}
/**
* @since OAK 1.0 The anonymous user is optional.
*/
@Test
public void testAnonymousConfiguration() throws Exception {
Map<String,Object> userParams = new HashMap();
userParams.put(UserConstants.PARAM_ANONYMOUS_ID, "");
ConfigurationParameters params = ConfigurationParameters.of(UserConfiguration.NAME, ConfigurationParameters.of(userParams));
SecurityProvider sp = new SecurityProviderImpl(params);
final ContentRepository repo = new Oak().with(new InitialContent())
.with(new PropertyIndexEditorProvider())
.with(new PropertyIndexProvider())
.with(new TypeEditorProvider())
.with(sp)
.createContentRepository();
ContentSession cs = Subject.doAs(SystemSubject.INSTANCE, new PrivilegedExceptionAction<ContentSession>() {
@Override
public ContentSession run() throws Exception {
return repo.login(null, null);
}
});
try {
Root root = cs.getLatestRoot();
UserConfiguration uc = sp.getConfiguration(UserConfiguration.class);
UserManager umgr = uc.getUserManager(root, NamePathMapper.DEFAULT);
Authorizable anonymous = umgr.getAuthorizable(UserConstants.DEFAULT_ANONYMOUS_ID);
assertNull(anonymous);
} finally {
cs.close();
}
// login as admin should fail
ContentSession anonymousSession = null;
try {
anonymousSession = repo.login(new GuestCredentials(), null);
fail();
} catch (LoginException e) {
//success
} finally {
if (anonymousSession != null) {
anonymousSession.close();
}
}
}
private static void assertIndexDefinition(Tree tree, String propName, boolean isUnique) {
assertTrue(tree.exists());
assertEquals(isUnique, TreeUtil.getBoolean(tree, IndexConstants.UNIQUE_PROPERTY_NAME));
assertArrayEquals(
propName, new String[]{propName},
Iterables.toArray(TreeUtil.getStrings(tree, IndexConstants.PROPERTY_NAMES), String.class));
}
}
| {
"content_hash": "8907d55c9dcbcb6733db512a2d340f9e",
"timestamp": "",
"source": "github",
"line_count": 228,
"max_line_length": 132,
"avg_line_length": 40.20175438596491,
"alnum_prop": 0.6873227143792275,
"repo_name": "AndreasAbdi/jackrabbit-oak",
"id": "dc3111a8c02f8bfc4e99a4d2aaa7220379130bbb",
"size": "9968",
"binary": false,
"copies": "8",
"ref": "refs/heads/trunk",
"path": "oak-core/src/test/java/org/apache/jackrabbit/oak/security/user/UserInitializerTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "3451"
},
{
"name": "Groovy",
"bytes": "87994"
},
{
"name": "Java",
"bytes": "15520702"
},
{
"name": "JavaScript",
"bytes": "42583"
},
{
"name": "Perl",
"bytes": "7585"
},
{
"name": "Shell",
"bytes": "17311"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<!--[if IE 8]> <html lang="en" class="ie8"> <![endif]-->
<!--[if !IE]><!-->
<html lang="en">
<!--<![endif]-->
<head>
<meta charset="utf-8" />
<title>Color Admin | Tabs &amp; Accordions</title>
<meta content="width=device-width, initial-scale=1.0" name="viewport" />
<meta content="" name="description" />
<meta content="" name="author" />
<!-- ================== BEGIN BASE CSS STYLE ================== -->
<link href="assets/plugins/jquery-ui-1.10.4/jquery-ui.css" rel="stylesheet" />
<link href="assets/plugins/bootstrap-3.1.1/css/bootstrap.css" rel="stylesheet" />
<link href="assets/plugins/font-awesome-4.1.0/css/font-awesome.min.css" rel="stylesheet" />
<link href="assets/css/animate.css" rel="stylesheet" />
<link href="assets/css/style.css" rel="stylesheet" />
<link href="assets/css/style-responsive.css" rel="stylesheet" />
<!-- ================== END BASE CSS STYLE ================== -->
</head>
<body>
<!-- begin #page-loader -->
<div id="page-loader" class="fade in"><span class="spinner"></span></div>
<!-- end #page-loader -->
<!-- begin #page-container -->
<div id="page-container" class="fade">
<!-- begin #header -->
<div id="header" class="header navbar navbar-default navbar-fixed-top">
<!-- begin container-fluid -->
<div class="container-fluid">
<!-- begin mobile sidebar expand / collapse button -->
<div class="navbar-header">
<a href="index.html" class="navbar-brand"><span class="navbar-logo"></span> Color Admin</a>
<button type="button" class="navbar-toggle" data-click="sidebar-toggled">
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
</div>
<!-- end mobile sidebar expand / collapse button -->
<!-- begin header navigation right -->
<ul class="nav navbar-nav navbar-right">
<li>
<form class="navbar-form full-width">
<div class="form-group">
<input type="text" class="form-control" placeholder="Enter keyword" />
<button type="submit" class="btn btn-search"><i class="fa fa-search"></i></button>
</div>
</form>
</li>
<li class="dropdown">
<a href="javascript:;" data-toggle="dropdown" class="dropdown-toggle f-s-14">
<i class="fa fa-bell-o"></i>
<span class="label">5</span>
</a>
<ul class="dropdown-menu media-list pull-right animated fadeInDown">
<li class="dropdown-header">Notifications (5)</li>
<li class="media">
<a href="javascript:;">
<div class="pull-left media-object bg-red"><i class="fa fa-bug"></i></div>
<div class="media-body">
<h6 class="media-heading">Server Error Reports</h6>
<div class="text-muted">3 minutes ago</div>
</div>
</a>
</li>
<li class="media">
<a href="javascript:;">
<div class="pull-left"><img src="assets/img/user-1.jpg" class="media-object" alt="" /></div>
<div class="media-body">
<h6 class="media-heading">John Smith</h6>
<p>Quisque pulvinar tellus sit amet sem scelerisque tincidunt.</p>
<div class="text-muted">25 minutes ago</div>
</div>
</a>
</li>
<li class="media">
<a href="javascript:;">
<div class="pull-left"><img src="assets/img/user-2.jpg" class="media-object" alt="" /></div>
<div class="media-body">
<h6 class="media-heading">Olivia</h6>
<p>Quisque pulvinar tellus sit amet sem scelerisque tincidunt.</p>
<div class="text-muted">35 minutes ago</div>
</div>
</a>
</li>
<li class="media">
<a href="javascript:;">
<div class="pull-left media-object bg-green"><i class="fa fa-plus"></i></div>
<div class="media-body">
<h6 class="media-heading"> New User Registered</h6>
<div class="text-muted">1 hour ago</div>
</div>
</a>
</li>
<li class="media">
<a href="javascript:;">
<div class="pull-left media-object bg-blue"><i class="fa fa-envelope"></i></div>
<div class="media-body">
<h6 class="media-heading"> New Email From John</h6>
<div class="text-muted">2 hour ago</div>
</div>
</a>
</li>
<li class="dropdown-footer text-center">
<a href="javascript:;">View more</a>
</li>
</ul>
</li>
<li class="dropdown navbar-user">
<a href="javascript:;" class="dropdown-toggle" data-toggle="dropdown">
<img src="assets/img/user-11.jpg" alt="" />
<span class="hidden-xs">Adam Schwartz</span> <b class="caret"></b>
</a>
<ul class="dropdown-menu animated fadeInLeft">
<li class="arrow"></li>
<li><a href="javascript:;">Edit Profile</a></li>
<li><a href="javascript:;"><span class="badge badge-danger pull-right">2</span> Inbox</a></li>
<li><a href="javascript:;">Calendar</a></li>
<li><a href="javascript:;">Setting</a></li>
<li class="divider"></li>
<li><a href="javascript:;">Log Out</a></li>
</ul>
</li>
</ul>
<!-- end header navigation right -->
</div>
<!-- end container-fluid -->
</div>
<!-- end #header -->
<!-- begin #sidebar -->
<div id="sidebar" class="sidebar">
<!-- begin sidebar scrollbar -->
<div data-scrollbar="true" data-height="100%">
<!-- begin sidebar user -->
<ul class="nav">
<li class="nav-profile">
<div class="image">
<a href="javascript:;"><img src="assets/img/user-11.jpg" alt="" /></a>
</div>
<div class="info">
Sean Ngu
<small>Front end developer</small>
</div>
</li>
</ul>
<!-- end sidebar user -->
<!-- begin sidebar nav -->
<ul class="nav">
<li class="nav-header">Navigation</li>
<li>
<a href="index.html"><i class="fa fa-laptop"></i> <span>Dashboard</span></a>
</li>
<li>
<a href="inbox.html">
<span class="badge pull-right">10</span>
<i class="fa fa-inbox"></i> <span>Inbox</span>
</a>
</li>
<li class="has-sub active">
<a href="javascript:;">
<i class="fa fa-suitcase"></i>
<b class="caret pull-right"></b>
<span>UI Elements</span>
</a>
<ul class="sub-menu">
<li><a href="ui_general.html">General</a></li>
<li><a href="ui_typography.html">Typography</a></li>
<li class="active"><a href="ui_tabs_accordions.html">Tabs &amp; Accordions</a></li>
<li><a href="ui_modal_notification.html">Modal & Notification</a></li>
<li><a href="ui_widget_boxes.html">Widget Boxes</a></li>
<li><a href="ui_media_object.html">Media Object</a></li>
<li><a href="ui_buttons.html">Buttons</a></li>
<li><a href="ui_icons.html">Icons</a></li>
</ul>
</li>
<li class="has-sub">
<a href="javascript:;">
<i class="fa fa-file-o"></i>
<b class="caret pull-right"></b>
<span>Form Stuff</span>
</a>
<ul class="sub-menu">
<li><a href="form_elements.html">Form Elements</a></li>
<li><a href="form_plugins.html">Form Plugins</a></li>
<li><a href="form_validation.html">Form Validation</a></li>
<li><a href="form_wizards.html">Wizards</a></li>
<li><a href="form_wysiwyg.html">WYSIWYG</a></li>
</ul>
</li>
<li class="has-sub">
<a href="javascript:;">
<b class="caret pull-right"></b>
<i class="fa fa-th"></i>
<span>Tables</span>
</a>
<ul class="sub-menu">
<li><a href="table_basic.html">Basic Tables</a></li>
<li><a href="table_manage.html">Managed Tables</a></li>
</ul>
</li>
<li class="has-sub">
<a href="javascript:;">
<i class="fa fa-envelope"></i>
<b class="caret pull-right"></b>
<span>
Email Template
<span class="label label-success m-l-5">NEW</span>
</span>
</a>
<ul class="sub-menu">
<li><a href="email_system.html">System Template <i class="fa fa-paper-plane text-success m-l-5"></i></a></li>
<li><a href="email_newsletter.html">Newsletter Template <i class="fa fa-paper-plane text-success m-l-5"></i></a></li>
</ul>
</li>
<li><a href="charts.html"><i class="fa fa-signal"></i> <span>Charts</span></a></li>
<li><a href="calendar.html"><i class="fa fa-calendar"></i> <span>Calendar</span></a></li>
<li class="has-sub">
<a href="javascript:;">
<i class="fa fa-map-marker"></i>
<b class="caret pull-right"></b>
<span>Map</span>
</a>
<ul class="sub-menu">
<li><a href="map_vector.html">Vector Map</a></li>
<li><a href="map_google.html">Google Map</a></li>
</ul>
</li>
<li><a href="gallery.html"><i class="fa fa-camera"></i> <span>Gallery</span></a></li>
<li class="has-sub">
<a href="javascript:;">
<i class="fa fa-cogs"></i>
<b class="caret pull-right"></b>
<span>Page Options</span>
</a>
<ul class="sub-menu">
<li><a href="page_blank.html">Blank Page</a></li>
<li><a href="page_with_footer.html">Page with Footer</a></li>
<li><a href="page_without_sidebar.html">Page without Sidebar</a></li>
<li><a href="page_with_right_sidebar.html">Page with Right Sidebar</a></li>
<li><a href="page_with_minified_sidebar.html">Page with Minified Sidebar</a></li>
</ul>
</li>
<li class="has-sub">
<a href="javascript:;">
<i class="fa fa-gift"></i>
<b class="caret pull-right"></b>
<span>
Extra
<span class="label label-success m-l-5">NEW</span>
</span>
</a>
<ul class="sub-menu">
<li><a href="extra_timeline.html">Timeline <i class="fa fa-paper-plane text-success m-l-5"></i></a></li>
<li><a href="extra_coming_soon.html">Coming Soon Page <i class="fa fa-paper-plane text-success m-l-5"></i></a></li>
<li><a href="extra_search_results.html">Search Results</a></li>
<li><a href="extra_invoice.html">Invoice</a></li>
<li><a href="extra_404_error.html">404 Error Page</a></li>
<li><a href="extra_login.html">Login</a></li>
</ul>
</li>
<li class="has-sub">
<a href="javascript:;">
<i class="fa fa-align-left"></i>
<b class="caret pull-right"></b>
<span>Menu Level</span>
</a>
<ul class="sub-menu">
<li class="has-sub">
<a href="javascript:;">
<b class="caret pull-right"></b>
Menu 1.1
</a>
<ul class="sub-menu">
<li class="has-sub">
<a href="javascript:;">
<b class="caret pull-right"></b>
Menu 2.1
</a>
<ul class="sub-menu">
<li><a href="javascript:;">Menu 3.1</a></li>
<li><a href="javascript:;">Menu 3.2</a></li>
</ul>
</li>
<li><a href="javascript:;">Menu 2.2</a></li>
<li><a href="javascript:;">Menu 2.3</a></li>
</ul>
</li>
<li><a href="javascript:;">Menu 1.2</a></li>
<li><a href="javascript:;">Menu 1.3</a></li>
</ul>
</li>
<!-- begin sidebar minify button -->
<li><a href="javascript:;" class="sidebar-minify-btn" data-click="sidebar-minify"><i class="fa fa-angle-double-left"></i></a></li>
<!-- end sidebar minify button -->
</ul>
<!-- end sidebar nav -->
</div>
<!-- end sidebar scrollbar -->
</div>
<!-- end #sidebar -->
<!-- begin #content -->
<div id="content" class="content">
<!-- begin breadcrumb -->
<ol class="breadcrumb pull-right">
<li><a href="javascript:;">Home</a></li>
<li><a href="javascript:;">UI Elements</a></li>
<li class="active">Tabs &amp; Accordions</li>
</ol>
<!-- end breadcrumb -->
<!-- begin page-header -->
<h1 class="page-header">Tabs &amp; Accordions <small>header small text goes here...</small></h1>
<!-- end page-header -->
<!-- begin row -->
<div class="row">
<!-- begin col-6 -->
<div class="col-md-6">
<ul class="nav nav-tabs">
<li class="active"><a href="#default-tab-1" data-toggle="tab">Default Tab 1</a></li>
<li class=""><a href="#default-tab-2" data-toggle="tab">Default Tab 2</a></li>
<li class=""><a href="#default-tab-3" data-toggle="tab">Default Tab 3</a></li>
</ul>
<div class="tab-content">
<div class="tab-pane fade active in" id="default-tab-1">
<h3 class="m-t-10"><i class="fa fa-cog"></i> Lorem ipsum dolor sit amet</h3>
<p>
Lorem ipsum dolor sit amet, consectetur adipiscing elit.
Integer ac dui eu felis hendrerit lobortis. Phasellus elementum, nibh eget adipiscing porttitor,
est diam sagittis orci, a ornare nisi quam elementum tortor. Proin interdum ante porta est convallis
dapibus dictum in nibh. Aenean quis massa congue metus mollis fermentum eget et tellus.
Aenean tincidunt, mauris ut dignissim lacinia, nisi urna consectetur sapien, nec eleifend orci eros id lectus.
</p>
<p class="text-right m-b-0">
<a href="javascript:;" class="btn btn-white m-r-5">Default</a>
<a href="javascript:;" class="btn btn-primary">Primary</a>
</p>
</div>
<div class="tab-pane fade" id="default-tab-2">
<blockquote>
<p>Lorem ipsum dolor sit amet, consectetur adipiscing elit.</p>
<small>Someone famous in <cite title="Source Title">Source Title</cite></small>
</blockquote>
<h4>Lorem ipsum dolor sit amet</h4>
<p>
Nullam ac sapien justo. Nam augue mauris, malesuada non magna sed, feugiat blandit ligula.
In tristique tincidunt purus id iaculis. Pellentesque volutpat tortor a mauris convallis,
sit amet scelerisque lectus adipiscing.
</p>
</div>
<div class="tab-pane fade" id="default-tab-3">
<p>
<span class="fa-stack fa-4x pull-left m-r-10">
<i class="fa fa-square-o fa-stack-2x"></i>
<i class="fa fa-twitter fa-stack-1x"></i>
</span>
Praesent tincidunt nulla ut elit vestibulum viverra. Sed placerat magna eget eros accumsan elementum.
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nam quis lobortis neque.
Maecenas justo odio, bibendum fringilla quam nec, commodo rutrum quam.
Donec cursus erat in lacus congue sodales. Nunc bibendum id augue sit amet placerat.
Quisque et quam id felis tempus volutpat at at diam. Vivamus ac diam turpis.Sed at lacinia augue.
Nulla facilisi. Fusce at erat suscipit, dapibus elit quis, luctus nulla.
Quisque adipiscing dui nec orci fermentum blandit.
Sed at lacinia augue. Nulla facilisi. Fusce at erat suscipit, dapibus elit quis, luctus nulla.
Quisque adipiscing dui nec orci fermentum blandit.
</p>
</div>
</div>
<ul class="nav nav-pills">
<li class="active"><a href="#nav-pills-tab-1" data-toggle="tab">Pills Tab 1</a></li>
<li><a href="#nav-pills-tab-2" data-toggle="tab">Pills Tab 2</a></li>
<li><a href="#nav-pills-tab-3" data-toggle="tab">Pills Tab 3</a></li>
<li><a href="#nav-pills-tab-4" data-toggle="tab">Pills Tab 4</a></li>
</ul>
<div class="tab-content">
<div class="tab-pane fade active in" id="nav-pills-tab-1">
<h3 class="m-t-10">Nav Pills Tab 1</h3>
<p>
Lorem ipsum dolor sit amet, consectetur adipiscing elit.
Integer ac dui eu felis hendrerit lobortis. Phasellus elementum, nibh eget adipiscing porttitor,
est diam sagittis orci, a ornare nisi quam elementum tortor.
Proin interdum ante porta est convallis dapibus dictum in nibh.
Aenean quis massa congue metus mollis fermentum eget et tellus.
Aenean tincidunt, mauris ut dignissim lacinia, nisi urna consectetur sapien,
nec eleifend orci eros id lectus.
</p>
</div>
<div class="tab-pane fade" id="nav-pills-tab-2">
<h3 class="m-t-10">Nav Pills Tab 2</h3>
<p>
Lorem ipsum dolor sit amet, consectetur adipiscing elit.
Integer ac dui eu felis hendrerit lobortis. Phasellus elementum, nibh eget adipiscing porttitor,
est diam sagittis orci, a ornare nisi quam elementum tortor.
Proin interdum ante porta est convallis dapibus dictum in nibh.
Aenean quis massa congue metus mollis fermentum eget et tellus.
Aenean tincidunt, mauris ut dignissim lacinia, nisi urna consectetur sapien,
nec eleifend orci eros id lectus.
</p>
</div>
<div class="tab-pane fade" id="nav-pills-tab-3">
<h3 class="m-t-10">Nav Pills Tab 3</h3>
<p>
Lorem ipsum dolor sit amet, consectetur adipiscing elit.
Integer ac dui eu felis hendrerit lobortis. Phasellus elementum, nibh eget adipiscing porttitor,
est diam sagittis orci, a ornare nisi quam elementum tortor.
Proin interdum ante porta est convallis dapibus dictum in nibh.
Aenean quis massa congue metus mollis fermentum eget et tellus.
Aenean tincidunt, mauris ut dignissim lacinia, nisi urna consectetur sapien,
nec eleifend orci eros id lectus.
</p>
</div>
<div class="tab-pane fade" id="nav-pills-tab-4">
<h3 class="m-t-10">Nav Pills Tab 4</h3>
<p>
Lorem ipsum dolor sit amet, consectetur adipiscing elit.
Integer ac dui eu felis hendrerit lobortis. Phasellus elementum, nibh eget adipiscing porttitor,
est diam sagittis orci, a ornare nisi quam elementum tortor.
Proin interdum ante porta est convallis dapibus dictum in nibh.
Aenean quis massa congue metus mollis fermentum eget et tellus.
Aenean tincidunt, mauris ut dignissim lacinia, nisi urna consectetur sapien,
nec eleifend orci eros id lectus.
</p>
</div>
</div>
</div>
<!-- end col-6 -->
<!-- begin col-6 -->
<div class="col-md-6">
<div class="panel-group" id="accordion">
<div class="panel panel-inverse">
<div class="panel-heading">
<h3 class="panel-title">
<a class="accordion-toggle" data-toggle="collapse" data-parent="#accordion" href="#collapseOne">
<i class="fa fa-plus-circle pull-right"></i>
Collapsible Group Item #1
</a>
</h3>
</div>
<div id="collapseOne" class="panel-collapse collapse in">
<div class="panel-body">
Anim pariatur cliche reprehenderit, enim eiusmod high life accusamus terry richardson ad squid. 3 wolf moon officia aute, non cupidatat skateboard dolor brunch. Food truck quinoa nesciunt laborum eiusmod. Brunch 3 wolf moon tempor, sunt aliqua put a bird on it squid single-origin coffee nulla assumenda shoreditch et. Nihil anim keffiyeh helvetica, craft beer labore wes anderson cred nesciunt sapiente ea proident. Ad vegan excepteur butcher vice lomo. Leggings occaecat craft beer farm-to-table, raw denim aesthetic synth nesciunt you probably haven't heard of them accusamus labore sustainable VHS.
</div>
</div>
</div>
<div class="panel panel-inverse">
<div class="panel-heading">
<h3 class="panel-title">
<a class="accordion-toggle" data-toggle="collapse" data-parent="#accordion" href="#collapseTwo">
<i class="fa fa-plus-circle pull-right"></i>
Collapsible Group Item #2
</a>
</h3>
</div>
<div id="collapseTwo" class="panel-collapse collapse">
<div class="panel-body">
Anim pariatur cliche reprehenderit, enim eiusmod high life accusamus terry richardson ad squid. 3 wolf moon officia aute, non cupidatat skateboard dolor brunch. Food truck quinoa nesciunt laborum eiusmod. Brunch 3 wolf moon tempor, sunt aliqua put a bird on it squid single-origin coffee nulla assumenda shoreditch et. Nihil anim keffiyeh helvetica, craft beer labore wes anderson cred nesciunt sapiente ea proident. Ad vegan excepteur butcher vice lomo. Leggings occaecat craft beer farm-to-table, raw denim aesthetic synth nesciunt you probably haven't heard of them accusamus labore sustainable VHS.
</div>
</div>
</div>
<div class="panel panel-inverse">
<div class="panel-heading">
<h3 class="panel-title">
<a class="accordion-toggle" data-toggle="collapse" data-parent="#accordion" href="#collapseThree">
<i class="fa fa-plus-circle pull-right"></i>
Collapsible Group Item #3
</a>
</h3>
</div>
<div id="collapseThree" class="panel-collapse collapse">
<div class="panel-body">
Anim pariatur cliche reprehenderit, enim eiusmod high life accusamus terry richardson ad squid. 3 wolf moon officia aute, non cupidatat skateboard dolor brunch. Food truck quinoa nesciunt laborum eiusmod. Brunch 3 wolf moon tempor, sunt aliqua put a bird on it squid single-origin coffee nulla assumenda shoreditch et. Nihil anim keffiyeh helvetica, craft beer labore wes anderson cred nesciunt sapiente ea proident. Ad vegan excepteur butcher vice lomo. Leggings occaecat craft beer farm-to-table, raw denim aesthetic synth nesciunt you probably haven't heard of them accusamus labore sustainable VHS.
</div>
</div>
</div>
<div class="panel panel-inverse">
<div class="panel-heading">
<h3 class="panel-title">
<a class="accordion-toggle" data-toggle="collapse" data-parent="#accordion" href="#collapseFour">
<i class="fa fa-plus-circle pull-right"></i>
Collapsible Group Item #4
</a>
</h3>
</div>
<div id="collapseFour" class="panel-collapse collapse">
<div class="panel-body">
Anim pariatur cliche reprehenderit, enim eiusmod high life accusamus terry richardson ad squid. 3 wolf moon officia aute, non cupidatat skateboard dolor brunch. Food truck quinoa nesciunt laborum eiusmod. Brunch 3 wolf moon tempor, sunt aliqua put a bird on it squid single-origin coffee nulla assumenda shoreditch et. Nihil anim keffiyeh helvetica, craft beer labore wes anderson cred nesciunt sapiente ea proident. Ad vegan excepteur butcher vice lomo. Leggings occaecat craft beer farm-to-table, raw denim aesthetic synth nesciunt you probably haven't heard of them accusamus labore sustainable VHS.
</div>
</div>
</div>
<div class="panel panel-inverse">
<div class="panel-heading">
<h3 class="panel-title">
<a class="accordion-toggle" data-toggle="collapse" data-parent="#accordion" href="#collapseFive">
<i class="fa fa-plus-circle pull-right"></i>
Collapsible Group Item #5
</a>
</h3>
</div>
<div id="collapseFive" class="panel-collapse collapse">
<div class="panel-body">
Anim pariatur cliche reprehenderit, enim eiusmod high life accusamus terry richardson ad squid. 3 wolf moon officia aute, non cupidatat skateboard dolor brunch. Food truck quinoa nesciunt laborum eiusmod. Brunch 3 wolf moon tempor, sunt aliqua put a bird on it squid single-origin coffee nulla assumenda shoreditch et. Nihil anim keffiyeh helvetica, craft beer labore wes anderson cred nesciunt sapiente ea proident. Ad vegan excepteur butcher vice lomo. Leggings occaecat craft beer farm-to-table, raw denim aesthetic synth nesciunt you probably haven't heard of them accusamus labore sustainable VHS.
</div>
</div>
</div>
<div class="panel panel-inverse">
<div class="panel-heading">
<h3 class="panel-title">
<a class="accordion-toggle" data-toggle="collapse" data-parent="#accordion" href="#collapseSix">
<i class="fa fa-plus-circle pull-right"></i>
Collapsible Group Item #6
</a>
</h3>
</div>
<div id="collapseSix" class="panel-collapse collapse">
<div class="panel-body">
Anim pariatur cliche reprehenderit, enim eiusmod high life accusamus terry richardson ad squid. 3 wolf moon officia aute, non cupidatat skateboard dolor brunch. Food truck quinoa nesciunt laborum eiusmod. Brunch 3 wolf moon tempor, sunt aliqua put a bird on it squid single-origin coffee nulla assumenda shoreditch et. Nihil anim keffiyeh helvetica, craft beer labore wes anderson cred nesciunt sapiente ea proident. Ad vegan excepteur butcher vice lomo. Leggings occaecat craft beer farm-to-table, raw denim aesthetic synth nesciunt you probably haven't heard of them accusamus labore sustainable VHS.
</div>
</div>
</div>
<div class="panel panel-inverse">
<div class="panel-heading">
<h3 class="panel-title">
<a class="accordion-toggle" data-toggle="collapse" data-parent="#accordion" href="#collapseSeven">
<i class="fa fa-plus-circle pull-right"></i>
Collapsible Group Item #7
</a>
</h3>
</div>
<div id="collapseSeven" class="panel-collapse collapse">
<div class="panel-body">
Anim pariatur cliche reprehenderit, enim eiusmod high life accusamus terry richardson ad squid. 3 wolf moon officia aute, non cupidatat skateboard dolor brunch. Food truck quinoa nesciunt laborum eiusmod. Brunch 3 wolf moon tempor, sunt aliqua put a bird on it squid single-origin coffee nulla assumenda shoreditch et. Nihil anim keffiyeh helvetica, craft beer labore wes anderson cred nesciunt sapiente ea proident. Ad vegan excepteur butcher vice lomo. Leggings occaecat craft beer farm-to-table, raw denim aesthetic synth nesciunt you probably haven't heard of them accusamus labore sustainable VHS.
</div>
</div>
</div>
</div>
</div>
<!-- end col-6 -->
</div>
<!-- end row -->
</div>
<!-- end #content -->
<!-- begin scroll to top btn -->
<a href="javascript:;" class="btn btn-icon btn-circle btn-success btn-scroll-to-top fade" data-click="scroll-top"><i class="fa fa-angle-up"></i></a>
<!-- end scroll to top btn -->
</div>
<!-- end page container -->
<!-- ================== BEGIN BASE JS ================== -->
<script src="assets/plugins/jquery-1.7.2/jquery-1.7.2.js"></script>
<script src="assets/plugins/jquery-ui-1.10.4/ui/minified/jquery-ui.min.js"></script>
<script src="assets/plugins/bootstrap-3.1.1/js/bootstrap.min.js"></script>
<script src="assets/plugins/slimscroll/jquery.slimscroll.min.js"></script>
<!-- ================== END BASE JS ================== -->
<!-- ================== BEGIN PAGE LEVEL JS ================== -->
<script src="assets/js/apps.js"></script>
<!-- ================== END PAGE LEVEL JS ================== -->
<script>
$(document).ready(function() {
App.init();
});
</script>
<script>
	// Standard Google Analytics (analytics.js) async bootstrap snippet:
	// installs a stub ga() that queues calls, then loads the real library
	// asynchronously so page rendering is not blocked.
	(function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
	(i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
	m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
	})(window,document,'script','//www.google-analytics.com/analytics.js','ga');
	// Register the tracker for this property/domain and record the page view.
	ga('create', 'UA-53034621-1', 'sean-theme.com');
	ga('send', 'pageview');
</script>
</body>
</html>
| {
"content_hash": "f0125a4d5c4dab776d3b3c5ab930f13f",
"timestamp": "",
"source": "github",
"line_count": 579,
"max_line_length": 611,
"avg_line_length": 49.96718480138169,
"alnum_prop": 0.5737444263938336,
"repo_name": "Simon-Lau/finder",
"id": "236ec616bfed192b0a500578612713be36efa618",
"size": "28931",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "htdocs/ColorAdmin/ui_tabs_accordions.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "189"
},
{
"name": "CSS",
"bytes": "193204"
},
{
"name": "HTML",
"bytes": "1696298"
},
{
"name": "JavaScript",
"bytes": "3074132"
},
{
"name": "Makefile",
"bytes": "763"
},
{
"name": "PHP",
"bytes": "507195"
},
{
"name": "Ruby",
"bytes": "1536"
},
{
"name": "Shell",
"bytes": "976"
},
{
"name": "Smarty",
"bytes": "132407"
}
],
"symlink_target": ""
} |
"""This module provides the TimeSeq class.
"""
import logging, csv, copy
from Transforms import SeqXform
class TimeSeq:
"""A class for representing data indexed by time.
"""
def __init__(self,columnNames,data):
self.__colIndexByName = {}
self.__columnNames = list(columnNames)
self.__ResetNameForColumnDict()
self.data =map(list,data) #convert from list of tuples to list of lists
assert 0==len(self.data) or (
len(self.data[0])==len(self.__columnNames)),"""
Wrong number of columnNames for data. """
def __ResetNameForColumnDict(self):
"""
This function recreates the __colIndexByName dictionary. The
__colIndexByName dictionary maps names to column indexes and is used
by the GetColIndex function quickly look up the index for a
column name
"""
self.__colIndexByName = {}
for i in range(len(self.__columnNames)):
self.__colIndexByName[self.__columnNames[i]] = i
def RemoveAllFieldsExcept(self,fieldsToKeep):
"""Remove all columns except for ones with the given indexes.
INPUTS:
-- fieldsToKeep: List of integer columns to keep.
-------------------------------------------------------
PURPOSE: Removes all fields except the ones given.
"""
fieldsToKeep = sorted([(
f if isinstance(f, (int, long)) else self.GetColIndex(f))
for f in fieldsToKeep])
oldColumnNames = self.__columnNames
self.__columnNames = []
for i in fieldsToKeep:
self.__columnNames.append(oldColumnNames[i])
oldData = self.data
self.data = [None]*len(oldData)
append = self.data.append
for row in range(len(oldData)):
oldRow = oldData[row]
self.data[row] = []
append = self.data[row].append
for i in fieldsToKeep:
append(oldRow[i])
del oldData
del oldColumnNames
self.__ResetNameForColumnDict()
def RemoveFieldsNamed(self,fieldsToRemove):
"""Remove columns with the given names.
INPUTS:
-- fieldsToRemove: List of strings representing columns to remove.
-------------------------------------------------------
PURPOSE: Removes the given columns as shown in the following
example:
>>> exampleQ = TimeSeq(['day','qnt','price','dm'],[[1,2,2,4],[2,5,5,6]])
>>> print exampleQ.data
[[1, 2, 2, 4], [2, 5, 5, 6]]
>>> exampleQ.RemoveFieldsNamed(['qnt','dm'])
>>> print exampleQ.GetColumnNames()
['day', 'price']
>>> print exampleQ.data
[[1, 2], [2, 5]]
"""
fieldsToRemove = set(fieldsToRemove)
fieldsToKeep = [i for i in range(len(self.__columnNames))
if self.__columnNames[i] not in fieldsToRemove]
return self.RemoveAllFieldsExcept(fieldsToKeep)
@classmethod
def ReadFromSimpleCSVFile(
    cls, filename):
    """
    Creates a TimeSeq object by reading data from a simple format
    CSV file.

    The format of the file is simply one line of header, and the
    rest are data. There is no blank line between the header and
    the data.

    INPUTS:

    -- filename:        string name of CSV file to read.

    RETURNS:

    -- a TimeSeq object (an instance of the calling class, so
       subclasses get instances of themselves).
    """
    fd = open(filename, "r")
    try:
        reader = csv.reader(fd)
        fields = next(reader)  # header line; next() works on py2.6+ and py3
        rows = []
        for line in reader:
            if len(line) == 0:  # skip completely blank lines
                continue
            if len(line) < len(fields):
                # Pad short rows so every row matches the header width.
                line.extend([None]*(len(fields)-len(line)))
            rows.append(line)
    finally:
        fd.close()  # previously the file handle was leaked
    # Use cls, not a hard-coded TimeSeq, so subclasses work correctly.
    return cls(fields, rows)
def WriteToSimpleCSVFile(self, filename):
    """Writes a TimeSeq object to a simple CSV file format

    The format of the file is simply one line of header, and the
    rest are data. There is no blank line between the header and
    the data.

    INPUTS:

    -- filename:        string name of CSV file to write to.
    """
    fd = open(filename, "w")
    try:
        writer = csv.writer(fd)
        writer.writerow(self.GetColumnNames())  # header line
        for line in self.data:
            writer.writerow(line)
    finally:
        # Previously the handle was never closed, so buffered output
        # could be lost and the descriptor leaked.
        fd.close()
def AddFields(self,transformList,lines=None):
    """
    INPUTS:

    -- transformList:        A list of SeqXform objects to add
                             to this sequence.

    -- lines:                A generator indicating which lines of
                             of self.data to process. If this is None,
                             then all lines in self.data are processed.

    -------------------------------------------------------

    PURPOSE:        Add the fields corresponding to the transforms in
    transformList to this sequence. Specifically, this function
    adds columns to this sequence corresponding to the outputFields
    for the transfomrs in transformList and then populates these
    fields by calling the transforms sequentially on every row.
    Only those lines indicating by lines are processed.

    Note that you can have later transforms refer to earlier
    transforms.

    The following example illustrates usage how the lines argument can
    be combined with a generator such as the WeeklyDateJumper to process
    data on a weekly level:

    >>> import datetime
    >>> import Sequence
    >>> from Transforms import SeqXform
    >>> exampleQ = Sequence.TimeSeq(['day','price','quantity'],
    ... [[datetime.date(2000,1,1)+datetime.timedelta(i),i,i+1] for i in range(5)])
    >>> class ExampleTransform(SeqXform):
    ...     def ProcessRow(self,args): return [args['price'] * args['quantity']]
    ...
    >>> exampleQ.AddFields([ExampleTransform([],['product_v2'])],
    ... (num for (num, line) in enumerate(exampleQ.data) if line[0].weekday() == 1))
    >>> print '\\n'.join(map(str,exampleQ.data))
    [datetime.date(2000, 1, 1), 0, 1, None]
    [datetime.date(2000, 1, 2), 1, 2, None]
    [datetime.date(2000, 1, 3), 2, 3, None]
    [datetime.date(2000, 1, 4), 3, 4, 12]
    [datetime.date(2000, 1, 5), 4, 5, None]
    """
    # Transforms flagged with doNotProcess are silently skipped.
    transformList = [t for t in transformList
                     if not getattr(t,'doNotProcess',False)]
    if (len(transformList)==0): return
    # Fail fast with a clear message if a non-SeqXform slipped in.
    self.ComplainAboutNonTransforms(transformList)
    logging.debug('Applying transforms: %s.' %
                  ', '.join([str(t) for t in transformList]))
    # One new column per outputField of every transform; adderList records
    # which transform requested each column (used in error messages when
    # duplicate names collide).
    nameList = sum([t.outputFields for t in transformList],[])
    adderList =sum([[str(t)]*len(t.outputFields) for t in transformList],[])
    self.AddBlankColumns(nameList,adderList)
    txRange = range(len(transformList))
    # Pre-compute, for each transform, the slice of row positions that its
    # output columns occupy (outputFields of one transform are contiguous
    # because AddBlankColumns inserted them together).
    txSlice = []
    for transform in transformList:
        startCol = self.GetColIndex(transform.outputFields[0])
        endCol = self.GetColIndex(transform.outputFields[-1])
        assert startCol is not None and endCol is not None
        txSlice.append(slice(startCol,endCol+1))
    for t in transformList: t.Startup(self)
    numCols = len(self.__columnNames)
    if (None == lines): lines = xrange(len(self.data))
    for i in lines:
        # Build a name->value dict for the current row; transforms read
        # their inputs from this dict.  Because each transform's results
        # are written back into self.data before the next transform runs,
        # later transforms can refer to earlier transforms' outputs.
        args = {}
        for field in range(numCols):
            args[self.__columnNames[field]] = self.data[i][field]
        for txNum in txRange:
            result = SeqXform.ProcessTransformList(
                [transformList[txNum]],args,self,i)
            self.data[i][txSlice[txNum]] = result
    for t in transformList: t.Shutdown(self)
@staticmethod
def ComplainAboutNonTransforms(transformList):
    "Complain about things in input list not instances of SeqXform"
    offenders = [(position, item)
                 for (position, item) in enumerate(transformList)
                 if not isinstance(item, SeqXform)]
    if (offenders):
        raise TypeError('''
The following elements were not SeqXform instances:\n%s
''' % '\n'.join(['element %i: %s' % (i, t) for (i,t) in offenders]))
def _regr_test_AddFields(self):
    """Regression doctest for AddFields (run via doctest.testmod in _test).

    Exercises three calls: all lines, an explicit line list, and a
    generator of line numbers; unprocessed lines keep None in the new
    column.

    >>> import datetime
    >>> from Transforms import SeqXform
    >>> from Sequence import *
    >>> exampleQ = TimeSeq(['day','price','quantity'],
    ... [[datetime.date(2000,1,1)+datetime.timedelta(i),i,i+1] for i in range(11)])
    >>> class ExampleTransform(SeqXform):
    ...     def ProcessRow(self,args): return [args['price'] * args['quantity']]
    >>> exampleQ.AddFields([ExampleTransform([],['product'])])
    >>> print '\\n'.join(map(str,exampleQ.data))
    [datetime.date(2000, 1, 1), 0, 1, 0]
    [datetime.date(2000, 1, 2), 1, 2, 2]
    [datetime.date(2000, 1, 3), 2, 3, 6]
    [datetime.date(2000, 1, 4), 3, 4, 12]
    [datetime.date(2000, 1, 5), 4, 5, 20]
    [datetime.date(2000, 1, 6), 5, 6, 30]
    [datetime.date(2000, 1, 7), 6, 7, 42]
    [datetime.date(2000, 1, 8), 7, 8, 56]
    [datetime.date(2000, 1, 9), 8, 9, 72]
    [datetime.date(2000, 1, 10), 9, 10, 90]
    [datetime.date(2000, 1, 11), 10, 11, 110]
    >>> exampleQ.AddFields([ExampleTransform([],['product_v2'])],[0,2])
    >>> print '\\n'.join(map(str,exampleQ.data))
    [datetime.date(2000, 1, 1), 0, 1, 0, 0]
    [datetime.date(2000, 1, 2), 1, 2, 2, None]
    [datetime.date(2000, 1, 3), 2, 3, 6, 6]
    [datetime.date(2000, 1, 4), 3, 4, 12, None]
    [datetime.date(2000, 1, 5), 4, 5, 20, None]
    [datetime.date(2000, 1, 6), 5, 6, 30, None]
    [datetime.date(2000, 1, 7), 6, 7, 42, None]
    [datetime.date(2000, 1, 8), 7, 8, 56, None]
    [datetime.date(2000, 1, 9), 8, 9, 72, None]
    [datetime.date(2000, 1, 10), 9, 10, 90, None]
    [datetime.date(2000, 1, 11), 10, 11, 110, None]
    >>> exampleQ.AddFields([ExampleTransform([],['product_v3'])],
    ... lines=(n for (n, line) in enumerate(exampleQ.data) if line[0].weekday()==1))
    >>> print '\\n'.join(map(str,exampleQ.data))
    [datetime.date(2000, 1, 1), 0, 1, 0, 0, None]
    [datetime.date(2000, 1, 2), 1, 2, 2, None, None]
    [datetime.date(2000, 1, 3), 2, 3, 6, 6, None]
    [datetime.date(2000, 1, 4), 3, 4, 12, None, 12]
    [datetime.date(2000, 1, 5), 4, 5, 20, None, None]
    [datetime.date(2000, 1, 6), 5, 6, 30, None, None]
    [datetime.date(2000, 1, 7), 6, 7, 42, None, None]
    [datetime.date(2000, 1, 8), 7, 8, 56, None, None]
    [datetime.date(2000, 1, 9), 8, 9, 72, None, None]
    [datetime.date(2000, 1, 10), 9, 10, 90, None, None]
    [datetime.date(2000, 1, 11), 10, 11, 110, None, 110]
    """
def GetColIndex(self,name):
    """Look up the index of the named column.

    INPUTS:

    -- name:        String representing name of a column to lookup.

    RETURNS:        Integer representing index for the named column or
                    None if the column is not present.
    """
    return self.__colIndexByName.get(name, None)
def NameForColumn(self,index):
    """Return the name of the column stored at the given integer index."""
    columnName = self.__columnNames[index]
    return columnName
def GetColumnNames(self):
    """Return a deep copy of this sequence's column names.

    A copy is returned so callers cannot mutate the internal list.
    """
    namesCopy = copy.deepcopy(self.__columnNames)
    return namesCopy
def AddBlankColumns(self,nameList,adderList=None,default=None,
                    startingPos=-1):
    """Add blank columns to this sequence.

    INPUTS:

    -- nameList:        List of names for the columns to add.

    -- adderList:       List of strings (one for each element in nameList)
                        indicating who is adding the given name. This is
                        optional and can be left as None.

    -- default:         Value to add for new columns.

    -- startingPos:     Integer indicating the column number at which
                        to insert the new columns:
                          -1 indicates the last column.
                           0 indicates the first column.
                        For example, inserting 'NewColumn' to a columnList of
                        ['event_date', 'val1'] and at startingPos of 0 makes
                        the column List ['NewColumn', 'event_date', 'val1']

    -------------------------------------------------------

    RETURNS:        List of indexes for the new columns.

    -------------------------------------------------------

    PURPOSE:        This function is useful if you want to add new
                    columns of data. First you call this to create the
                    columns and then you can set the values accordingly.
                    The following illustrates example usage:

    >>> exampleQ = TimeSeq(['day','qnt','price'],[[1,2,4],[8,5,9],[7,0,6]])
    >>> exampleQ.AddBlankColumns(['foo','bar'])
    [3, 4]
    >>> print exampleQ.GetColumnNames()
    ['day', 'qnt', 'price', 'foo', 'bar']
    >>> print exampleQ.data[-1]
    [7, 0, 6, None, None]
    >>> exampleQ.AddBlankColumns(['test1', 'test2'], startingPos = 1)
    [1, 2]
    >>> print exampleQ.GetColumnNames()
    ['day', 'test1', 'test2', 'qnt', 'price', 'foo', 'bar']
    >>> print exampleQ.data[-1]
    [7, None, None, 0, 6, None, None]
    """
    if (None == adderList): adderList = ['unknown']*len(nameList)
    numNames = len(nameList)
    if (numNames != len(set(nameList))): # duplicates exist in nameList
        for i in range(len(nameList)):
            if (nameList[i] in nameList[(i+1):]):
                # Convert the slice-relative position of the duplicate
                # into an absolute index.  (The old code indexed
                # nameList/adderList with the offset inside the slice,
                # so the message blamed the wrong element and adder.)
                dupInd = i + 1 + nameList[(i+1):].index(nameList[i])
                raise Exception(
                    "Name %s added at i=%i by %s and at i=%i by %s." % (
                    nameList[i],i,adderList[i],dupInd,adderList[dupInd]))
        raise Exception("Duplicate indexes in nameList.")
    for name in nameList:
        assert not isinstance(name,(tuple,list)),'Names should be strings.'
        index = self.GetColIndex(name)
        if (None != index):
            raise Exception(
                "Column %s can't be added; it already exists at index %i."
                % (name,index))
    # Validate the insertion point without materializing a throw-away
    # list (the old "[-1] + range(...)" membership test).
    assert startingPos == -1 or 0 <= startingPos < len(self.__columnNames)
    if startingPos == -1:
        startingPos = len(self.__columnNames)
    for name in nameList:
        self.__columnNames.insert(startingPos, name)
        for line in self.data:
            line.insert(startingPos, default)
        startingPos+=1
    self.__ResetNameForColumnDict() # reset the column name dict
    # Explicit list so the return value is a list on every Python version.
    return list(range(startingPos - len(nameList), startingPos))
def RenameColumns(self,oldNames,newNames):
    """Rename columns.

    INPUTS:

    -- oldNames:        List of old names for columns.

    -- newNames:        List of new names for columns.

    -------------------------------------------------------

    PURPOSE:        For each i, this function renames column oldNames[i]
                    to have the name newNames[i] as illustrated below:

    >>> import Sequence
    >>> exampleQ = Sequence.TimeSeq(['x','y','z'],[[0,1,2]])
    >>> exampleQ.RenameColumns(['x','z'],['a','b'])
    >>> print exampleQ.GetColumnNames()
    ['a', 'y', 'b']
    """
    if (len(oldNames)!=len(newNames)):
        raise Exception("oldNames and newNames must have the same length")
    if (len(oldNames) != len(set(oldNames))):
        raise Exception("oldNames list contains duplicates")
    if (len(newNames) != len(set(newNames))):
        raise Exception("newNames list contains duplicates")
    indexes = [self.GetColIndex(n) for n in oldNames]
    if (None in indexes):
        raise Exception("No column named %s."%oldNames[indexes.index(None)])
    for columnIndex, replacement in zip(indexes, newNames):
        if (not isinstance(replacement,str)):
            raise Exception('Name %s is not a string.' % str(replacement))
        self.__columnNames[columnIndex] = replacement
    self.__ResetNameForColumnDict() # rebuild the column name dict
def CopyColumns(self,oldNames,newNames):
    """Copy columns.

    INPUTS:

    -- oldNames:        List of old names for columns.

    -- newNames:        List of new names for columns.

    -------------------------------------------------------

    PURPOSE:        For each i, this function copies column oldNames[i]
                    into a new column named newNames[i] as shown below:

    >>> import Sequence
    >>> exampleQ = Sequence.TimeSeq(['x','y','z'],[[0,1,2],[3,4,5]])
    >>> exampleQ.CopyColumns(['x','z'],['a','b'])
    >>> print exampleQ.GetColumnNames()
    ['x', 'y', 'z', 'a', 'b']
    >>> print exampleQ.data
    [[0, 1, 2, 0, 2], [3, 4, 5, 3, 5]]
    """
    if (len(oldNames)!=len(newNames)):
        raise Exception("oldNames and newNames must have the same length")
    if (len(oldNames) != len(set(oldNames))):
        raise Exception("oldNames list contains duplicates")
    if (len(newNames) != len(set(newNames))):
        raise Exception("newNames list contains duplicates")
    sourceIndexes = [self.GetColIndex(n) for n in oldNames]
    if (None in sourceIndexes):
        raise Exception("No column named %s."%oldNames[sourceIndexes.index(None)])
    for name in newNames:
        assert not isinstance(name,(tuple,list)),'Names should be strings.'
        existing = self.GetColIndex(name)
        if (None != existing):
            raise Exception(
                "Column %s can't be added; it already exists at index %i."
                % (name,existing))
    self.__columnNames.extend(newNames) # add the new names
    self.__ResetNameForColumnDict()     # reset the column name dict
    # Build fresh row lists (rather than extending in place) so row
    # object identity changes exactly as before.
    copied = []
    for row in self.data:
        copied.append(row + [row[i] for i in sourceIndexes])
    self.data = copied
def _test():
    "Test docstrings in module."
    # Run every doctest embedded in this module's docstrings.
    import doctest
    doctest.testmod()
if __name__ == "__main__":
    # Run the module's doctests when executed as a script.
    _test()
    # Parenthesized call prints identically under Python 2 and Python 3;
    # the old bare `print 'Test finished.'` is a syntax error on Python 3.
    print('Test finished.')
| {
"content_hash": "82d4b127e30778db2826b815ed8c3520",
"timestamp": "",
"source": "github",
"line_count": 479,
"max_line_length": 80,
"avg_line_length": 38.979123173277664,
"alnum_prop": 0.539606876974988,
"repo_name": "eiriks/pyvol",
"id": "638856db6ba3005e493f6f0aed688d3ac2b84cc7",
"size": "18671",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "pyvol/tseries/Sequence.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "139522"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<!--
| Generated by Apache Maven Doxia at 2014-11-25
| Rendered using Apache Maven Fluido Skin 1.3.0
-->
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<meta name="Date-Revision-yyyymmdd" content="20141125" />
<meta http-equiv="Content-Language" content="en" />
<title>Trap HTTP Sun Shared Library - About</title>
<link rel="stylesheet" href="./css/apache-maven-fluido-1.3.0.min.css" />
<link rel="stylesheet" href="./css/site.css" />
<link rel="stylesheet" href="./css/print.css" media="print" />
<script type="text/javascript" src="./js/apache-maven-fluido-1.3.0.min.js"></script>
</head>
<body class="topBarDisabled">
<div class="container-fluid">
<div id="banner">
<div class="pull-left">
<div id="bannerLeft">
<h2>Trap HTTP Sun Shared Library</h2>
</div>
</div>
<div class="pull-right"> </div>
<div class="clear"><hr/></div>
</div>
<div id="breadcrumbs">
<ul class="breadcrumb">
<li id="publishDate">Last Published: 2014-11-25</li>
<li class="divider">|</li> <li id="projectVersion">Version: 1.3</li>
<li class="divider">|</li> <li class="">
<a href="../../../index.html" title="Trap">
Trap</a>
</li>
<li class="divider ">/</li>
<li class="">
<a href="../../" title="TrAP Transports">
TrAP Transports</a>
</li>
<li class="divider ">/</li>
<li class="">
<a href="../" title="TrAP HTTP Transports">
TrAP HTTP Transports</a>
</li>
<li class="divider ">/</li>
<li class="">
<a href="./" title="Trap HTTP Sun Shared Library">
Trap HTTP Sun Shared Library</a>
</li>
<li class="divider ">/</li>
<li class="">About</li>
</ul>
</div>
<div class="row-fluid">
<div id="leftColumn" class="span3">
<div class="well sidebar-nav">
<ul class="nav nav-list">
<li class="nav-header">Trap 1.3</li>
<li>
<a href="../../../index.html" title="Introduction">
<i class="none"></i>
Introduction</a>
</li>
<li>
<a href="../../../trap-api/quickstart.html" title="Java Quickstart">
<i class="none"></i>
Java Quickstart</a>
</li>
<li>
<a href="../../../trap-js/index.html" title="JavaScript Quickstart">
<i class="none"></i>
JavaScript Quickstart</a>
</li>
<li>
<a href="../../../channels.html" title="Channels">
<i class="none"></i>
Channels</a>
</li>
<li>
<a href="../../../configuration.html" title="Configuration">
<i class="none"></i>
Configuration</a>
</li>
<li class="nav-header">Language Specific Documentation</li>
<li>
<a href="../../../trap-api/index.html" title="Java">
<i class="none"></i>
Java</a>
</li>
<li>
<a href="../../../trap-js/index.html" title="JavaScript">
<i class="none"></i>
JavaScript</a>
</li>
<li class="nav-header">Project Documentation</li>
<li>
<a href="project-info.html" title="Project Information">
<i class="icon-chevron-down"></i>
Project Information</a>
<ul class="nav nav-list">
<li class="active">
<a href="#"><i class="none"></i>About</a>
</li>
<li>
<a href="plugin-management.html" title="Plugin Management">
<i class="none"></i>
Plugin Management</a>
</li>
<li>
<a href="distribution-management.html" title="Distribution Management">
<i class="none"></i>
Distribution Management</a>
</li>
<li>
<a href="dependency-info.html" title="Dependency Information">
<i class="none"></i>
Dependency Information</a>
</li>
<li>
<a href="dependency-convergence.html" title="Dependency Convergence">
<i class="none"></i>
Dependency Convergence</a>
</li>
<li>
<a href="source-repository.html" title="Source Repository">
<i class="none"></i>
Source Repository</a>
</li>
<li>
<a href="mail-lists.html" title="Mailing Lists">
<i class="none"></i>
Mailing Lists</a>
</li>
<li>
<a href="issue-tracking.html" title="Issue Tracking">
<i class="none"></i>
Issue Tracking</a>
</li>
<li>
<a href="integration.html" title="Continuous Integration">
<i class="none"></i>
Continuous Integration</a>
</li>
<li>
<a href="plugins.html" title="Project Plugins">
<i class="none"></i>
Project Plugins</a>
</li>
<li>
<a href="license.html" title="Project License">
<i class="none"></i>
Project License</a>
</li>
<li>
<a href="dependency-management.html" title="Dependency Management">
<i class="none"></i>
Dependency Management</a>
</li>
<li>
<a href="team-list.html" title="Project Team">
<i class="none"></i>
Project Team</a>
</li>
<li>
<a href="project-summary.html" title="Project Summary">
<i class="none"></i>
Project Summary</a>
</li>
<li>
<a href="dependencies.html" title="Dependencies">
<i class="none"></i>
Dependencies</a>
</li>
</ul>
</li>
<li>
<a href="project-reports.html" title="Project Reports">
<i class="icon-chevron-right"></i>
Project Reports</a>
</li>
</ul>
<form id="search-form" action="http://www.google.com/search" method="get" >
<input value="ericssonresearch.github.io/trap/transports/http/http-utils-sun/" name="sitesearch" type="hidden"/>
<input class="search-query" name="q" id="query" type="text" />
</form>
<script type="text/javascript" src="http://www.google.com/coop/cse/brand?form=search-form"></script>
<hr class="divider" />
<div id="poweredBy">
<div class="clear"></div>
<div class="clear"></div>
<div class="clear"></div>
<a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy">
<img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" />
</a>
</div>
</div>
</div>
<div id="bodyColumn" class="span9" >
<div class="section">
<h2>About Trap HTTP Sun Shared Library<a name="About_Trap_HTTP_Sun_Shared_Library"></a></h2><a name="About_Trap_HTTP_Sun_Shared_Library"></a>
<p>This library contains shared code between the Sun HTTP server and client transports.</p></div>
</div>
</div>
</div>
<hr/>
<footer>
<div class="container-fluid">
<div class="row span12">Copyright © 2014
<a href="https://www.ericsson.com">Ericsson AB</a>.
All Rights Reserved.
</div>
</div>
</footer>
</body>
</html> | {
"content_hash": "584aede72b0a9cc75b8635bfca26e82d",
"timestamp": "",
"source": "github",
"line_count": 298,
"max_line_length": 344,
"avg_line_length": 32.89261744966443,
"alnum_prop": 0.3927769842889206,
"repo_name": "princeofdarkness76/trap",
"id": "0c1f7aa9b469260ad2d6d3ab9b231171145e0666",
"size": "9802",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "transports/http/http-utils-sun/index.html",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "203809"
},
{
"name": "HTML",
"bytes": "40284461"
},
{
"name": "Java",
"bytes": "2358762"
},
{
"name": "JavaScript",
"bytes": "390348"
},
{
"name": "Shell",
"bytes": "7473"
}
],
"symlink_target": ""
} |
=============
Abuse Reports
=============
The following API endpoint covers abuse reporting
---------------------------------
Submitting an add-on abuse report
---------------------------------
.. _`v3-addonabusereport-create`:
The following API endpoint allows an abuse report to be submitted for an Add-on,
either listed on https://addons.mozilla.org or not.
Authentication is not required, but is recommended so reports can be responded
to if necessary.
.. http:post:: /api/v3/abuse/report/addon/
.. _v3-addonabusereport-create-request:
:<json string addon: The id, slug, or guid of the add-on to report for abuse (required).
:<json string message: The body/content of the abuse report (required).
:>json object|null reporter: The user who submitted the report, if authenticated.
:>json int reporter.id: The id of the user who submitted the report.
:>json string reporter.name: The name of the user who submitted the report.
:>json string reporter.username: The username of the user who submitted the report.
:>json string reporter.url: The link to the profile page of the user who submitted the report.
:>json object addon: The add-on reported for abuse.
:>json string addon.guid: The add-on `extension identifier <https://developer.mozilla.org/en-US/Add-ons/Install_Manifests#id>`_.
:>json int|null addon.id: The add-on id on AMO. If the guid submitted didn't match a known add-on on AMO, then null.
:>json string|null addon.slug: The add-on slug. If the guid submitted didn't match a known add-on on AMO, then null.
:>json string message: The body/content of the abuse report.
------------------------------
Submitting a user abuse report
------------------------------
.. _`v3-userabusereport-create`:
The following API endpoint allows an abuse report to be submitted for a user account
on https://addons.mozilla.org. Authentication is not required, but is recommended
so reports can be responded to if necessary.
.. http:post:: /api/v3/abuse/report/user/
.. _v3-userabusereport-create-request:
:<json string user: The id or username of the user to report for abuse (required).
:<json string message: The body/content of the abuse report (required).
:>json object|null reporter: The user who submitted the report, if authenticated.
:>json int reporter.id: The id of the user who submitted the report.
:>json string reporter.name: The name of the user who submitted the report.
:>json string reporter.url: The link to the profile page of the user who submitted the report.
:>json string reporter.username: The username of the user who submitted the report.
:>json object user: The user reported for abuse.
:>json int user.id: The id of the user reported.
:>json string user.name: The name of the user reported.
:>json string user.url: The link to the profile page of the user reported.
:>json string user.username: The username of the user reported.
:>json string message: The body/content of the abuse report.
| {
"content_hash": "94d7ee00a53e13e9719c82fa191a0ac8",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 132,
"avg_line_length": 49.225806451612904,
"alnum_prop": 0.6988859764089121,
"repo_name": "atiqueahmedziad/addons-server",
"id": "c8e8d7ee3da59535ae2c31ecfefd0eb1922f9175",
"size": "3052",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "docs/topics/api/v3_legacy/abuse.rst",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "810065"
},
{
"name": "Dockerfile",
"bytes": "2868"
},
{
"name": "HTML",
"bytes": "599024"
},
{
"name": "JavaScript",
"bytes": "1070220"
},
{
"name": "Makefile",
"bytes": "820"
},
{
"name": "PLSQL",
"bytes": "1074"
},
{
"name": "PLpgSQL",
"bytes": "2381"
},
{
"name": "Python",
"bytes": "5272277"
},
{
"name": "SQLPL",
"bytes": "559"
},
{
"name": "Shell",
"bytes": "11171"
},
{
"name": "Smarty",
"bytes": "1497"
}
],
"symlink_target": ""
} |
A simple tic tac toe game made for android phones.
You can download the .APK from [my google drive](https://drive.google.com/open?id=0B-JDUFjRTKZVOGZVMnV6U3N5UDQ)
 | {
"content_hash": "8c9c99428d172038faf7554a66a0af61",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 119,
"avg_line_length": 56.8,
"alnum_prop": 0.7746478873239436,
"repo_name": "CptDave/Tic-Tac-Toe-Android",
"id": "13bfc95439bc1ad66382a85b5d55175f7668ba0e",
"size": "306",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "38719"
}
],
"symlink_target": ""
} |
import json
import os
import socket
import time
import uuid
from django.conf import settings
from django.template.loader import render_to_string
from django.utils import timezone
from django.utils.html import escape
import requests
from urllib.parse import quote, urlencode
from hc.accounts.models import Profile
from hc.api.schemas import telegram_migration
from hc.front.templatetags.hc_extras import sortchecks
from hc.lib import emails, jsonschema
from hc.lib.date import format_duration
from hc.lib.string import replace
try:
    import apprise
except ImportError:
    # apprise is an optional dependency: when it is not installed,
    # force the integration off regardless of the configured setting.
    settings.APPRISE_ENABLED = False
def tmpl(template_name, **ctx) -> str:
    """Render integrations/<template_name> with ctx and return it stripped.

    \xa0 (non-breaking space) is swapped for a regular space because it
    causes SMS messages to use UCS2 encoding and cost twice the money.
    """
    rendered = render_to_string("integrations/%s" % template_name, ctx)
    return rendered.strip().replace("\xa0", " ")
class TransportError(Exception):
    """Raised when a notification transport fails to deliver.

    Attributes:
        message: human-readable description of the failure.
        permanent: True when a retry cannot succeed, so callers
            should not attempt one.
    """

    def __init__(self, message, permanent=False) -> None:
        self.permanent = permanent
        self.message = message
class Transport(object):
    """Base class for all notification transports.

    Subclasses implement notify(); is_noop() lets a subclass declare
    that it will ignore the check's current status.
    """

    def __init__(self, channel):
        self.channel = channel

    def notify(self, check, notification=None) -> None:
        """Send a notification about the check's current status.

        Raises TransportError on failure; returns None on success.
        """
        raise NotImplementedError()

    def is_noop(self, check) -> bool:
        """Return True if this transport ignores the check's current status.

        Overridden in the Webhook subclass, where the "up" and "down"
        URLs are configured separately and both are optional.
        """
        return False

    def down_checks(self, check):
        """Return a sorted list of the project's other checks that are down.

        Returns None (rather than an empty list) when the project has no
        other checks at all, so templates can distinguish "no siblings"
        from "all siblings are up".
        """
        others = self.channel.project.check_set.exclude(id=check.id)
        if not others.exists():
            return None

        down = list(others.filter(status="down"))
        sortchecks(down, "name")
        return down
class Email(Transport):
    """Sends check status notifications by email via hc.lib.emails."""

    @staticmethod
    def _projects_for(address):
        """Return the projects visible to the account owning `address`,
        or None when no account uses this email address."""
        from hc.accounts.models import Profile

        try:
            profile = Profile.objects.get(user__email=address)
            return list(profile.projects())
        except Profile.DoesNotExist:
            return None

    def notify(self, check, notification=None) -> None:
        if not self.channel.email_verified:
            raise TransportError("Email not verified")

        unsub_link = self.channel.get_unsub_link()
        headers = {
            "List-Unsubscribe": "<%s>" % unsub_link,
            "List-Unsubscribe-Post": "List-Unsubscribe=One-Click",
        }
        if notification:
            headers["X-Status-Url"] = notification.status_url()

        ctx = {
            "check": check,
            "ping": check.ping_set.order_by("created").last(),
            # If this address has an account, show that account's projects.
            "projects": self._projects_for(self.channel.email_value),
            "unsub_link": unsub_link,
        }
        emails.alert(self.channel.email_value, ctx, headers)

    def is_noop(self, check) -> bool:
        if check.status == "down":
            return not self.channel.email_notify_down
        return not self.channel.email_notify_up
class Shell(Transport):
    """Runs a user-configured shell command on "up"/"down" events."""

    def prepare(self, template: str, check) -> str:
        """Replace $-placeholders in `template` with values from `check`."""
        ctx = {
            "$CODE": str(check.code),
            "$STATUS": check.status,
            "$NOW": timezone.now().replace(microsecond=0).isoformat(),
            "$NAME": check.name,
            "$TAGS": check.tags,
        }
        # $TAG1, $TAG2, ... for the individual tags.
        for number, tag in enumerate(check.tags_list(), start=1):
            ctx["$TAG%d" % number] = tag

        return replace(template, ctx)

    def is_noop(self, check) -> bool:
        # Nothing to run if no command is configured for this status.
        if check.status == "down":
            return not self.channel.cmd_down
        if check.status == "up":
            return not self.channel.cmd_up
        return False

    def notify(self, check, notification=None) -> None:
        if not settings.SHELL_ENABLED:
            raise TransportError("Shell commands are not enabled")

        if check.status == "up":
            command = self.channel.cmd_up
        elif check.status == "down":
            command = self.channel.cmd_down

        command = self.prepare(command, check)
        status = os.system(command)
        if status != 0:
            raise TransportError("Command returned exit code %d" % status)
class HttpTransport(Transport):
    """Base class for transports that deliver notifications over HTTP."""

    @classmethod
    def raise_for_response(cls, response):
        # Subclasses can override this method to produce a more specific
        # message.
        raise TransportError(f"Received status code {response.status_code}")

    @classmethod
    def _request(cls, method, url, **kwargs) -> None:
        options = dict(kwargs)
        options["timeout"] = 10
        headers = options.setdefault("headers", {})
        headers.setdefault("User-Agent", "healthchecks.io")

        try:
            response = requests.request(method, url, **options)
            if response.status_code not in (200, 201, 202, 204):
                cls.raise_for_response(response)
        except requests.exceptions.Timeout:
            # Well, we tried
            raise TransportError("Connection timed out")
        except requests.exceptions.ConnectionError:
            raise TransportError("Connection failed")

    @classmethod
    def _request_with_retries(cls, method, url, use_retries=True, **kwargs) -> None:
        start = time.time()
        attempts_left = 3 if use_retries else 1

        while True:
            try:
                return cls._request(method, url, **kwargs)
            except TransportError as e:
                # Permanent failures are never retried.
                attempts_left = 0 if e.permanent else attempts_left - 1
                # Abort once out of attempts, or after spending more than
                # 15 seconds total across all attempts:
                if attempts_left == 0 or time.time() - start > 15:
                    raise e

    @classmethod
    def get(cls, url, **kwargs) -> None:
        cls._request_with_retries("get", url, **kwargs)

    @classmethod
    def post(cls, url, **kwargs) -> None:
        cls._request_with_retries("post", url, **kwargs)

    @classmethod
    def put(cls, url, **kwargs) -> None:
        cls._request_with_retries("put", url, **kwargs)
class Webhook(HttpTransport):
    """Calls user-configured webhook URLs on "up"/"down" events."""

    def prepare(self, template: str, check, urlencode=False, latin1=False) -> str:
        """Replace variables with actual values."""

        def safe(s: str) -> str:
            return quote(s) if urlencode else s

        ctx = {
            "$CODE": str(check.code),
            "$STATUS": check.status,
            "$NOW": safe(timezone.now().replace(microsecond=0).isoformat()),
            "$NAME": safe(check.name),
            "$TAGS": safe(check.tags),
        }
        for number, tag in enumerate(check.tags_list(), start=1):
            ctx["$TAG%d" % number] = safe(tag)

        prepared = replace(template, ctx)
        if latin1:
            # Replace non-latin-1 characters with XML character references.
            prepared = prepared.encode("latin-1", "xmlcharrefreplace").decode("latin-1")

        return prepared

    def is_noop(self, check) -> bool:
        # A missing URL for the current status means nothing to call.
        if check.status == "down":
            return not self.channel.url_down
        if check.status == "up":
            return not self.channel.url_up
        return False

    def notify(self, check, notification=None) -> None:
        if not settings.WEBHOOKS_ENABLED:
            raise TransportError("Webhook notifications are not enabled.")

        spec = self.channel.webhook_spec(check.status)
        if not spec["url"]:
            raise TransportError("Empty webhook URL")

        url = self.prepare(spec["url"], check, urlencode=True)

        # Header values should contain ASCII and latin-1 only
        headers = {
            key: self.prepare(value, check, latin1=True)
            for key, value in spec["headers"].items()
        }

        body = spec["body"]
        if body:
            body = self.prepare(body, check).encode()

        # When sending a test notification, don't retry on failures.
        retry = not (notification and notification.owner is None)

        method = spec["method"]
        if method == "GET":
            self.get(url, use_retries=retry, headers=headers)
        elif method == "POST":
            self.post(url, use_retries=retry, data=body, headers=headers)
        elif method == "PUT":
            self.put(url, use_retries=retry, data=body, headers=headers)
class Slack(HttpTransport):
    """Posts notifications to Slack or Mattermost incoming webhooks."""

    @classmethod
    def raise_for_response(cls, response):
        # If Slack returns 404, this endpoint is unlikely to ever work again
        # https://api.slack.com/messaging/webhooks#handling_errors
        raise TransportError(
            f"Received status code {response.status_code}",
            permanent=(response.status_code == 404),
        )

    def notify(self, check, notification=None) -> None:
        if self.channel.kind == "slack" and not settings.SLACK_ENABLED:
            raise TransportError("Slack notifications are not enabled.")

        if self.channel.kind == "mattermost" and not settings.MATTERMOST_ENABLED:
            raise TransportError("Mattermost notifications are not enabled.")

        payload = json.loads(tmpl("slack_message.json", check=check))
        self.post(self.channel.slack_webhook_url, json=payload)
class HipChat(HttpTransport):
    # Always a no-op: notifications for HipChat channels are never sent.
    # (Presumably kept only so existing channel rows don't break —
    # TODO confirm.)
    def is_noop(self, check) -> bool:
        return True
class Opsgenie(HttpTransport):
    """Opens an Opsgenie alert when a check goes down, closes it on up."""

    @classmethod
    def raise_for_response(cls, response):
        """Raise TransportError, including Opsgenie's error message if present."""
        message = f"Received status code {response.status_code}"
        try:
            details = response.json().get("message")
            if isinstance(details, str):
                message += f' with a message: "{details}"'
        except ValueError:
            # The response body was not JSON -- report the status code only.
            pass
        raise TransportError(message)

    def notify(self, check, notification=None) -> None:
        """Create (status "down") or close (status "up") an Opsgenie alert."""
        if not settings.OPSGENIE_ENABLED:
            raise TransportError("Opsgenie notifications are not enabled.")
        headers = {
            # Fixed header name: was misspelled "Conent-Type".
            "Content-Type": "application/json",
            "Authorization": "GenieKey %s" % self.channel.opsgenie_key,
        }
        # The alias is the check's UUID, so later "close" calls can find
        # the alert opened by the earlier "down" notification.
        payload = {"alias": str(check.code), "source": settings.SITE_NAME}
        if check.status == "down":
            payload["tags"] = check.tags_list()
            payload["message"] = tmpl("opsgenie_message.html", check=check)
            payload["note"] = tmpl("opsgenie_note.html", check=check)
            payload["description"] = tmpl("opsgenie_description.html", check=check)
        url = "https://api.opsgenie.com/v2/alerts"
        if self.channel.opsgenie_region == "eu":
            url = "https://api.eu.opsgenie.com/v2/alerts"
        if check.status == "up":
            url += "/%s/close?identifierType=alias" % check.code
        self.post(url, json=payload, headers=headers)
class PagerDuty(HttpTransport):
    """Triggers and resolves PagerDuty incidents via the generic events API."""
    URL = "https://events.pagerduty.com/generic/2010-04-15/create_event.json"
    def notify(self, check, notification=None) -> None:
        if not settings.PD_ENABLED:
            raise TransportError("PagerDuty notifications are not enabled.")
        # Extra context shown in the PagerDuty incident details
        details = {
            "Project": check.project.name,
            "Total pings": check.n_pings,
            "Last ping": tmpl("pd_last_ping.html", check=check),
        }
        if check.desc:
            details["Description"] = check.desc
        if check.tags:
            details["Tags"] = ", ".join(check.tags_list())
        if check.kind == "simple":
            details["Period"] = format_duration(check.timeout)
        if check.kind == "cron":
            details["Schedule"] = check.schedule
        description = tmpl("pd_description.html", check=check)
        payload = {
            "service_key": self.channel.pd_service_key,
            # The check's UUID, so "resolve" events match the earlier "trigger"
            "incident_key": str(check.code),
            "event_type": "trigger" if check.status == "down" else "resolve",
            "description": description,
            "client": settings.SITE_NAME,
            "client_url": check.details_url(),
            "details": details,
        }
        self.post(self.URL, json=payload)
class PagerTree(HttpTransport):
    """Triggers and resolves PagerTree incidents via the channel's webhook URL."""

    def notify(self, check, notification=None) -> None:
        if not settings.PAGERTREE_ENABLED:
            raise TransportError("PagerTree notifications are not enabled.")
        url = self.channel.value
        # Fixed header name: was misspelled "Conent-Type".
        headers = {"Content-Type": "application/json"}
        payload = {
            "incident_key": str(check.code),
            "event_type": "trigger" if check.status == "down" else "resolve",
            "title": tmpl("pagertree_title.html", check=check),
            "description": tmpl("pagertree_description.html", check=check),
            "client": settings.SITE_NAME,
            "client_url": settings.SITE_ROOT,
            "tags": ",".join(check.tags_list()),
        }
        self.post(url, json=payload, headers=headers)
class PagerTeam(HttpTransport):
    """Disabled transport: every notification is treated as a no-op."""

    def is_noop(self, check) -> bool:
        # Always skip; notify() never runs for this channel kind.
        return True
class Pushbullet(HttpTransport):
    """Sends a note-type push through the Pushbullet v2 API."""

    def notify(self, check, notification=None) -> None:
        text = tmpl("pushbullet_message.html", check=check)
        url = "https://api.pushbullet.com/v2/pushes"
        headers = {
            # The channel value holds the user's access token.
            "Access-Token": self.channel.value,
            # Fixed header name: was misspelled "Conent-Type".
            "Content-Type": "application/json",
        }
        payload = {"type": "note", "title": settings.SITE_NAME, "body": text}
        self.post(url, json=payload, headers=headers)
class Pushover(HttpTransport):
    """Delivers notifications through the Pushover messages API.

    The channel value is "user_key|down_priority" or
    "user_key|down_priority|up_priority".
    """
    URL = "https://api.pushover.net/1/messages.json"
    CANCEL_TMPL = "https://api.pushover.net/1/receipts/cancel_by_tag/%s.json"
    def notify(self, check, notification=None) -> None:
        pieces = self.channel.value.split("|")
        user_key, down_prio = pieces[0], pieces[1]
        # The third element, if present, is the priority for "up" events
        up_prio = down_prio
        if len(pieces) == 3:
            up_prio = pieces[2]
        from hc.api.models import TokenBucket
        if not TokenBucket.authorize_pushover(user_key):
            raise TransportError("Rate limit exceeded")
        # If down events have the emergency priority,
        # send a cancel call first
        if check.status == "up" and down_prio == "2":
            url = self.CANCEL_TMPL % check.unique_key
            cancel_payload = {"token": settings.PUSHOVER_API_TOKEN}
            self.post(url, data=cancel_payload)
        ctx = {"check": check, "down_checks": self.down_checks(check)}
        text = tmpl("pushover_message.html", **ctx)
        title = tmpl("pushover_title.html", **ctx)
        # Pick the priority that matches the direction of the state change
        prio = up_prio if check.status == "up" else down_prio
        payload = {
            "token": settings.PUSHOVER_API_TOKEN,
            "user": user_key,
            "message": text,
            "title": title,
            "html": 1,
            "priority": int(prio),
            # Tagged with the check's key so emergency receipts can be
            # cancelled by tag later (see CANCEL_TMPL above)
            "tags": check.unique_key,
        }
        # Emergency notification
        if prio == "2":
            # Emergency-priority messages require retry/expire parameters
            payload["retry"] = settings.PUSHOVER_EMERGENCY_RETRY_DELAY
            payload["expire"] = settings.PUSHOVER_EMERGENCY_EXPIRATION
        self.post(self.URL, data=payload)
class VictorOps(HttpTransport):
    """Sends alerts to Splunk On-Call (formerly VictorOps)."""
    def notify(self, check, notification=None) -> None:
        if not settings.VICTOROPS_ENABLED:
            raise TransportError("Splunk On-Call notifications are not enabled.")
        description = tmpl("victorops_description.html", check=check)
        # CRITICAL opens an incident, RECOVERY resolves it
        mtype = "CRITICAL" if check.status == "down" else "RECOVERY"
        payload = {
            "entity_id": str(check.code),
            "message_type": mtype,
            "entity_display_name": check.name_then_code(),
            "state_message": description,
            "monitoring_tool": settings.SITE_NAME,
        }
        # The channel value is the REST endpoint URL
        self.post(self.channel.value, json=payload)
class Matrix(HttpTransport):
    """Posts an m.room.message event to a Matrix room."""
    def get_url(self):
        # The room id (channel value) must be URL-quoted before being
        # interpolated into the request path
        s = quote(self.channel.value)
        url = settings.MATRIX_HOMESERVER
        url += "/_matrix/client/r0/rooms/%s/send/m.room.message?" % s
        url += urlencode({"access_token": settings.MATRIX_ACCESS_TOKEN})
        return url
    def notify(self, check, notification=None) -> None:
        # Send both a plain-text body and an HTML formatted_body
        plain = tmpl("matrix_description.html", check=check)
        formatted = tmpl("matrix_description_formatted.html", check=check)
        payload = {
            "msgtype": "m.text",
            "body": plain,
            "format": "org.matrix.custom.html",
            "formatted_body": formatted,
        }
        self.post(self.get_url(), json=payload)
class Discord(HttpTransport):
    """Posts to a Discord webhook via its Slack-compatible endpoint."""

    def notify(self, check, notification=None) -> None:
        # Discord accepts Slack-format payloads on the "/slack" suffix,
        # so the Slack message template is reused here.
        endpoint = "%s/slack" % self.channel.discord_webhook_url
        rendered = tmpl("slack_message.json", check=check)
        self.post(endpoint, json=json.loads(rendered))
class MigrationRequiredError(TransportError):
    """Raised when Telegram reports a group was migrated to a new chat id.

    Carries the replacement chat id in `new_chat_id` so the caller can
    update the channel and retry. Marked permanent because retrying with
    the old chat id would never succeed.
    """
    def __init__(self, message, new_chat_id: int):
        super().__init__(message, permanent=True)
        self.new_chat_id = new_chat_id
class Telegram(HttpTransport):
    """Sends messages via the Telegram Bot API, handling chat migrations."""
    SM = "https://api.telegram.org/bot%s/sendMessage" % settings.TELEGRAM_TOKEN
    @classmethod
    def raise_for_response(cls, response):
        message = f"Received status code {response.status_code}"
        try:
            doc = response.json()
        except ValueError:
            # Non-JSON error response: report just the status code
            raise TransportError(message)
        # If the error payload contains the migrate_to_chat_id field,
        # raise MigrationRequiredError, with the new chat_id included
        try:
            jsonschema.validate(doc, telegram_migration)
            description = doc["description"]
            chat_id = doc["parameters"]["migrate_to_chat_id"]
            raise MigrationRequiredError(description, chat_id)
        except jsonschema.ValidationError:
            # Not a migration error; fall through to generic handling
            pass
        permanent = False
        description = doc.get("description")
        if isinstance(description, str):
            message += f' with a message: "{description}"'
            if description == "Forbidden: the group chat was deleted":
                # The chat is gone for good -- no point in retrying
                permanent = True
        raise TransportError(message, permanent=permanent)
    @classmethod
    def send(cls, chat_id, text):
        # Telegram.send is a separate method because it is also used in
        # hc.front.views.telegram_bot to send invite links.
        cls.post(cls.SM, json={"chat_id": chat_id, "text": text, "parse_mode": "html"})
    def notify(self, check, notification=None) -> None:
        from hc.api.models import TokenBucket
        if not TokenBucket.authorize_telegram(self.channel.telegram_id):
            raise TransportError("Rate limit exceeded")
        ctx = {"check": check, "down_checks": self.down_checks(check)}
        text = tmpl("telegram_message.html", **ctx)
        try:
            self.send(self.channel.telegram_id, text)
        except MigrationRequiredError as e:
            # Save the new chat_id, then try sending again:
            self.channel.update_telegram_id(e.new_chat_id)
            self.send(self.channel.telegram_id, text)
class Sms(HttpTransport):
    """Sends SMS notifications through the Twilio Messages API."""
    URL = "https://api.twilio.com/2010-04-01/Accounts/%s/Messages.json"
    def is_noop(self, check) -> bool:
        # Respect the channel's per-direction notification preferences
        if check.status == "down":
            return not self.channel.sms_notify_down
        else:
            return not self.channel.sms_notify_up
    def notify(self, check, notification=None) -> None:
        profile = Profile.objects.for_user(self.channel.project.owner)
        if not profile.authorize_sms():
            # Over the monthly quota: notify the account owner, then give up
            profile.send_sms_limit_notice("SMS")
            raise TransportError("Monthly SMS limit exceeded")
        url = self.URL % settings.TWILIO_ACCOUNT
        auth = (settings.TWILIO_ACCOUNT, settings.TWILIO_AUTH)
        text = tmpl("sms_message.html", check=check, site_name=settings.SITE_NAME)
        data = {
            "From": settings.TWILIO_FROM,
            "To": self.channel.phone_number,
            "Body": text,
        }
        if notification:
            # Ask Twilio to report delivery status back to us
            data["StatusCallback"] = notification.status_url()
        self.post(url, data=data, auth=auth)
class Call(HttpTransport):
    """Places a voice call through the Twilio Calls API."""
    URL = "https://api.twilio.com/2010-04-01/Accounts/%s/Calls.json"
    def is_noop(self, check) -> bool:
        # Voice calls are made only for "down" events
        return check.status != "down"
    def notify(self, check, notification=None) -> None:
        profile = Profile.objects.for_user(self.channel.project.owner)
        if not profile.authorize_call():
            profile.send_call_limit_notice()
            raise TransportError("Monthly phone call limit exceeded")
        url = self.URL % settings.TWILIO_ACCOUNT
        auth = (settings.TWILIO_ACCOUNT, settings.TWILIO_AUTH)
        # The template renders TwiML instructions for the call
        twiml = tmpl("call_message.html", check=check, site_name=settings.SITE_NAME)
        data = {
            "From": settings.TWILIO_FROM,
            "To": self.channel.phone_number,
            "Twiml": twiml,
        }
        if notification:
            # Ask Twilio to report call status back to us
            data["StatusCallback"] = notification.status_url()
        self.post(url, data=data, auth=auth)
class WhatsApp(HttpTransport):
    """Sends WhatsApp messages through the Twilio Messages API."""
    URL = "https://api.twilio.com/2010-04-01/Accounts/%s/Messages.json"
    def is_noop(self, check) -> bool:
        # Respect the channel's per-direction notification preferences
        if check.status == "down":
            return not self.channel.whatsapp_notify_down
        else:
            return not self.channel.whatsapp_notify_up
    def notify(self, check, notification=None) -> None:
        profile = Profile.objects.for_user(self.channel.project.owner)
        if not profile.authorize_sms():
            # WhatsApp messages draw from the same monthly quota as SMS
            profile.send_sms_limit_notice("WhatsApp")
            raise TransportError("Monthly message limit exceeded")
        url = self.URL % settings.TWILIO_ACCOUNT
        auth = (settings.TWILIO_ACCOUNT, settings.TWILIO_AUTH)
        text = tmpl("whatsapp_message.html", check=check, site_name=settings.SITE_NAME)
        data = {
            # Twilio requires the "whatsapp:" prefix on both numbers
            "From": "whatsapp:%s" % settings.TWILIO_FROM,
            "To": "whatsapp:%s" % self.channel.phone_number,
            "Body": text,
        }
        if notification:
            data["StatusCallback"] = notification.status_url()
        self.post(url, data=data, auth=auth)
class Trello(HttpTransport):
    """Creates a card on the configured Trello list when a check goes down."""
    URL = "https://api.trello.com/1/cards"
    def is_noop(self, check) -> bool:
        # Cards are created only for "down" events
        return check.status != "down"
    def notify(self, check, notification=None) -> None:
        params = {
            "idList": self.channel.trello_list_id,
            "name": tmpl("trello_name.html", check=check),
            "desc": tmpl("trello_desc.html", check=check),
            "key": settings.TRELLO_APP_KEY,
            "token": self.channel.trello_token,
        }
        self.post(self.URL, params=params)
class Apprise(HttpTransport):
    """Delivers notifications through the optional apprise library."""
    def notify(self, check, notification=None) -> None:
        if not settings.APPRISE_ENABLED:
            # Not supported and/or enabled
            raise TransportError("Apprise is disabled and/or not installed")
        a = apprise.Apprise()
        title = tmpl("apprise_title.html", check=check)
        body = tmpl("apprise_description.html", check=check)
        # The channel value is the apprise service URL
        a.add(self.channel.value)
        notify_type = (
            apprise.NotifyType.SUCCESS
            if check.status == "up"
            else apprise.NotifyType.FAILURE
        )
        # apprise returns a falsy value on delivery failure
        if not a.notify(body=body, title=title, notify_type=notify_type):
            raise TransportError("Failed")
class MsTeams(HttpTransport):
    """Posts message cards to an MS Teams incoming webhook."""
    def escape_md(self, s):
        """Return s with both HTML and Markdown special characters escaped."""
        # Escape special HTML characters
        s = escape(s)
        # Escape characters that have special meaning in Markdown
        for c in r"\`*_{}[]()#+-.!|":
            s = s.replace(c, "\\" + c)
        return s
    def notify(self, check, notification=None) -> None:
        if not settings.MSTEAMS_ENABLED:
            raise TransportError("MS Teams notifications are not enabled.")
        text = tmpl("msteams_message.json", check=check)
        payload = json.loads(text)
        # MS Teams escapes HTML special characters in the summary field.
        # It does not interpret summary content as Markdown.
        name = check.name_then_code()
        payload["summary"] = f"“{name}” is {check.status.upper()}."
        # MS teams *strips* HTML special characters from the title field.
        # To avoid that, we use escape().
        # It does not interpret title as Markdown.
        safe_name = escape(name)
        payload["title"] = f"“{safe_name}” is {check.status.upper()}."
        # MS teams allows some HTML in the section text.
        # It also interprets the section text as Markdown.
        # We want to display the raw content, angle brackets and all,
        # so we run escape() and then additionally escape Markdown:
        payload["sections"][0]["text"] = self.escape_md(check.desc)
        self.post(self.channel.value, json=payload)
class Zulip(HttpTransport):
    """Sends messages via the Zulip API using the channel's bot credentials."""
    @classmethod
    def raise_for_response(cls, response):
        message = f"Received status code {response.status_code}"
        try:
            details = response.json().get("msg")
            if isinstance(details, str):
                message += f' with a message: "{details}"'
        except ValueError:
            # Response body was not JSON; report the status code only
            pass
        raise TransportError(message)
    def notify(self, check, notification=None) -> None:
        if not settings.ZULIP_ENABLED:
            raise TransportError("Zulip notifications are not enabled.")
        url = self.channel.zulip_site + "/api/v1/messages"
        # Zulip uses HTTP basic auth with the bot's email and API key
        auth = (self.channel.zulip_bot_email, self.channel.zulip_api_key)
        data = {
            "type": self.channel.zulip_type,
            "to": self.channel.zulip_to,
            "topic": tmpl("zulip_topic.html", check=check),
            "content": tmpl("zulip_content.html", check=check),
        }
        self.post(url, data=data, auth=auth)
class Spike(HttpTransport):
    """Sends incident events to Spike via the channel's webhook URL."""

    def notify(self, check, notification=None) -> None:
        if not settings.SPIKE_ENABLED:
            raise TransportError("Spike notifications are not enabled.")
        url = self.channel.value
        # Fixed header name: was misspelled "Conent-Type".
        headers = {"Content-Type": "application/json"}
        payload = {
            "check_id": str(check.code),
            "title": tmpl("spike_title.html", check=check),
            "message": tmpl("spike_description.html", check=check),
            "status": check.status,
        }
        self.post(url, json=payload, headers=headers)
class LineNotify(HttpTransport):
    """Sends messages through the LINE Notify API."""
    URL = "https://notify-api.line.me/api/notify"
    def notify(self, check, notification=None) -> None:
        headers = {
            "Content-Type": "application/x-www-form-urlencoded",
            "Authorization": "Bearer %s" % self.channel.linenotify_token,
        }
        # NOTE(review): the message is sent as a query parameter (params=)
        # rather than form data despite the urlencoded Content-Type --
        # confirm this matches the LINE Notify API expectations.
        payload = {"message": tmpl("linenotify_message.html", check=check)}
        self.post(self.URL, headers=headers, params=payload)
class Signal(Transport):
    """Sends Signal messages via a local signal-cli process, using JSON RPC
    over a UNIX domain socket (settings.SIGNAL_CLI_SOCKET)."""
    def is_noop(self, check) -> bool:
        # Respect the channel's per-direction notification preferences
        if check.status == "down":
            return not self.channel.signal_notify_down
        else:
            return not self.channel.signal_notify_up
    def send(self, recipient, message):
        """Send one message through signal-cli and check the JSON RPC reply.

        Raises TransportError if the reply cannot be parsed, the recipient
        is unregistered, or signal-cli reports any other error.
        """
        payload = {
            "jsonrpc": "2.0",
            "method": "send",
            "params": {"recipient": [recipient], "message": message},
            "id": str(uuid.uuid4()),
        }
        payload_bytes = (json.dumps(payload) + "\n").encode()
        for reply_bytes in self._read_replies(payload_bytes):
            try:
                reply = json.loads(reply_bytes.decode())
            except ValueError:
                raise TransportError("signal-cli call failed (unexpected response)")
            # Skip replies that do not match our request id
            if reply.get("id") == payload["id"]:
                if "error" not in reply:
                    # success!
                    break
                message = reply["error"].get("message", "")
                if "UnregisteredUserException" in message:
                    raise TransportError("Recipient not found")
                code = reply["error"].get("code")
                raise TransportError("signal-cli call failed (%s)" % code)
    def _read_replies(self, payload_bytes):
        """Send a request to signal-cli over UNIX socket. Read and yield replies.
        This method:
        * opens UNIX socket
        * sends the request data (JSON RPC data encoded as bytes)
        * reads newline-terminated responses and yields them
        Individual sendall and recv operations have a timeout of 15 seconds.
        This method also keeps track of total time spent in the method, and raises
        an exception when the total time exceeds 15 seconds.
        """
        start = time.time()
        with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as s:
            s.settimeout(15)
            try:
                s.connect(settings.SIGNAL_CLI_SOCKET)
                s.sendall(payload_bytes)
                s.shutdown(socket.SHUT_WR)  # we are done sending
                buffer = []
                while True:
                    ch = s.recv(1)
                    buffer.append(ch)
                    if ch in (b"\n", b""):
                        # A complete reply (or EOF): hand it to the caller
                        yield b"".join(buffer)
                        buffer = []
                    if time.time() - start > 15:
                        raise TransportError("signal-cli call timed out")
            except OSError as e:
                raise TransportError("signal-cli call failed (%s)" % e)
    def notify(self, check, notification=None) -> None:
        if not settings.SIGNAL_CLI_SOCKET:
            raise TransportError("Signal notifications are not enabled")
        from hc.api.models import TokenBucket
        if not TokenBucket.authorize_signal(self.channel.phone_number):
            raise TransportError("Rate limit exceeded")
        ctx = {"check": check, "down_checks": self.down_checks(check)}
        text = tmpl("signal_message.html", **ctx)
        self.send(self.channel.phone_number, text)
| {
"content_hash": "dd65955edbed6677ddf24309e6bfce59",
"timestamp": "",
"source": "github",
"line_count": 889,
"max_line_length": 87,
"avg_line_length": 34.514060742407196,
"alnum_prop": 0.5966496105335202,
"repo_name": "iphoting/healthchecks",
"id": "248bcf3c2e8927cc64f3252a9fbadf45fb5ba5e2",
"size": "30691",
"binary": false,
"copies": "1",
"ref": "refs/heads/heroku",
"path": "hc/api/transports.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "64145"
},
{
"name": "Dockerfile",
"bytes": "939"
},
{
"name": "HTML",
"bytes": "595497"
},
{
"name": "JavaScript",
"bytes": "55883"
},
{
"name": "Less",
"bytes": "14135"
},
{
"name": "Python",
"bytes": "894208"
},
{
"name": "Shell",
"bytes": "4382"
}
],
"symlink_target": ""
} |
Load Data
========================
Sample Data Sets
----------------
PixieDust comes with sample data. To start playing with the display() API and other PixieDust features, load and then visualize one of our many sample data sets.
To call the list of data sets, run the following command in your notebook:
::
pixiedust.sampleData()
You get a list of the data sets included with PixieDust.
.. image:: _images/sample_data_sets.png
:height: 522 px
:width: 1384 px
:scale: 50 %
:alt: Screenshot of PixieDust's sampleData() method.
.. raw:: html
<!-- START EXCLUDE -->
.. note::
    If you get an error and you're running Spark 1.6, run the following commands to manually install the packages missing in 1.6 (you need to do this only once):
::
pixiedust.installPackage("com.databricks:spark-csv_2.10:1.5.0")
pixiedust.installPackage("org.apache.commons:commons-csv:0")
.. raw:: html
<!-- END EXCLUDE -->
To create a pySpark DataFrame for one of the samples, just enter its number in the following command. For example, to load Set 6, Million Dollar Home sales, run the command:
::
home_df = pixiedust.sampleData(6)
Load a CSV using its URL
------------------------
You can also replace the number with a URL. If you have a CSV file online, access it by entering the URL in the parentheses, like this:
::
home_df = pixiedust.sampleData("https://openobjectstore.mybluemix.net/misc/milliondollarhomes.csv")
Load data from your local system
--------------------------------
Loading a CSV from your local file system is equally simple. Drop in the file path, like so:
::
pixiedust.sampleData('file:///Users/bradfordnoble/pixiedust/data/nz.csv')
Other Data Sources
------------------
PixieDust provides these sample data sets as a convenience to help you get started fast. To load or connect to your own data source, follow the steps you normally would from within a notebook. Our team has created some notebook tutorials which show how to connect to Cloudant, Twitter, and other data sources. See: `Predict Flight Delays with Apache Spark MLLib, FlightStats, and Weather Data <https://developer.ibm.com/clouddataservices/2016/08/04/predict-flight-delays-with-apache-spark-mllib-flightstats-and-weather-data/>`_ and `Sentiment Analysis of Twitter Hashtags <https://developer.ibm.com/clouddataservices/2015/10/06/sentiment-analysis-of-twitter-hashtags/>`_ | {
"content_hash": "0ad2993f93c95404797a2077776d6432",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 672,
"avg_line_length": 32.026315789473685,
"alnum_prop": 0.7046014790468365,
"repo_name": "ibm-cds-labs/pixiedust",
"id": "526edbf71f31e414b744b1468fb8095625ccf7c2",
"size": "2434",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docsrc/source/loaddata.rst",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "7264"
},
{
"name": "CSS",
"bytes": "5828"
},
{
"name": "HTML",
"bytes": "175308"
},
{
"name": "Java",
"bytes": "5512"
},
{
"name": "JavaScript",
"bytes": "73627"
},
{
"name": "Jupyter Notebook",
"bytes": "6453539"
},
{
"name": "Makefile",
"bytes": "7765"
},
{
"name": "Python",
"bytes": "398476"
},
{
"name": "Scala",
"bytes": "13764"
},
{
"name": "Smarty",
"bytes": "821"
}
],
"symlink_target": ""
} |
package com.google.common.primitives;
import static com.google.common.base.Preconditions.checkArgument;
import com.google.common.annotations.Beta;
import com.google.common.annotations.GwtCompatible;
import com.google.common.base.Preconditions;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import com.google.errorprone.annotations.CheckReturnValue;
import com.google.errorprone.annotations.Immutable;
import java.io.Serializable;
import java.util.AbstractList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.RandomAccess;
import org.checkerframework.checker.nullness.compatqual.NullableDecl;
/**
* An immutable array of {@code int} values, with an API resembling {@link List}.
*
* <p>Advantages compared to {@code int[]}:
*
* <ul>
* <li>All the many well-known advantages of immutability (read <i>Effective Java</i>, second
* edition, Item 15).
* <li>Has the value-based (not identity-based) {@link #equals}, {@link #hashCode}, and {@link
* #toString} behavior you expect
* <li>Offers useful operations beyond just {@code get} and {@code length}, so you don't have to
* hunt through classes like {@link Arrays} and {@link Ints} for them.
* <li>Supports a copy-free {@link #subArray} view, so methods that accept this type don't need to
* add overloads that accept start and end indexes.
* <li>Access to all collection-based utilities via {@link #asList} (though at the cost of
* allocating garbage).
* </ul>
*
* <p>Disadvantages compared to {@code int[]}:
*
* <ul>
* <li>Memory footprint has a fixed overhead (about 24 bytes per instance).
* <li><i>Some</i> construction use cases force the data to be copied (though several construction
* APIs are offered that don't).
* <li>Can't be passed directly to methods that expect {@code int[]} (though the most common
* utilities do have replacements here).
* <li>Dependency on {@code com.google.common} / Guava.
* </ul>
*
* <p>Advantages compared to {@link com.google.common.collect.ImmutableList ImmutableList}{@code
* <Integer>}:
*
* <ul>
* <li>Improved memory compactness and locality
* <li>Can be queried without allocating garbage
* </ul>
*
* <p>Disadvantages compared to {@code ImmutableList<Integer>}:
*
* <ul>
* <li>Can't be passed directly to methods that expect {@code Iterable}, {@code Collection}, or
* {@code List} (though the most common utilities do have replacements here, and there is a
* lazy {@link #asList} view).
* </ul>
*
* @since 22.0
*/
@Beta
@GwtCompatible
@Immutable
public final class ImmutableIntArray implements Serializable {
private static final ImmutableIntArray EMPTY = new ImmutableIntArray(new int[0]);
  /** Returns the empty array. */
  public static ImmutableIntArray of() {
    // All empty arrays are represented by the same shared instance.
    return EMPTY;
  }
  /** Returns an immutable array containing a single value. */
  public static ImmutableIntArray of(int e0) {
    // The freshly created backing array never escapes, so no copy is needed.
    return new ImmutableIntArray(new int[] {e0});
  }
  /** Returns an immutable array containing the given values, in order. */
  public static ImmutableIntArray of(int e0, int e1) {
    // Backing array is created here and never escapes; no defensive copy.
    return new ImmutableIntArray(new int[] {e0, e1});
  }
  /** Returns an immutable array containing the given values, in order. */
  public static ImmutableIntArray of(int e0, int e1, int e2) {
    // Backing array is created here and never escapes; no defensive copy.
    return new ImmutableIntArray(new int[] {e0, e1, e2});
  }
  /** Returns an immutable array containing the given values, in order. */
  public static ImmutableIntArray of(int e0, int e1, int e2, int e3) {
    // Backing array is created here and never escapes; no defensive copy.
    return new ImmutableIntArray(new int[] {e0, e1, e2, e3});
  }
  /** Returns an immutable array containing the given values, in order. */
  public static ImmutableIntArray of(int e0, int e1, int e2, int e3, int e4) {
    // Backing array is created here and never escapes; no defensive copy.
    return new ImmutableIntArray(new int[] {e0, e1, e2, e3, e4});
  }
  /** Returns an immutable array containing the given values, in order. */
  public static ImmutableIntArray of(int e0, int e1, int e2, int e3, int e4, int e5) {
    // Backing array is created here and never escapes; no defensive copy.
    return new ImmutableIntArray(new int[] {e0, e1, e2, e3, e4, e5});
  }
// TODO(kevinb): go up to 11?
/** Returns an immutable array containing the given values, in order. */
// Use (first, rest) so that `of(someIntArray)` won't compile (they should use copyOf), which is
// okay since we have to copy the just-created array anyway.
public static ImmutableIntArray of(int first, int... rest) {
int[] array = new int[rest.length + 1];
array[0] = first;
System.arraycopy(rest, 0, array, 1, rest.length);
return new ImmutableIntArray(array);
}
  /** Returns an immutable array containing the given values, in order. */
  public static ImmutableIntArray copyOf(int[] values) {
    // Defensive copy: the caller keeps its array, we keep ours.
    return values.length == 0 ? EMPTY : new ImmutableIntArray(Arrays.copyOf(values, values.length));
  }
  /** Returns an immutable array containing the given values, in order. */
  public static ImmutableIntArray copyOf(Collection<Integer> values) {
    // Ints.toArray produces a fresh unboxed array; empty input reuses EMPTY.
    return values.isEmpty() ? EMPTY : new ImmutableIntArray(Ints.toArray(values));
  }
  /**
   * Returns an immutable array containing the given values, in order.
   *
   * <p><b>Performance note:</b> this method delegates to {@link #copyOf(Collection)} if {@code
   * values} is a {@link Collection}. Otherwise it creates a {@link #builder} and uses {@link
   * Builder#addAll(Iterable)}, with all the performance implications associated with that.
   */
  public static ImmutableIntArray copyOf(Iterable<Integer> values) {
    if (values instanceof Collection) {
      // Size is known up front: take the single-allocation path.
      return copyOf((Collection<Integer>) values);
    }
    return builder().addAll(values).build();
  }
  /**
   * Returns a new, empty builder for {@link ImmutableIntArray} instances, sized to hold up to
   * {@code initialCapacity} values without resizing. The returned builder is not thread-safe.
   *
   * <p><b>Performance note:</b> When feasible, {@code initialCapacity} should be the exact number
   * of values that will be added, if that knowledge is readily available. It is better to guess a
   * value slightly too high than slightly too low. If the value is not exact, the {@link
   * ImmutableIntArray} that is built will very likely occupy more memory than strictly necessary;
   * to trim memory usage, build using {@code builder.build().trimmed()}.
   */
  public static Builder builder(int initialCapacity) {
    // A negative capacity is a caller bug; fail fast with a descriptive message.
    checkArgument(initialCapacity >= 0, "Invalid initialCapacity: %s", initialCapacity);
    return new Builder(initialCapacity);
  }
  /**
   * Returns a new, empty builder for {@link ImmutableIntArray} instances, with a default initial
   * capacity. The returned builder is not thread-safe.
   *
   * <p><b>Performance note:</b> The {@link ImmutableIntArray} that is built will very likely occupy
   * more memory than necessary; to trim memory usage, build using {@code
   * builder.build().trimmed()}.
   */
  public static Builder builder() {
    // Arbitrary small default capacity for callers that cannot estimate size.
    return new Builder(10);
  }
  /**
   * A builder for {@link ImmutableIntArray} instances; obtained using {@link
   * ImmutableIntArray#builder}.
   */
  @CanIgnoreReturnValue
  public static final class Builder {
    private int[] array;
    private int count = 0; // <= array.length
    Builder(int initialCapacity) {
      array = new int[initialCapacity];
    }
    /**
     * Appends {@code value} to the end of the values the built {@link ImmutableIntArray} will
     * contain.
     */
    public Builder add(int value) {
      ensureRoomFor(1);
      array[count] = value;
      count += 1;
      return this;
    }
    /**
     * Appends {@code values}, in order, to the end of the values the built {@link
     * ImmutableIntArray} will contain.
     */
    public Builder addAll(int[] values) {
      ensureRoomFor(values.length);
      System.arraycopy(values, 0, array, count, values.length);
      count += values.length;
      return this;
    }
    /**
     * Appends {@code values}, in order, to the end of the values the built {@link
     * ImmutableIntArray} will contain.
     */
    public Builder addAll(Iterable<Integer> values) {
      if (values instanceof Collection) {
        // Collections expose their size, enabling a single ensureRoomFor call.
        return addAll((Collection<Integer>) values);
      }
      for (Integer value : values) {
        add(value);
      }
      return this;
    }
    /**
     * Appends {@code values}, in order, to the end of the values the built {@link
     * ImmutableIntArray} will contain.
     */
    public Builder addAll(Collection<Integer> values) {
      ensureRoomFor(values.size());
      for (Integer value : values) {
        array[count++] = value;
      }
      return this;
    }
    /**
     * Appends {@code values}, in order, to the end of the values the built {@link
     * ImmutableIntArray} will contain.
     */
    public Builder addAll(ImmutableIntArray values) {
      ensureRoomFor(values.length());
      System.arraycopy(values.array, values.start, array, count, values.length());
      count += values.length();
      return this;
    }
    // Grows the backing array, if necessary, to fit numberToAdd more values.
    private void ensureRoomFor(int numberToAdd) {
      int newCount = count + numberToAdd; // TODO(kevinb): check overflow now?
      if (newCount > array.length) {
        int[] newArray = new int[expandedCapacity(array.length, newCount)];
        System.arraycopy(array, 0, newArray, 0, count);
        this.array = newArray;
      }
    }
    // Unfortunately this is pasted from ImmutableCollection.Builder.
    private static int expandedCapacity(int oldCapacity, int minCapacity) {
      if (minCapacity < 0) {
        throw new AssertionError("cannot store more than MAX_VALUE elements");
      }
      // careful of overflow!
      // Grow by roughly 50%; if that still falls short, jump to a
      // power-of-two capacity large enough to cover minCapacity.
      int newCapacity = oldCapacity + (oldCapacity >> 1) + 1;
      if (newCapacity < minCapacity) {
        newCapacity = Integer.highestOneBit(minCapacity - 1) << 1;
      }
      if (newCapacity < 0) {
        newCapacity = Integer.MAX_VALUE; // guaranteed to be >= newCapacity
      }
      return newCapacity;
    }
    /**
     * Returns a new immutable array. The builder can continue to be used after this call, to append
     * more values and build again.
     *
     * <p><b>Performance note:</b> the returned array is backed by the same array as the builder, so
     * no data is copied as part of this step, but this may occupy more memory than strictly
     * necessary. To copy the data to a right-sized backing array, use {@code .build().trimmed()}.
     */
    @CheckReturnValue
    public ImmutableIntArray build() {
      return count == 0 ? EMPTY : new ImmutableIntArray(array, 0, count);
    }
  }
  // Instance stuff here
  // The array is never mutated after storing in this field and the construction strategies ensure
  // it doesn't escape this class
  @SuppressWarnings("Immutable")
  private final int[] array;
  /*
   * TODO(kevinb): evaluate the trade-offs of going bimorphic to save these two fields from most
   * instances. Note that the instances that would get smaller are the right set to care about
   * optimizing, because the rest have the option of calling `trimmed`.
   */
  private final transient int start; // it happens that we only serialize instances where this is 0
  private final int end; // exclusive
  // Wraps the whole array, without copying. Trusted callers only.
  private ImmutableIntArray(int[] array) {
    this(array, 0, array.length);
  }
  // Wraps the [start, end) window of the given array, without copying.
  private ImmutableIntArray(int[] array, int start, int end) {
    this.array = array;
    this.start = start;
    this.end = end;
  }
  /** Returns the number of values in this array. */
  public int length() {
    // start/end delimit this instance's view into the backing array.
    return end - start;
  }
  /** Returns {@code true} if there are no values in this array ({@link #length} is zero). */
  public boolean isEmpty() {
    // Equivalent to length() == 0.
    return end == start;
  }
  /**
   * Returns the {@code int} value present at the given index.
   *
   * @throws IndexOutOfBoundsException if {@code index} is negative, or greater than or equal to
   *     {@link #length}
   */
  public int get(int index) {
    // Validate against this view's length, then offset into the backing array.
    Preconditions.checkElementIndex(index, length());
    return array[start + index];
  }
/**
* Returns the smallest index for which {@link #get} returns {@code target}, or {@code -1} if no
* such index exists. Equivalent to {@code asList().indexOf(target)}.
*/
public int indexOf(int target) {
for (int i = start; i < end; i++) {
if (array[i] == target) {
return i - start;
}
}
return -1;
}
/**
* Returns the largest index for which {@link #get} returns {@code target}, or {@code -1} if no
* such index exists. Equivalent to {@code asList().lastIndexOf(target)}.
*/
public int lastIndexOf(int target) {
for (int i = end - 1; i >= start; i--) {
if (array[i] == target) {
return i - start;
}
}
return -1;
}
  /**
   * Returns {@code true} if {@code target} is present at any index in this array. Equivalent to
   * {@code asList().contains(target)}.
   */
  public boolean contains(int target) {
    // Delegates to indexOf.
    return indexOf(target) >= 0;
  }
/** Returns a new, mutable copy of this array's values, as a primitive {@code int[]}. */
public int[] toArray() {
  // Copy just the logical [start, end) window into a fresh array the caller may mutate.
  int[] copy = new int[length()];
  System.arraycopy(array, start, copy, 0, copy.length);
  return copy;
}
/**
 * Returns a new immutable array containing the values in the specified range.
 *
 * <p><b>Performance note:</b> The returned array has the same full memory footprint as this one
 * does (no actual copying is performed). To reduce memory usage, use {@code subArray(start,
 * end).trimmed()}.
 */
public ImmutableIntArray subArray(int startIndex, int endIndex) {
  Preconditions.checkPositionIndexes(startIndex, endIndex, length());
  if (startIndex == endIndex) {
    return EMPTY;
  }
  // Share the backing array; only the window boundaries change.
  return new ImmutableIntArray(array, start + startIndex, start + endIndex);
}
/**
 * Returns an immutable <i>view</i> of this array's values as a {@code List}; note that {@code
 * int} values are boxed into {@link Integer} instances on demand, which can be very expensive.
 * The returned list should be used once and discarded. For any usages beyond that, pass the
 * returned list to {@link com.google.common.collect.ImmutableList#copyOf(Collection)
 * ImmutableList.copyOf} and use that list instead.
 */
public List<Integer> asList() {
  /*
   * Typically we cache this kind of thing, but much repeated use of this view is a performance
   * anti-pattern anyway. If we cache, then everyone pays a price in memory footprint even if
   * they never use this method.
   */
  return new AsList(this);
}
/**
 * Serializable {@link List} view over the parent array. Each {@link #get} boxes on demand;
 * see {@link ImmutableIntArray#asList} for why this view is intentionally not cached.
 */
static class AsList extends AbstractList<Integer> implements RandomAccess, Serializable {
  private final ImmutableIntArray parent;

  private AsList(ImmutableIntArray parent) {
    this.parent = parent;
  }

  // inherit: isEmpty, containsAll, toArray x2, {,list,spl}iterator, stream, forEach, mutations

  @Override
  public int size() {
    return parent.length();
  }

  @Override
  public Integer get(int index) {
    return parent.get(index);
  }

  @Override
  public boolean contains(Object target) {
    return indexOf(target) >= 0;
  }

  @Override
  public int indexOf(Object target) {
    // Non-Integer targets can never be present; short-circuit without delegating.
    return target instanceof Integer ? parent.indexOf((Integer) target) : -1;
  }

  @Override
  public int lastIndexOf(Object target) {
    return target instanceof Integer ? parent.lastIndexOf((Integer) target) : -1;
  }

  @Override
  public List<Integer> subList(int fromIndex, int toIndex) {
    return parent.subArray(fromIndex, toIndex).asList();
  }

  @Override
  public boolean equals(@NullableDecl Object object) {
    // Fast path: two views over equal parents are equal without any boxing.
    if (object instanceof AsList) {
      AsList that = (AsList) object;
      return this.parent.equals(that.parent);
    }
    // We could delegate to super now but it would still box too much
    if (!(object instanceof List)) {
      return false;
    }
    List<?> that = (List<?>) object;
    if (this.size() != that.size()) {
      return false;
    }
    // Walk the parent's backing range directly so only `that`'s elements are boxed.
    int i = parent.start;
    // Since `that` is very likely RandomAccess we could avoid allocating this iterator...
    for (Object element : that) {
      if (!(element instanceof Integer) || parent.array[i++] != (Integer) element) {
        return false;
      }
    }
    return true;
  }

  // Because we happen to use the same formula. If that changes, just don't override this.
  @Override
  public int hashCode() {
    return parent.hashCode();
  }

  @Override
  public String toString() {
    return parent.toString();
  }
}
/**
 * Returns {@code true} if {@code object} is an {@code ImmutableIntArray} containing the same
 * values as this one, in the same order.
 */
@Override
public boolean equals(@NullableDecl Object object) {
  if (this == object) {
    return true;
  }
  if (!(object instanceof ImmutableIntArray)) {
    return false;
  }
  ImmutableIntArray other = (ImmutableIntArray) object;
  int len = length();
  if (len != other.length()) {
    return false;
  }
  // Element-wise comparison through get() so differing start offsets don't matter.
  for (int i = 0; i < len; i++) {
    if (get(i) != other.get(i)) {
      return false;
    }
  }
  return true;
}
/** Returns an unspecified hash code for the contents of this immutable array. */
@Override
public int hashCode() {
  // Same accumulation as AsList relies on: hash = hash * 31 + elementHash, seeded with 1.
  int result = 1;
  for (int i = start; i < end; i++) {
    result = 31 * result + Ints.hashCode(array[i]);
  }
  return result;
}
/**
 * Returns a string representation of this array in the same form as {@link
 * Arrays#toString(int[])}, for example {@code "[1, 2, 3]"}.
 */
@Override
public String toString() {
  if (isEmpty()) {
    return "[]";
  }
  StringBuilder sb = new StringBuilder(length() * 5); // rough estimate is fine
  sb.append('[');
  sb.append(array[start]);
  // First element is already emitted; prefix every later one with a separator.
  for (int i = start + 1; i < end; i++) {
    sb.append(", ");
    sb.append(array[i]);
  }
  return sb.append(']').toString();
}
/**
 * Returns an immutable array containing the same values as {@code this} array. This is logically
 * a no-op, and in some circumstances {@code this} itself is returned. However, if this instance
 * is a {@link #subArray} view of a larger array, this method will copy only the appropriate range
 * of values, resulting in an equivalent array with a smaller memory footprint.
 */
public ImmutableIntArray trimmed() {
  if (isPartialView()) {
    // Copy just the visible range so the oversized backing array can be collected.
    return new ImmutableIntArray(toArray());
  }
  return this;
}
// True when this instance exposes only part of its backing array (i.e. it is a subArray view).
private boolean isPartialView() {
  return start > 0 || end < array.length;
}
// Serialization hook: always serialize a trimmed copy, so `start` is 0 on the wire
// (matching the comment on the `start` field).
Object writeReplace() {
  return trimmed();
}

// Deserialization hook: canonicalize empty arrays to the shared EMPTY instance.
Object readResolve() {
  return isEmpty() ? EMPTY : this;
}
}
| {
"content_hash": "5537f7b0fca6e7d1ff32e336371f649a",
"timestamp": "",
"source": "github",
"line_count": 549,
"max_line_length": 100,
"avg_line_length": 33.899817850637525,
"alnum_prop": 0.6582128848530439,
"repo_name": "berndhopp/guava",
"id": "db769d7f077ba30d98b55c8bb6b2dc95d1ba7ddb",
"size": "19205",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "android/guava/src/com/google/common/primitives/ImmutableIntArray.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "11478"
},
{
"name": "Java",
"bytes": "25350522"
},
{
"name": "Shell",
"bytes": "1885"
}
],
"symlink_target": ""
} |
Dynamic Mapping Grid Creation
| {
"content_hash": "94eb0fc532fddaa5c614f6b81f8f89f3",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 29,
"avg_line_length": 30,
"alnum_prop": 0.8666666666666667,
"repo_name": "bmulcahy/DynamicGrid",
"id": "bbdf553ce859f2c911d574c486ad2202946ab359",
"size": "44",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
// TypeScript-emitted helper: implements class inheritance for down-level JS.
// Copies static members via the best available mechanism (setPrototypeOf,
// writable __proto__, or manual copy) and chains the prototypes.
var __extends = (this && this.__extends) || (function () {
    var extendStatics = Object.setPrototypeOf ||
        ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
        function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
    return function (d, b) {
        extendStatics(d, b);
        function __() { this.constructor = d; }
        // A null base means "inherit from nothing" rather than Object.prototype.
        d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
    };
})();
// TypeScript-emitted helper: Object.assign polyfill used for object-spread output.
// Merges own enumerable properties of each source into the target, left to right.
var __assign = (this && this.__assign) || Object.assign || function(t) {
    for (var s, i = 1, n = arguments.length; i < n; i++) {
        s = arguments[i];
        for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p))
            t[p] = s[p];
    }
    return t;
};
// AMD module (compiled from TypeScript): office-ui-fabric-react Overlay component.
// Renders a full-screen div and disables body scrolling while mounted.
define(["require", "exports", "react", "../../Utilities", "./Overlay.scss"], function (require, exports, React, Utilities_1, stylesImport) {
    "use strict";
    Object.defineProperty(exports, "__esModule", { value: true });
    var styles = stylesImport;
    var Overlay = (function (_super) {
        __extends(Overlay, _super);
        function Overlay() {
            return _super !== null && _super.apply(this, arguments) || this;
        }
        // Lock page scrolling for as long as the overlay is visible.
        Overlay.prototype.componentDidMount = function () {
            Utilities_1.disableBodyScroll();
        };
        Overlay.prototype.componentWillUnmount = function () {
            Utilities_1.enableBodyScroll();
        };
        Overlay.prototype.render = function () {
            var _a = this.props, isDarkThemed = _a.isDarkThemed, className = _a.className;
            // Forward only valid native <div> attributes from props.
            var divProps = Utilities_1.getNativeProps(this.props, Utilities_1.divProperties);
            var modifiedClassName = Utilities_1.css('ms-Overlay', styles.root, className, (_b = {},
                _b['ms-Overlay--dark ' + styles.rootIsDark] = isDarkThemed,
                _b));
            return (React.createElement("div", __assign({}, divProps, { className: modifiedClassName })));
            // Compiler artifact: `_b` is hoisted here; the declaration after `return`
            // is unreachable at runtime but still hoists the variable.
            var _b;
        };
        return Overlay;
    }(Utilities_1.BaseComponent));
    exports.Overlay = Overlay;
});
//# sourceMappingURL=Overlay.js.map
| {
"content_hash": "8bdd3652ca52223d2ffa4538c0644567",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 140,
"avg_line_length": 44.895833333333336,
"alnum_prop": 0.5633410672853828,
"repo_name": "SpatialMap/SpatialMapDev",
"id": "ad75022194f617f3e63f462f37f0b0e6310228ee",
"size": "2155",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "node_modules/office-ui-fabric-react/lib-amd/components/Overlay/Overlay.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "32148"
},
{
"name": "HTML",
"bytes": "14793"
},
{
"name": "JavaScript",
"bytes": "186767"
}
],
"symlink_target": ""
} |
// Grunt build configuration: `grunt` (default task) serves the project on
// http://localhost:8000 and opens it in the default browser.
module.exports = function(grunt) {
  // Project configuration.
  grunt.initConfig({
    connect: {
      server: {
        options: {
          port: 8000,
          // keepalive blocks the task queue so the server keeps running.
          keepalive: true,
          open: {
            target: 'http://localhost:8000'
          }
        }
      }
    }
  });
  grunt.loadNpmTasks('grunt-contrib-connect');
  grunt.registerTask('default', ['connect']);
}; | {
"content_hash": "efc6ff21bcc8b5e17a02f25ead5d0bcf",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 55,
"avg_line_length": 25.31578947368421,
"alnum_prop": 0.4074844074844075,
"repo_name": "Tedris/auto-quest-game",
"id": "a08f96b3b1a7d3f649f909e47ae6542c1a8ef90a",
"size": "481",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Gruntfile.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "718"
},
{
"name": "HTML",
"bytes": "2648"
},
{
"name": "JavaScript",
"bytes": "8324"
}
],
"symlink_target": ""
} |
// Global application namespace with dotted-path accessors (uses underscore.js).
window.App = {
  // Resolve a dotted path (e.g. "a.b.c") against App. Returns defaultValue when
  // any segment is missing or falsy; if the final value is a function it is
  // invoked with no arguments and its result returned.
  get: function(path, defaultValue) {
    var segments = path.split(".");
    var walk = function(scope, key, remaining) {
      if (!scope || !scope[key]) {
        return defaultValue;
      }
      if (remaining.length === 0) {
        var found = scope[key];
        return _.isFunction(found) ? found.call(null) : found;
      }
      return walk(scope[key], _.first(remaining), _.rest(remaining));
    };
    return walk(this, _.first(segments), _.rest(segments));
  },
  // True when the dotted path resolves to something other than undefined.
  has: function(path) {
    return this.get(path, undefined) !== undefined;
  },
  // Assign value at the dotted path, creating intermediate objects as needed.
  set: function(path, value) {
    var segments = path.split(".");
    var assign = function(scope, key, remaining) {
      if (!_.has(scope, key)) {
        scope[key] = {};
      }
      if (remaining.length === 0) {
        scope[key] = value;
      } else {
        assign(scope[key], _.first(remaining), _.rest(remaining));
      }
    };
    return assign(this, _.first(segments), _.rest(segments));
  }
};
| {
"content_hash": "0e00d412d61134217465cd1477782a55",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 74,
"avg_line_length": 26.675,
"alnum_prop": 0.5417057169634489,
"repo_name": "llorsat/wonkajs",
"id": "124733abe5d38206f245919f5e44fc4f622b79f5",
"size": "1146",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "templates/core/app.js",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2070"
},
{
"name": "Handlebars",
"bytes": "7024"
},
{
"name": "JavaScript",
"bytes": "943372"
}
],
"symlink_target": ""
} |
using namespace json_spirit;
using namespace std;

// Time (ms since epoch) at which a walletpassphrase unlock expires; guarded by
// cs_nWalletUnlockTime.
int64 nWalletUnlockTime;
static CCriticalSection cs_nWalletUnlockTime;

// Defined in the blockchain RPC module; serializes a raw transaction into JSON.
extern void TxToJSON(const CTransaction& tx, const uint256 hashBlock, json_spirit::Object& entry);
// Help-text suffix appended to RPC usage strings for commands that spend funds:
// reminds the user to unlock an encrypted wallet first. Empty for plain wallets.
std::string HelpRequiringPassphrase()
{
    if (!pwalletMain->IsCrypted())
        return "";
    return "\nrequires wallet passphrase to be set with walletpassphrase first";
}
// Throws an RPC error unless the wallet is fully unlocked for spending.
// Note: a wallet unlocked for block minting only is also rejected.
void EnsureWalletIsUnlocked()
{
    if (pwalletMain->IsLocked())
        throw JSONRPCError(RPC_WALLET_UNLOCK_NEEDED, "Error: Please enter the wallet passphrase with walletpassphrase first.");
    if (fWalletUnlockMintOnly)
        throw JSONRPCError(RPC_WALLET_UNLOCK_NEEDED, "Error: Wallet unlocked for block minting only.");
}
// Appends wallet-level metadata for wtx (confirmations, block linkage, txid,
// timestamps, and the free-form mapValue entries) to the JSON object `entry`.
void WalletTxToJSON(const CWalletTx& wtx, Object& entry)
{
    int confirms = wtx.GetDepthInMainChain();
    entry.push_back(Pair("confirmations", confirms));
    // Both coinbase and (proof-of-stake) coinstake count as "generated".
    if (wtx.IsCoinBase() || wtx.IsCoinStake())
        entry.push_back(Pair("generated", true));
    // Block details are only available once the tx is in the main chain.
    if (confirms)
    {
        entry.push_back(Pair("blockhash", wtx.hashBlock.GetHex()));
        entry.push_back(Pair("blockindex", wtx.nIndex));
        entry.push_back(Pair("blocktime", (boost::int64_t)(mapBlockIndex[wtx.hashBlock]->nTime)));
    }
    entry.push_back(Pair("txid", wtx.GetHash().GetHex()));
    entry.push_back(Pair("time", (boost::int64_t)wtx.GetTxTime()));
    entry.push_back(Pair("timereceived", (boost::int64_t)wtx.nTimeReceived));
    // Copy through any custom key/value pairs stored on the wallet transaction
    // (e.g. "comment", "to").
    BOOST_FOREACH(const PAIRTYPE(string,string)& item, wtx.mapValue)
        entry.push_back(Pair(item.first, item.second));
}
// Extracts an account name from a JSON parameter. "*" is reserved (it means
// "all accounts" elsewhere, e.g. getbalance) and is rejected here.
string AccountFromValue(const Value& value)
{
    string strAccount = value.get_str();
    if (strAccount == "*")
        throw JSONRPCError(RPC_WALLET_INVALID_ACCOUNT_NAME, "Invalid account name");
    return strAccount;
}
// RPC: getinfo — returns a JSON object summarizing node and wallet state
// (version, balances, chain height, connections, difficulty, keypool, etc.).
Value getinfo(const Array& params, bool fHelp)
{
    if (fHelp || params.size() != 0)
        throw runtime_error(
            "getinfo\n"
            "Returns an object containing various state info.");

    proxyType proxy;
    GetProxy(NET_IPV4, proxy);

    Object obj;
    obj.push_back(Pair("version",       FormatFullVersion()));
    obj.push_back(Pair("protocolversion",(int)PROTOCOL_VERSION));
    obj.push_back(Pair("walletversion", pwalletMain->GetVersion()));
    obj.push_back(Pair("balance",       ValueFromAmount(pwalletMain->GetBalance())));
    obj.push_back(Pair("newmint",       ValueFromAmount(pwalletMain->GetNewMint())));
    obj.push_back(Pair("stake",         ValueFromAmount(pwalletMain->GetStake())));
    obj.push_back(Pair("blocks",        (int)nBestHeight));
    obj.push_back(Pair("moneysupply",   ValueFromAmount(pindexBest->nMoneySupply)));
    obj.push_back(Pair("connections",   (int)vNodes.size()));
    obj.push_back(Pair("proxy",         (proxy.first.IsValid() ? proxy.first.ToStringIPPort() : string())));
    obj.push_back(Pair("ip",            addrSeenByPeer.ToStringIP()));
    obj.push_back(Pair("difficulty",    (double)GetDifficulty()));
    obj.push_back(Pair("testnet",       fTestNet));
    obj.push_back(Pair("keypoololdest", (boost::int64_t)pwalletMain->GetOldestKeyPoolTime()));
    obj.push_back(Pair("keypoolsize",   pwalletMain->GetKeyPoolSize()));
    obj.push_back(Pair("paytxfee",      ValueFromAmount(nTransactionFee)));
    // Only meaningful for encrypted wallets: when the current unlock expires.
    if (pwalletMain->IsCrypted())
        obj.push_back(Pair("unlocked_until", (boost::int64_t)nWalletUnlockTime / 1000));
    obj.push_back(Pair("errors",        GetWarnings("statusbar")));
    return obj;
}
// RPC: getnewpubkey [account] — draws a fresh key from the keypool, labels it
// with [account], and returns the raw public key hex-encoded (for coinbase use).
Value getnewpubkey(const Array& params, bool fHelp)
{
    if (fHelp || params.size() > 1)
        throw runtime_error(
            "getnewpubkey [account]\n"
            "Returns new public key for coinbase generation.");

    // Parse the account first so we don't generate a key if there's an error
    string strAccount;
    if (params.size() > 0)
        strAccount = AccountFromValue(params[0]);

    // Refill the keypool while we can (requires an unlocked wallet).
    if (!pwalletMain->IsLocked())
        pwalletMain->TopUpKeyPool();

    // Generate a new key that is added to wallet
    CPubKey newKey;
    if (!pwalletMain->GetKeyFromPool(newKey, false))
        throw JSONRPCError(RPC_WALLET_KEYPOOL_RAN_OUT, "Error: Keypool ran out, please call keypoolrefill first");
    CKeyID keyID = newKey.GetID();

    pwalletMain->SetAddressBookName(keyID, strAccount);
    vector<unsigned char> vchPubKey = newKey.Raw();

    return HexStr(vchPubKey.begin(), vchPubKey.end());
}
// RPC: getnewaddress [account] — draws a fresh key from the keypool, labels it
// with [account], and returns the corresponding base58 receiving address.
Value getnewaddress(const Array& params, bool fHelp)
{
    if (fHelp || params.size() > 1)
        throw runtime_error(
            "getnewaddress [account]\n"
            "Returns a new Shitcoin address for receiving payments. "
            "If [account] is specified (recommended), it is added to the address book "
            "so payments received with the address will be credited to [account].");

    // Parse the account first so we don't generate a key if there's an error
    string strAccount;
    if (params.size() > 0)
        strAccount = AccountFromValue(params[0]);

    // Refill the keypool while we can (requires an unlocked wallet).
    if (!pwalletMain->IsLocked())
        pwalletMain->TopUpKeyPool();

    // Generate a new key that is added to wallet
    CPubKey newKey;
    if (!pwalletMain->GetKeyFromPool(newKey, false))
        throw JSONRPCError(RPC_WALLET_KEYPOOL_RAN_OUT, "Error: Keypool ran out, please call keypoolrefill first");
    CKeyID keyID = newKey.GetID();

    pwalletMain->SetAddressBookName(keyID, strAccount);

    return CBitcoinAddress(keyID).ToString();
}
// Returns the "current" receiving address for an account, allocating a new one
// when forced, when none exists yet, or when the stored one has already
// received funds (so each payment request sees a fresh, unused address).
CBitcoinAddress GetAccountAddress(string strAccount, bool bForceNew=false)
{
    CWalletDB walletdb(pwalletMain->strWalletFile);

    CAccount account;
    walletdb.ReadAccount(strAccount, account);

    bool bKeyUsed = false;

    // Check if the current key has been used
    if (account.vchPubKey.IsValid())
    {
        CScript scriptPubKey;
        scriptPubKey.SetDestination(account.vchPubKey.GetID());
        // Scan the whole wallet for any output paying the stored key.
        for (map<uint256, CWalletTx>::iterator it = pwalletMain->mapWallet.begin();
             it != pwalletMain->mapWallet.end() && account.vchPubKey.IsValid();
             ++it)
        {
            const CWalletTx& wtx = (*it).second;
            BOOST_FOREACH(const CTxOut& txout, wtx.vout)
                if (txout.scriptPubKey == scriptPubKey)
                    bKeyUsed = true;
        }
    }

    // Generate a new key
    if (!account.vchPubKey.IsValid() || bForceNew || bKeyUsed)
    {
        if (!pwalletMain->GetKeyFromPool(account.vchPubKey, false))
            throw JSONRPCError(RPC_WALLET_KEYPOOL_RAN_OUT, "Error: Keypool ran out, please call keypoolrefill first");

        // Record the label and persist the account's new default key.
        pwalletMain->SetAddressBookName(account.vchPubKey.GetID(), strAccount);
        walletdb.WriteAccount(strAccount, account);
    }

    return CBitcoinAddress(account.vchPubKey.GetID());
}
// RPC: getaccountaddress <account> — returns the account's current receiving
// address, allocating one via GetAccountAddress() if needed.
Value getaccountaddress(const Array& params, bool fHelp)
{
    if (fHelp || params.size() != 1)
        throw runtime_error(
            "getaccountaddress <account>\n"
            "Returns the current Shitcoin address for receiving payments to this account.");

    // Parse the account first so we don't generate a key if there's an error
    string strAccount = AccountFromValue(params[0]);

    return Value(GetAccountAddress(strAccount).ToString());
}
// RPC: setaccount <address> <account> — relabels an address in the book.
// If the address was another account's unused "current" address, that account
// is given a fresh one so it doesn't credit payments to the wrong label.
Value setaccount(const Array& params, bool fHelp)
{
    if (fHelp || params.size() < 1 || params.size() > 2)
        throw runtime_error(
            "setaccount <Shitcoinaddress> <account>\n"
            "Sets the account associated with the given address.");

    CBitcoinAddress address(params[0].get_str());
    if (!address.IsValid())
        throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, "Invalid Shitcoin address");

    string strAccount;
    if (params.size() > 1)
        strAccount = AccountFromValue(params[1]);

    // Detect when changing the account of an address that is the 'unused current key' of another account:
    if (pwalletMain->mapAddressBook.count(address.Get()))
    {
        string strOldAccount = pwalletMain->mapAddressBook[address.Get()];
        if (address == GetAccountAddress(strOldAccount))
            GetAccountAddress(strOldAccount, true); // force-allocate a replacement
    }

    pwalletMain->SetAddressBookName(address.Get(), strAccount);

    return Value::null;
}
// RPC: getaccount <address> — returns the address-book label for an address,
// or the empty string when the address has no (non-empty) label.
Value getaccount(const Array& params, bool fHelp)
{
    if (fHelp || params.size() != 1)
        throw runtime_error(
            "getaccount <Shitcoinaddress>\n"
            "Returns the account associated with the given address.");

    CBitcoinAddress address(params[0].get_str());
    if (!address.IsValid())
        throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, "Invalid Shitcoin address");

    string strAccount;
    map<CTxDestination, string>::iterator mi = pwalletMain->mapAddressBook.find(address.Get());
    if (mi != pwalletMain->mapAddressBook.end() && !(*mi).second.empty())
        strAccount = (*mi).second;
    return strAccount;
}
// RPC: getaddressesbyaccount <account> — lists every address whose
// address-book label matches <account>.
Value getaddressesbyaccount(const Array& params, bool fHelp)
{
    if (fHelp || params.size() != 1)
        throw runtime_error(
            "getaddressesbyaccount <account>\n"
            "Returns the list of addresses for the given account.");

    string strAccount = AccountFromValue(params[0]);

    // Find all addresses that have the given account
    Array ret;
    BOOST_FOREACH(const PAIRTYPE(CBitcoinAddress, string)& item, pwalletMain->mapAddressBook)
    {
        const CBitcoinAddress& address = item.first;
        const string& strName = item.second;
        if (strName == strAccount)
            ret.push_back(address.ToString());
    }
    return ret;
}
// RPC: sendtoaddress <address> <amount> [comment] [comment-to]
// Sends <amount> to <address> from the wallet at large; returns the txid.
Value sendtoaddress(const Array& params, bool fHelp)
{
    if (fHelp || params.size() < 2 || params.size() > 4)
        throw runtime_error(
            "sendtoaddress <Shitcoinaddress> <amount> [comment] [comment-to]\n"
            "<amount> is a real and is rounded to the nearest 0.000001"
            + HelpRequiringPassphrase());

    CBitcoinAddress address(params[0].get_str());
    if (!address.IsValid())
        throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, "Invalid Shitcoin address");

    // Amount
    int64 nAmount = AmountFromValue(params[1]);
    if (nAmount < MIN_TXOUT_AMOUNT)
        throw JSONRPCError(-101, "Send amount too small");

    // Wallet comments ("comment" = purpose, "to" = recipient description).
    CWalletTx wtx;
    if (params.size() > 2 && params[2].type() != null_type && !params[2].get_str().empty())
        wtx.mapValue["comment"] = params[2].get_str();
    if (params.size() > 3 && params[3].type() != null_type && !params[3].get_str().empty())
        wtx.mapValue["to"]      = params[3].get_str();

    // Consistency fix: use the shared unlock check instead of an inline
    // IsLocked() test. The inline test missed the mint-only unlock state,
    // which sendfrom/sendmany/signmessage already reject via this helper.
    EnsureWalletIsUnlocked();

    string strError = pwalletMain->SendMoneyToDestination(address.Get(), nAmount, wtx);
    if (strError != "")
        throw JSONRPCError(RPC_WALLET_ERROR, strError);

    return wtx.GetHash().GetHex();
}
// RPC: listaddressgroupings — lists clusters of wallet addresses whose common
// ownership was revealed on-chain (joint inputs or change outputs). Each entry
// is [address, balance] plus the address-book label when one exists.
Value listaddressgroupings(const Array& params, bool fHelp)
{
    if (fHelp)
        throw runtime_error(
            "listaddressgroupings\n"
            "Lists groups of addresses which have had their common ownership\n"
            "made public by common use as inputs or as the resulting change\n"
            "in past transactions");

    Array jsonGroupings;
    map<CTxDestination, int64> balances = pwalletMain->GetAddressBalances();
    BOOST_FOREACH(set<CTxDestination> grouping, pwalletMain->GetAddressGroupings())
    {
        Array jsonGrouping;
        BOOST_FOREACH(CTxDestination address, grouping)
        {
            Array addressInfo;
            addressInfo.push_back(CBitcoinAddress(address).ToString());
            addressInfo.push_back(ValueFromAmount(balances[address]));
            {
                LOCK(pwalletMain->cs_wallet);
                // Perf: look the label up once instead of calling find() twice
                // (the original repeated the map lookup to fetch ->second).
                map<CTxDestination, string>::iterator mi =
                    pwalletMain->mapAddressBook.find(CBitcoinAddress(address).Get());
                if (mi != pwalletMain->mapAddressBook.end())
                    addressInfo.push_back((*mi).second);
            }
            jsonGrouping.push_back(addressInfo);
        }
        jsonGroupings.push_back(jsonGrouping);
    }
    return jsonGroupings;
}
// RPC: signmessage <address> <message> — signs strMessageMagic + message with
// the private key behind <address>; returns the compact signature base64-encoded.
Value signmessage(const Array& params, bool fHelp)
{
    if (fHelp || params.size() != 2)
        throw runtime_error(
            "signmessage <Shitcoinaddress> <message>\n"
            "Sign a message with the private key of an address");

    EnsureWalletIsUnlocked();

    string strAddress = params[0].get_str();
    string strMessage = params[1].get_str();

    CBitcoinAddress addr(strAddress);
    if (!addr.IsValid())
        throw JSONRPCError(RPC_TYPE_ERROR, "Invalid address");

    CKeyID keyID;
    if (!addr.GetKeyID(keyID))
        throw JSONRPCError(RPC_TYPE_ERROR, "Address does not refer to key");

    CKey key;
    if (!pwalletMain->GetKey(keyID, key))
        throw JSONRPCError(RPC_WALLET_ERROR, "Private key not available");

    // Hash the magic prefix plus the message so signatures can't be replayed
    // as transaction signatures.
    CDataStream ss(SER_GETHASH, 0);
    ss << strMessageMagic;
    ss << strMessage;

    vector<unsigned char> vchSig;
    if (!key.SignCompact(Hash(ss.begin(), ss.end()), vchSig))
        throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, "Sign failed");

    return EncodeBase64(&vchSig[0], vchSig.size());
}
// RPC: verifymessage <address> <signature> <message> — recovers the public key
// from the compact signature and checks it matches <address>. Returns a bool.
Value verifymessage(const Array& params, bool fHelp)
{
    if (fHelp || params.size() != 3)
        throw runtime_error(
            "verifymessage <Shitcoinaddress> <signature> <message>\n"
            "Verify a signed message");

    string strAddress  = params[0].get_str();
    string strSign     = params[1].get_str();
    string strMessage  = params[2].get_str();

    CBitcoinAddress addr(strAddress);
    if (!addr.IsValid())
        throw JSONRPCError(RPC_TYPE_ERROR, "Invalid address");

    CKeyID keyID;
    if (!addr.GetKeyID(keyID))
        throw JSONRPCError(RPC_TYPE_ERROR, "Address does not refer to key");

    bool fInvalid = false;
    vector<unsigned char> vchSig = DecodeBase64(strSign.c_str(), &fInvalid);

    if (fInvalid)
        throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, "Malformed base64 encoding");

    // Rebuild the exact digest that signmessage produced.
    CDataStream ss(SER_GETHASH, 0);
    ss << strMessageMagic;
    ss << strMessage;

    // Key recovery: failure here just means "not verified", not an RPC error.
    CKey key;
    if (!key.SetCompactSignature(Hash(ss.begin(), ss.end()), vchSig))
        return false;

    return (key.GetPubKey().GetID() == keyID);
}
// RPC: getreceivedbyaddress <address> [minconf=1] — sums all outputs paying
// exactly <address> across final, non-generated wallet transactions with at
// least [minconf] confirmations.
Value getreceivedbyaddress(const Array& params, bool fHelp)
{
    if (fHelp || params.size() < 1 || params.size() > 2)
        throw runtime_error(
            "getreceivedbyaddress <Shitcoinaddress> [minconf=1]\n"
            "Returns the total amount received by <Shitcoinaddress> in transactions with at least [minconf] confirmations.");

    // Bitcoin address
    CBitcoinAddress address = CBitcoinAddress(params[0].get_str());
    CScript scriptPubKey;
    if (!address.IsValid())
        throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, "Invalid Shitcoin address");
    scriptPubKey.SetDestination(address.Get());
    // Addresses we don't own trivially have zero received.
    if (!IsMine(*pwalletMain,scriptPubKey))
        return (double)0.0;

    // Minimum confirmations
    int nMinDepth = 1;
    if (params.size() > 1)
        nMinDepth = params[1].get_int();

    // Tally
    int64 nAmount = 0;
    for (map<uint256, CWalletTx>::iterator it = pwalletMain->mapWallet.begin(); it != pwalletMain->mapWallet.end(); ++it)
    {
        const CWalletTx& wtx = (*it).second;
        // Skip generated (coinbase/coinstake) and non-final transactions.
        if (wtx.IsCoinBase() || wtx.IsCoinStake() || !wtx.IsFinal())
            continue;

        BOOST_FOREACH(const CTxOut& txout, wtx.vout)
            if (txout.scriptPubKey == scriptPubKey)
                if (wtx.GetDepthInMainChain() >= nMinDepth)
                    nAmount += txout.nValue;
    }

    return  ValueFromAmount(nAmount);
}
// Collects into setAddress every wallet address whose address-book label
// equals strAccount.
void GetAccountAddresses(string strAccount, set<CTxDestination>& setAddress)
{
    BOOST_FOREACH(const PAIRTYPE(CTxDestination, string)& item, pwalletMain->mapAddressBook)
    {
        if (item.second == strAccount)
            setAddress.insert(item.first);
    }
}
// RPC: getreceivedbyaccount <account> [minconf=1] — sums all outputs paying
// any address labeled <account> across final, non-generated wallet
// transactions with at least [minconf] confirmations.
Value getreceivedbyaccount(const Array& params, bool fHelp)
{
    if (fHelp || params.size() < 1 || params.size() > 2)
        throw runtime_error(
            "getreceivedbyaccount <account> [minconf=1]\n"
            "Returns the total amount received by addresses with <account> in transactions with at least [minconf] confirmations.");

    // Minimum confirmations
    int nMinDepth = 1;
    if (params.size() > 1)
        nMinDepth = params[1].get_int();

    // Get the set of pub keys assigned to account
    string strAccount = AccountFromValue(params[0]);
    set<CTxDestination> setAddress;
    GetAccountAddresses(strAccount, setAddress);

    // Tally
    int64 nAmount = 0;
    for (map<uint256, CWalletTx>::iterator it = pwalletMain->mapWallet.begin(); it != pwalletMain->mapWallet.end(); ++it)
    {
        const CWalletTx& wtx = (*it).second;
        // Skip generated (coinbase/coinstake) and non-final transactions.
        if (wtx.IsCoinBase() || wtx.IsCoinStake() || !wtx.IsFinal())
            continue;

        BOOST_FOREACH(const CTxOut& txout, wtx.vout)
        {
            CTxDestination address;
            if (ExtractDestination(txout.scriptPubKey, address) && IsMine(*pwalletMain, address) && setAddress.count(address))
                if (wtx.GetDepthInMainChain() >= nMinDepth)
                    nAmount += txout.nValue;
        }
    }

    // Consistency fix: format through ValueFromAmount like every other
    // amount-returning RPC here (getreceivedbyaddress, getbalance), instead of
    // the raw (double)nAmount / (double)COIN division the original used.
    return ValueFromAmount(nAmount);
}
// Computes an account's balance: confirmed receives (>= nMinDepth) plus mature
// generated coins, minus sends and fees, plus internal "move" ledger entries.
int64 GetAccountBalance(CWalletDB& walletdb, const string& strAccount, int nMinDepth)
{
    int64 nBalance = 0;

    // Tally wallet transactions
    for (map<uint256, CWalletTx>::iterator it = pwalletMain->mapWallet.begin(); it != pwalletMain->mapWallet.end(); ++it)
    {
        const CWalletTx& wtx = (*it).second;
        if (!wtx.IsFinal())
            continue;

        int64 nGenerated, nReceived, nSent, nFee;
        wtx.GetAccountAmounts(strAccount, nGenerated, nReceived, nSent, nFee);

        // Receives only count once sufficiently confirmed; sends and fees are
        // debited immediately.
        if (nReceived != 0 && wtx.GetDepthInMainChain() >= nMinDepth)
            nBalance += nReceived;
        nBalance += nGenerated - nSent - nFee;
    }

    // Tally internal accounting entries
    nBalance += walletdb.GetAccountCreditDebit(strAccount);

    return nBalance;
}
// Convenience overload: opens the wallet database and delegates to the
// CWalletDB-taking overload above.
int64 GetAccountBalance(const string& strAccount, int nMinDepth)
{
    CWalletDB walletdb(pwalletMain->strWalletFile);
    return GetAccountBalance(walletdb, strAccount, nMinDepth);
}
// RPC: getbalance [account] [minconf=1] — with no arguments, the wallet's
// total spendable balance; with "*", a cross-check computed from per-tx
// amounts; otherwise the named account's ledger balance.
Value getbalance(const Array& params, bool fHelp)
{
    if (fHelp || params.size() > 2)
        throw runtime_error(
            "getbalance [account] [minconf=1]\n"
            "If [account] is not specified, returns the server's total available balance.\n"
            "If [account] is specified, returns the balance in the account.");

    if (params.size() == 0)
        return  ValueFromAmount(pwalletMain->GetBalance());

    int nMinDepth = 1;
    if (params.size() > 1)
        nMinDepth = params[1].get_int();

    if (params[0].get_str() == "*") {
        // Calculate total balance a different way from GetBalance()
        // (GetBalance() sums up all unspent TxOuts)
        // getbalance and getbalance '*' should always return the same number.
        int64 nBalance = 0;
        for (map<uint256, CWalletTx>::iterator it = pwalletMain->mapWallet.begin(); it != pwalletMain->mapWallet.end(); ++it)
        {
            const CWalletTx& wtx = (*it).second;
            if (!wtx.IsFinal())
                continue;

            int64 allGeneratedImmature, allGeneratedMature, allFee;
            allGeneratedImmature = allGeneratedMature = allFee = 0;
            string strSentAccount;
            list<pair<CTxDestination, int64> > listReceived;
            list<pair<CTxDestination, int64> > listSent;
            wtx.GetAmounts(allGeneratedImmature, allGeneratedMature, listReceived, listSent, allFee, strSentAccount);
            // Credits require nMinDepth confirmations; debits, fees and mature
            // generated coins are applied unconditionally.
            if (wtx.GetDepthInMainChain() >= nMinDepth)
            {
                BOOST_FOREACH(const PAIRTYPE(CTxDestination,int64)& r, listReceived)
                    nBalance += r.second;
            }
            BOOST_FOREACH(const PAIRTYPE(CTxDestination,int64)& r, listSent)
                nBalance -= r.second;
            nBalance -= allFee;
            nBalance += allGeneratedMature;
        }
        return  ValueFromAmount(nBalance);
    }

    string strAccount = AccountFromValue(params[0]);

    int64 nBalance = GetAccountBalance(strAccount, nMinDepth);

    return ValueFromAmount(nBalance);
}
// RPC: move <fromaccount> <toaccount> <amount> [minconf=1] [comment]
// Records an internal ledger transfer between two accounts (no on-chain tx).
// Writes a paired debit/credit inside a single database transaction.
Value movecmd(const Array& params, bool fHelp)
{
    if (fHelp || params.size() < 3 || params.size() > 5)
        throw runtime_error(
            "move <fromaccount> <toaccount> <amount> [minconf=1] [comment]\n"
            "Move from one account in your wallet to another.");

    string strFrom = AccountFromValue(params[0]);
    string strTo = AccountFromValue(params[1]);
    int64 nAmount = AmountFromValue(params[2]);
    if (nAmount < MIN_TXOUT_AMOUNT)
        throw JSONRPCError(-101, "Send amount too small");
    if (params.size() > 3)
        // unused parameter, used to be nMinDepth, keep type-checking it though
        (void)params[3].get_int();
    string strComment;
    if (params.size() > 4)
        strComment = params[4].get_str();

    CWalletDB walletdb(pwalletMain->strWalletFile);
    if (!walletdb.TxnBegin())
        throw JSONRPCError(RPC_DATABASE_ERROR, "database error");

    // Both entries share one timestamp so the pair sorts together.
    int64 nNow = GetAdjustedTime();

    // Debit
    // NOTE(review): the WriteAccountingEntry return values are not checked; a
    // failed write would only surface at TxnCommit, if at all — confirm.
    CAccountingEntry debit;
    debit.nOrderPos = pwalletMain->IncOrderPosNext(&walletdb);
    debit.strAccount = strFrom;
    debit.nCreditDebit = -nAmount;
    debit.nTime = nNow;
    debit.strOtherAccount = strTo;
    debit.strComment = strComment;
    walletdb.WriteAccountingEntry(debit);

    // Credit
    CAccountingEntry credit;
    credit.nOrderPos = pwalletMain->IncOrderPosNext(&walletdb);
    credit.strAccount = strTo;
    credit.nCreditDebit = nAmount;
    credit.nTime = nNow;
    credit.strOtherAccount = strFrom;
    credit.strComment = strComment;
    walletdb.WriteAccountingEntry(credit);

    if (!walletdb.TxnCommit())
        throw JSONRPCError(RPC_DATABASE_ERROR, "database error");

    return true;
}
// RPC: sendfrom <fromaccount> <toaddress> <amount> [minconf=1] [comment] [comment-to]
// Sends coins debited against a specific account's ledger balance; fails if
// the account balance (at [minconf]) cannot cover the amount.
Value sendfrom(const Array& params, bool fHelp)
{
    if (fHelp || params.size() < 3 || params.size() > 6)
        throw runtime_error(
            "sendfrom <fromaccount> <toShitcoinaddress> <amount> [minconf=1] [comment] [comment-to]\n"
            "<amount> is a real and is rounded to the nearest 0.000001"
            + HelpRequiringPassphrase());

    string strAccount = AccountFromValue(params[0]);
    CBitcoinAddress address(params[1].get_str());
    if (!address.IsValid())
        throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, "Invalid Shitcoin address");
    int64 nAmount = AmountFromValue(params[2]);
    if (nAmount < MIN_TXOUT_AMOUNT)
        throw JSONRPCError(-101, "Send amount too small");
    int nMinDepth = 1;
    if (params.size() > 3)
        nMinDepth = params[3].get_int();

    CWalletTx wtx;
    wtx.strFromAccount = strAccount;
    if (params.size() > 4 && params[4].type() != null_type && !params[4].get_str().empty())
        wtx.mapValue["comment"] = params[4].get_str();
    if (params.size() > 5 && params[5].type() != null_type && !params[5].get_str().empty())
        wtx.mapValue["to"]      = params[5].get_str();

    EnsureWalletIsUnlocked();

    // Check funds
    int64 nBalance = GetAccountBalance(strAccount, nMinDepth);
    if (nAmount > nBalance)
        throw JSONRPCError(RPC_WALLET_INSUFFICIENT_FUNDS, "Account has insufficient funds");

    // Send
    string strError = pwalletMain->SendMoneyToDestination(address.Get(), nAmount, wtx);
    if (strError != "")
        throw JSONRPCError(RPC_WALLET_ERROR, strError);

    return wtx.GetHash().GetHex();
}
// RPC: sendmany <fromaccount> {address:amount,...} [minconf=1] [comment]
// Builds one transaction paying multiple recipients, debited against the given
// account's ledger balance. Duplicate destination addresses are rejected.
Value sendmany(const Array& params, bool fHelp)
{
    if (fHelp || params.size() < 2 || params.size() > 4)
        throw runtime_error(
            "sendmany <fromaccount> {address:amount,...} [minconf=1] [comment]\n"
            "amounts are double-precision floating point numbers"
            + HelpRequiringPassphrase());

    string strAccount = AccountFromValue(params[0]);
    Object sendTo = params[1].get_obj();
    int nMinDepth = 1;
    if (params.size() > 2)
        nMinDepth = params[2].get_int();

    CWalletTx wtx;
    wtx.strFromAccount = strAccount;
    if (params.size() > 3 && params[3].type() != null_type && !params[3].get_str().empty())
        wtx.mapValue["comment"] = params[3].get_str();

    set<CBitcoinAddress> setAddress;
    vector<pair<CScript, int64> > vecSend;

    // Validate each recipient and accumulate the total to send.
    int64 totalAmount = 0;
    BOOST_FOREACH(const Pair& s, sendTo)
    {
        CBitcoinAddress address(s.name_);
        if (!address.IsValid())
            throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, string("Invalid Shitcoin address: ")+s.name_);

        if (setAddress.count(address))
            throw JSONRPCError(RPC_INVALID_PARAMETER, string("Invalid parameter, duplicated address: ")+s.name_);
        setAddress.insert(address);

        CScript scriptPubKey;
        scriptPubKey.SetDestination(address.Get());
        int64 nAmount = AmountFromValue(s.value_);
        if (nAmount < MIN_TXOUT_AMOUNT)
            throw JSONRPCError(-101, "Send amount too small");
        totalAmount += nAmount;

        vecSend.push_back(make_pair(scriptPubKey, nAmount));
    }

    EnsureWalletIsUnlocked();

    // Check funds
    int64 nBalance = GetAccountBalance(strAccount, nMinDepth);
    if (totalAmount > nBalance)
        throw JSONRPCError(RPC_WALLET_INSUFFICIENT_FUNDS, "Account has insufficient funds");

    // Send
    CReserveKey keyChange(pwalletMain);
    int64 nFeeRequired = 0;
    bool fCreated = pwalletMain->CreateTransaction(vecSend, wtx, keyChange, nFeeRequired);
    if (!fCreated)
    {
        // Distinguish "not enough coins including the fee" from other failures.
        if (totalAmount + nFeeRequired > pwalletMain->GetBalance())
            throw JSONRPCError(RPC_WALLET_INSUFFICIENT_FUNDS, "Insufficient funds");
        throw JSONRPCError(RPC_WALLET_ERROR, "Transaction creation failed");
    }
    if (!pwalletMain->CommitTransaction(wtx, keyChange))
        throw JSONRPCError(RPC_WALLET_ERROR, "Transaction commit failed");

    return wtx.GetHash().GetHex();
}
// RPC "addmultisigaddress": create an nRequired-of-N multisig redeem script,
// store it in the wallet as a pay-to-script-hash address, and optionally
// label it with an account. Each key may be an owned address (whose full
// public key the wallet must hold) or a hex-encoded public key.
Value addmultisigaddress(const Array& params, bool fHelp)
{
if (fHelp || params.size() < 2 || params.size() > 3)
{
string msg = "addmultisigaddress <nrequired> <'[\"key\",\"key\"]'> [account]\n"
"Add a nrequired-to-sign multisignature address to the wallet\"\n"
"each key is a Shitcoin address or hex-encoded public key\n"
"If [account] is specified, assign address to [account].";
throw runtime_error(msg);
}
int nRequired = params[0].get_int();
const Array& keys = params[1].get_array();
string strAccount;
if (params.size() > 2)
strAccount = AccountFromValue(params[2]);
// Gather public keys
if (nRequired < 1)
throw runtime_error("a multisignature address must require at least one key to redeem");
if ((int)keys.size() < nRequired)
throw runtime_error(
strprintf("not enough keys supplied "
"(got %"PRIszu" keys, but need at least %d to redeem)", keys.size(), nRequired));
std::vector<CKey> pubkeys;
pubkeys.resize(keys.size());
for (unsigned int i = 0; i < keys.size(); i++)
{
const std::string& ks = keys[i].get_str();
// Case 1: Bitcoin address and we have full public key:
CBitcoinAddress address(ks);
if (address.IsValid())
{
CKeyID keyID;
if (!address.GetKeyID(keyID))
throw runtime_error(
strprintf("%s does not refer to a key",ks.c_str()));
CPubKey vchPubKey;
// The wallet must know the full pubkey; a bare address is only a hash.
if (!pwalletMain->GetPubKey(keyID, vchPubKey))
throw runtime_error(
strprintf("no full public key for address %s",ks.c_str()));
if (!vchPubKey.IsValid() || !pubkeys[i].SetPubKey(vchPubKey))
throw runtime_error(" Invalid public key: "+ks);
}
// Case 2: hex public key
else if (IsHex(ks))
{
CPubKey vchPubKey(ParseHex(ks));
if (!vchPubKey.IsValid() || !pubkeys[i].SetPubKey(vchPubKey))
throw runtime_error(" Invalid public key: "+ks);
}
else
{
throw runtime_error(" Invalid public key: "+ks);
}
}
// Construct using pay-to-script-hash:
CScript inner;
inner.SetMultisig(nRequired, pubkeys);
CScriptID innerID = inner.GetID();
pwalletMain->AddCScript(inner);
pwalletMain->SetAddressBookName(innerID, strAccount);
return CBitcoinAddress(innerID).ToString();
}
struct tallyitem
{
int64 nAmount;
int nConf;
tallyitem()
{
nAmount = 0;
nConf = std::numeric_limits<int>::max();
}
};
// Shared worker for listreceivedbyaddress / listreceivedbyaccount.
// Tallies received amounts per address across all wallet transactions,
// then emits either one JSON object per address (fByAccounts == false)
// or one per account, with each account's totals merged (fByAccounts == true).
Value ListReceived(const Array& params, bool fByAccounts)
{
// Minimum confirmations
int nMinDepth = 1;
if (params.size() > 0)
nMinDepth = params[0].get_int();
// Whether to include empty accounts
bool fIncludeEmpty = false;
if (params.size() > 1)
fIncludeEmpty = params[1].get_bool();
// Tally
map<CBitcoinAddress, tallyitem> mapTally;
for (map<uint256, CWalletTx>::iterator it = pwalletMain->mapWallet.begin(); it != pwalletMain->mapWallet.end(); ++it)
{
const CWalletTx& wtx = (*it).second;
// Coinbase/coinstake and non-final transactions are never counted.
if (wtx.IsCoinBase() || wtx.IsCoinStake() || !wtx.IsFinal())
continue;
int nDepth = wtx.GetDepthInMainChain();
if (nDepth < nMinDepth)
continue;
BOOST_FOREACH(const CTxOut& txout, wtx.vout)
{
CTxDestination address;
// Only outputs that pay a decodable destination we own are tallied.
if (!ExtractDestination(txout.scriptPubKey, address) || !IsMine(*pwalletMain, address))
continue;
tallyitem& item = mapTally[address];
item.nAmount += txout.nValue;
// Track the depth of the most recent (least-confirmed) payment.
item.nConf = min(item.nConf, nDepth);
}
}
// Reply
// Iterate the address book, not the tally, so empty addresses can be
// reported when fIncludeEmpty is set.
Array ret;
map<string, tallyitem> mapAccountTally;
BOOST_FOREACH(const PAIRTYPE(CBitcoinAddress, string)& item, pwalletMain->mapAddressBook)
{
const CBitcoinAddress& address = item.first;
const string& strAccount = item.second;
map<CBitcoinAddress, tallyitem>::iterator it = mapTally.find(address);
if (it == mapTally.end() && !fIncludeEmpty)
continue;
int64 nAmount = 0;
int nConf = std::numeric_limits<int>::max();
if (it != mapTally.end())
{
nAmount = (*it).second.nAmount;
nConf = (*it).second.nConf;
}
if (fByAccounts)
{
// Merge this address into its account's running totals.
tallyitem& item = mapAccountTally[strAccount];
item.nAmount += nAmount;
item.nConf = min(item.nConf, nConf);
}
else
{
Object obj;
obj.push_back(Pair("address", address.ToString()));
obj.push_back(Pair("account", strAccount));
obj.push_back(Pair("amount", ValueFromAmount(nAmount)));
// INT_MAX sentinel means "never received" -> report 0 confirmations.
obj.push_back(Pair("confirmations", (nConf == std::numeric_limits<int>::max() ? 0 : nConf)));
ret.push_back(obj);
}
}
if (fByAccounts)
{
for (map<string, tallyitem>::iterator it = mapAccountTally.begin(); it != mapAccountTally.end(); ++it)
{
int64 nAmount = (*it).second.nAmount;
int nConf = (*it).second.nConf;
Object obj;
obj.push_back(Pair("account", (*it).first));
obj.push_back(Pair("amount", ValueFromAmount(nAmount)));
obj.push_back(Pair("confirmations", (nConf == std::numeric_limits<int>::max() ? 0 : nConf)));
ret.push_back(obj);
}
}
return ret;
}
// RPC "listreceivedbyaddress": per-address received totals.
// All the work happens in the shared ListReceived() helper.
Value listreceivedbyaddress(const Array& params, bool fHelp)
{
if (params.size() > 2 || fHelp)
throw runtime_error(
"listreceivedbyaddress [minconf=1] [includeempty=false]\n"
"[minconf] is the minimum number of confirmations before payments are included.\n"
"[includeempty] whether to include addresses that haven't received any payments.\n"
"Returns an array of objects containing:\n"
" \"address\" : receiving address\n"
" \"account\" : the account of the receiving address\n"
" \"amount\" : total amount received by the address\n"
" \"confirmations\" : number of confirmations of the most recent transaction included");
return ListReceived(params, false); // false => group by address
}
// RPC "listreceivedbyaccount": per-account received totals.
// All the work happens in the shared ListReceived() helper.
Value listreceivedbyaccount(const Array& params, bool fHelp)
{
if (params.size() > 2 || fHelp)
throw runtime_error(
"listreceivedbyaccount [minconf=1] [includeempty=false]\n"
"[minconf] is the minimum number of confirmations before payments are included.\n"
"[includeempty] whether to include accounts that haven't received any payments.\n"
"Returns an array of objects containing:\n"
" \"account\" : the account of the receiving addresses\n"
" \"amount\" : total amount received by addresses with this account\n"
" \"confirmations\" : number of confirmations of the most recent transaction included");
return ListReceived(params, true); // true => group by account
}
// Convert one wallet transaction into zero or more JSON entries appended to
// ret: a "generate"/"immature"/"orphan" entry for mined/staked coins, one
// "send" entry per sent output, and one "receive" entry per received output.
// strAccount filters the output ("*" matches every account); nMinDepth hides
// received entries below that confirmation depth; fLong adds the full
// WalletTxToJSON detail fields.
void ListTransactions(const CWalletTx& wtx, const string& strAccount, int nMinDepth, bool fLong, Array& ret)
{
int64 nGeneratedImmature, nGeneratedMature, nFee;
string strSentAccount;
list<pair<CTxDestination, int64> > listReceived;
list<pair<CTxDestination, int64> > listSent;
wtx.GetAmounts(nGeneratedImmature, nGeneratedMature, listReceived, listSent, nFee, strSentAccount);
bool fAllAccounts = (strAccount == string("*"));
// Generated blocks assigned to account ""
if ((nGeneratedMature+nGeneratedImmature) != 0 && (fAllAccounts || strAccount == ""))
{
Object entry;
entry.push_back(Pair("account", string("")));
if (nGeneratedImmature)
{
// Depth 0 means the generating block fell out of the main chain.
entry.push_back(Pair("category", wtx.GetDepthInMainChain() ? "immature" : "orphan"));
entry.push_back(Pair("amount", ValueFromAmount(nGeneratedImmature)));
}
else
{
entry.push_back(Pair("category", "generate"));
entry.push_back(Pair("amount", ValueFromAmount(nGeneratedMature)));
}
if (fLong)
WalletTxToJSON(wtx, entry);
ret.push_back(entry);
}
// Sent
if ((!listSent.empty() || nFee != 0) && (fAllAccounts || strAccount == strSentAccount))
{
BOOST_FOREACH(const PAIRTYPE(CTxDestination, int64)& s, listSent)
{
Object entry;
entry.push_back(Pair("account", strSentAccount));
entry.push_back(Pair("address", CBitcoinAddress(s.first).ToString()));
entry.push_back(Pair("category", "send"));
// Sent amounts and fees are reported as negative values.
entry.push_back(Pair("amount", ValueFromAmount(-s.second)));
entry.push_back(Pair("fee", ValueFromAmount(-nFee)));
if (fLong)
WalletTxToJSON(wtx, entry);
ret.push_back(entry);
}
}
// Received
if (listReceived.size() > 0 && wtx.GetDepthInMainChain() >= nMinDepth)
{
BOOST_FOREACH(const PAIRTYPE(CTxDestination, int64)& r, listReceived)
{
string account;
// The receiving address's account label, if it is in the address book.
if (pwalletMain->mapAddressBook.count(r.first))
account = pwalletMain->mapAddressBook[r.first];
if (fAllAccounts || (account == strAccount))
{
Object entry;
entry.push_back(Pair("account", account));
entry.push_back(Pair("address", CBitcoinAddress(r.first).ToString()));
if (wtx.IsCoinBase())
{
// Coinbase receives are categorised by maturity instead of "receive".
if (wtx.GetDepthInMainChain() < 1)
entry.push_back(Pair("category", "orphan"));
else if (wtx.GetBlocksToMaturity() > 0)
entry.push_back(Pair("category", "immature"));
else
entry.push_back(Pair("category", "generate"));
}
else
entry.push_back(Pair("category", "receive"));
entry.push_back(Pair("amount", ValueFromAmount(r.second)));
if (fLong)
WalletTxToJSON(wtx, entry);
ret.push_back(entry);
}
}
}
}
void AcentryToJSON(const CAccountingEntry& acentry, const string& strAccount, Array& ret)
{
bool fAllAccounts = (strAccount == string("*"));
if (fAllAccounts || acentry.strAccount == strAccount)
{
Object entry;
entry.push_back(Pair("account", acentry.strAccount));
entry.push_back(Pair("category", "move"));
entry.push_back(Pair("time", (boost::int64_t)acentry.nTime));
entry.push_back(Pair("amount", ValueFromAmount(acentry.nCreditDebit)));
entry.push_back(Pair("otheraccount", acentry.strOtherAccount));
entry.push_back(Pair("comment", acentry.strComment));
ret.push_back(entry);
}
}
// RPC "listtransactions": return up to [count] entries for [account],
// skipping the newest [from]. Entries are gathered newest-first, the
// requested window is sliced out, then reversed to oldest-first for output.
Value listtransactions(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 3)
throw runtime_error(
"listtransactions [account] [count=10] [from=0]\n"
"Returns up to [count] most recent transactions skipping the first [from] transactions for account [account].");
string strAccount = "*";
if (params.size() > 0)
strAccount = params[0].get_str();
int nCount = 10;
if (params.size() > 1)
nCount = params[1].get_int();
int nFrom = 0;
if (params.size() > 2)
nFrom = params[2].get_int();
if (nCount < 0)
throw JSONRPCError(RPC_INVALID_PARAMETER, "Negative count");
if (nFrom < 0)
throw JSONRPCError(RPC_INVALID_PARAMETER, "Negative from");
Array ret;
std::list<CAccountingEntry> acentries;
// OrderedTxItems interleaves wallet txs and accounting entries in time order.
CWallet::TxItems txOrdered = pwalletMain->OrderedTxItems(acentries, strAccount);
// iterate backwards until we have nCount items to return:
for (CWallet::TxItems::reverse_iterator it = txOrdered.rbegin(); it != txOrdered.rend(); ++it)
{
CWalletTx *const pwtx = (*it).second.first;
if (pwtx != 0)
ListTransactions(*pwtx, strAccount, 0, true, ret);
CAccountingEntry *const pacentry = (*it).second.second;
if (pacentry != 0)
AcentryToJSON(*pacentry, strAccount, ret);
// Stop once enough entries exist to cover the skip window plus the page.
if ((int)ret.size() >= (nCount+nFrom)) break;
}
// ret is newest to oldest
// Clamp the window to what was actually collected.
if (nFrom > (int)ret.size())
nFrom = ret.size();
if ((nFrom + nCount) > (int)ret.size())
nCount = ret.size() - nFrom;
Array::iterator first = ret.begin();
std::advance(first, nFrom);
Array::iterator last = ret.begin();
std::advance(last, nFrom+nCount);
// Erase the tail first so 'first' stays valid for the head erase.
if (last != ret.end()) ret.erase(last, ret.end());
if (first != ret.begin()) ret.erase(ret.begin(), first);
std::reverse(ret.begin(), ret.end()); // Return oldest to newest
return ret;
}
// RPC "listaccounts": map every account name to its balance, computed from
// scratch by walking all wallet transactions (sends/fees always subtract;
// receives and generation only add once sufficiently confirmed) and then
// applying internal "move" accounting entries.
Value listaccounts(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 1)
throw runtime_error(
"listaccounts [minconf=1]\n"
"Returns Object that has account names as keys, account balances as values.");
int nMinDepth = 1;
if (params.size() > 0)
nMinDepth = params[0].get_int();
map<string, int64> mapAccountBalances;
// Seed every account we own an address for, so zero balances still appear.
BOOST_FOREACH(const PAIRTYPE(CTxDestination, string)& entry, pwalletMain->mapAddressBook) {
if (IsMine(*pwalletMain, entry.first)) // This address belongs to me
mapAccountBalances[entry.second] = 0;
}
for (map<uint256, CWalletTx>::iterator it = pwalletMain->mapWallet.begin(); it != pwalletMain->mapWallet.end(); ++it)
{
const CWalletTx& wtx = (*it).second;
int64 nGeneratedImmature, nGeneratedMature, nFee;
string strSentAccount;
list<pair<CTxDestination, int64> > listReceived;
list<pair<CTxDestination, int64> > listSent;
wtx.GetAmounts(nGeneratedImmature, nGeneratedMature, listReceived, listSent, nFee, strSentAccount);
// Sends and fees are debited regardless of confirmation depth.
mapAccountBalances[strSentAccount] -= nFee;
BOOST_FOREACH(const PAIRTYPE(CTxDestination, int64)& s, listSent)
mapAccountBalances[strSentAccount] -= s.second;
if (wtx.GetDepthInMainChain() >= nMinDepth)
{
// Generated (mined/staked) coins are credited to the "" account.
mapAccountBalances[""] += nGeneratedMature;
// Note: the else below pairs with the inner count() check — receives
// to addresses not in the address book fall into the "" account.
BOOST_FOREACH(const PAIRTYPE(CTxDestination, int64)& r, listReceived)
if (pwalletMain->mapAddressBook.count(r.first))
mapAccountBalances[pwalletMain->mapAddressBook[r.first]] += r.second;
else
mapAccountBalances[""] += r.second;
}
}
// Apply internal "move" entries recorded in the wallet database.
list<CAccountingEntry> acentries;
CWalletDB(pwalletMain->strWalletFile).ListAccountCreditDebit("*", acentries);
BOOST_FOREACH(const CAccountingEntry& entry, acentries)
mapAccountBalances[entry.strAccount] += entry.nCreditDebit;
Object ret;
BOOST_FOREACH(const PAIRTYPE(string, int64)& accountBalance, mapAccountBalances) {
ret.push_back(Pair(accountBalance.first, ValueFromAmount(accountBalance.second)));
}
return ret;
}
// RPC "listsinceblock": list every wallet transaction in blocks after the
// given block hash (or all transactions when omitted/unknown), plus the hash
// of the block that is target-confirmations deep, for use as the next call's
// starting point.
Value listsinceblock(const Array& params, bool fHelp)
{
if (fHelp)
throw runtime_error(
"listsinceblock [blockhash] [target-confirmations]\n"
"Get all transactions in blocks since block [blockhash], or all transactions if omitted");
CBlockIndex *pindex = NULL;
int target_confirms = 1;
if (params.size() > 0)
{
uint256 blockId = 0;
blockId.SetHex(params[0].get_str());
// An unknown hash leaves pindex NULL, which means "no depth cutoff".
pindex = CBlockLocator(blockId).GetBlockIndex();
}
if (params.size() > 1)
{
target_confirms = params[1].get_int();
if (target_confirms < 1)
throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid parameter");
}
// depth == -1 disables filtering; otherwise include txs shallower than it.
int depth = pindex ? (1 + nBestHeight - pindex->nHeight) : -1;
Array transactions;
for (map<uint256, CWalletTx>::iterator it = pwalletMain->mapWallet.begin(); it != pwalletMain->mapWallet.end(); it++)
{
CWalletTx tx = (*it).second;
if (depth == -1 || tx.GetDepthInMainChain() < depth)
ListTransactions(tx, "*", 0, true, transactions);
}
uint256 lastblock;
if (target_confirms == 1)
{
lastblock = hashBestChain;
}
else
{
// Walk back from the tip to the block with target_confirms confirmations.
// NOTE(review): assumes pindexBest is non-NULL here (a chain exists) —
// confirm this holds for all call paths.
int target_height = pindexBest->nHeight + 1 - target_confirms;
CBlockIndex *block;
for (block = pindexBest;
block && block->nHeight > target_height;
block = block->pprev) { }
lastblock = block ? block->GetBlockHash() : 0;
}
Object ret;
ret.push_back(Pair("transactions", transactions));
ret.push_back(Pair("lastblock", lastblock.GetHex()));
return ret;
}
// RPC "gettransaction": detailed JSON for one transaction. Wallet
// transactions get amount/fee/details; otherwise the tx is looked up
// globally and reported with block/confirmation information only.
Value gettransaction(const Array& params, bool fHelp)
{
if (fHelp || params.size() != 1)
throw runtime_error(
"gettransaction <txid>\n"
"Get detailed information about <txid>");
uint256 hash;
hash.SetHex(params[0].get_str());
Object entry;
if (pwalletMain->mapWallet.count(hash))
{
const CWalletTx& wtx = pwalletMain->mapWallet[hash];
TxToJSON(wtx, 0, entry);
int64 nCredit = wtx.GetCredit();
int64 nDebit = wtx.GetDebit();
int64 nNet = nCredit - nDebit;
// Fee is only computable when we funded the transaction ourselves.
int64 nFee = (wtx.IsFromMe() ? wtx.GetValueOut() - nDebit : 0);
entry.push_back(Pair("amount", ValueFromAmount(nNet - nFee)));
if (wtx.IsFromMe())
entry.push_back(Pair("fee", ValueFromAmount(nFee)));
WalletTxToJSON(wtx, entry);
Array details;
ListTransactions(pwalletMain->mapWallet[hash], "*", 0, false, details);
entry.push_back(Pair("details", details));
}
else
{
// Not ours: fall back to the global transaction index.
CTransaction tx;
uint256 hashBlock = 0;
if (GetTransaction(hash, tx, hashBlock))
{
entry.push_back(Pair("txid", hash.GetHex()));
TxToJSON(tx, 0, entry);
// hashBlock == 0 means the tx is unconfirmed (not yet in a block).
if (hashBlock == 0)
entry.push_back(Pair("confirmations", 0));
else
{
entry.push_back(Pair("blockhash", hashBlock.GetHex()));
map<uint256, CBlockIndex*>::iterator mi = mapBlockIndex.find(hashBlock);
if (mi != mapBlockIndex.end() && (*mi).second)
{
CBlockIndex* pindex = (*mi).second;
if (pindex->IsInMainChain())
{
entry.push_back(Pair("confirmations", 1 + nBestHeight - pindex->nHeight));
entry.push_back(Pair("txntime", (boost::int64_t)tx.nTime));
entry.push_back(Pair("time", (boost::int64_t)pindex->nTime));
}
else
// Block exists but is off the main chain: report as unconfirmed.
entry.push_back(Pair("confirmations", 0));
}
}
}
else
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, "No information available about transaction");
}
return entry;
}
// RPC "backupwallet": copy wallet.dat to the given destination
// (a directory or a full file path). Throws on failure.
Value backupwallet(const Array& params, bool fHelp)
{
if (params.size() != 1 || fHelp)
throw runtime_error(
"backupwallet <destination>\n"
"Safely copies wallet.dat to destination, which can be a directory or a path with filename.");
const string strDestination = params[0].get_str();
if (!BackupWallet(*pwalletMain, strDestination))
throw JSONRPCError(RPC_WALLET_ERROR, "Error: Wallet backup failed!");
return Value::null;
}
// RPC "keypoolrefill": top the keypool up to the configured size.
// Requires an unlocked wallet; fails if the pool is still short afterwards.
Value keypoolrefill(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 0)
throw runtime_error(
"keypoolrefill\n"
"Fills the keypool."
+ HelpRequiringPassphrase());
EnsureWalletIsUnlocked();
pwalletMain->TopUpKeyPool();
// -keypool defaults to 100; anything short of it means the top-up failed.
if (pwalletMain->GetKeyPoolSize() < GetArg("-keypool", 100))
throw JSONRPCError(RPC_WALLET_ERROR, "Error refreshing keypool.");
return Value::null;
}
// Thread entry point: refill the keypool in the background after the wallet
// is unlocked (spawned by walletpassphrase). parg is unused.
void ThreadTopUpKeyPool(void* parg)
{
// Make this thread recognisable as the key-topping-up thread
RenameThread("bitcoin-key-top");
pwalletMain->TopUpKeyPool();
}
// Thread entry point: re-lock the wallet after a timeout. parg is a
// heap-allocated int64 timeout in seconds, owned (and deleted) by this
// thread. nWalletUnlockTime coordinates overlapping unlock requests:
// only the thread that claimed the slot (found it == 0) sleeps and locks;
// later threads merely extend the deadline if theirs is longer.
void ThreadCleanWalletPassphrase(void* parg)
{
// Make this thread recognisable as the wallet relocking thread
RenameThread("bitcoin-lock-wa");
int64 nMyWakeTime = GetTimeMillis() + *((int64*)parg) * 1000;
ENTER_CRITICAL_SECTION(cs_nWalletUnlockTime);
if (nWalletUnlockTime == 0)
{
nWalletUnlockTime = nMyWakeTime;
do
{
// Another thread (e.g. walletlock) zeroing the deadline cancels us.
if (nWalletUnlockTime==0)
break;
int64 nToSleep = nWalletUnlockTime - GetTimeMillis();
if (nToSleep <= 0)
break;
// Drop the lock while sleeping so the deadline can be extended/cleared.
LEAVE_CRITICAL_SECTION(cs_nWalletUnlockTime);
Sleep(nToSleep);
ENTER_CRITICAL_SECTION(cs_nWalletUnlockTime);
} while(1);
if (nWalletUnlockTime)
{
nWalletUnlockTime = 0;
pwalletMain->Lock();
}
}
else
{
// A relock thread already exists; just push the deadline out if needed.
if (nWalletUnlockTime < nMyWakeTime)
nWalletUnlockTime = nMyWakeTime;
}
LEAVE_CRITICAL_SECTION(cs_nWalletUnlockTime);
// This thread owns the heap-allocated timeout passed in by the spawner.
delete (int64*)parg;
}
// RPC "walletpassphrase": unlock an encrypted wallet for <timeout> seconds,
// optionally restricting the unlocked wallet to block minting only.
// Spawns a keypool top-up thread and a delayed-relock thread.
Value walletpassphrase(const Array& params, bool fHelp)
{
if (pwalletMain->IsCrypted() && (fHelp || params.size() < 2 || params.size() > 3))
throw runtime_error(
"walletpassphrase <passphrase> <timeout> [mintonly]\n"
"Stores the wallet decryption key in memory for <timeout> seconds.\n"
"mintonly is optional true/false allowing only block minting.");
if (fHelp)
return true;
if (!pwalletMain->IsCrypted())
throw JSONRPCError(RPC_WALLET_WRONG_ENC_STATE, "Error: running with an unencrypted wallet, but walletpassphrase was called.");
if (!pwalletMain->IsLocked())
throw JSONRPCError(RPC_WALLET_ALREADY_UNLOCKED, "Error: Wallet is already unlocked, use walletlock first if need to change unlock settings.");
// Note that the walletpassphrase is stored in params[0] which is not mlock()ed
SecureString strWalletPass;
strWalletPass.reserve(100);
// TODO: get rid of this .c_str() by implementing SecureString::operator=(std::string)
// Alternately, find a way to make params[0] mlock()'d to begin with.
strWalletPass = params[0].get_str().c_str();
if (strWalletPass.length() > 0)
{
if (!pwalletMain->Unlock(strWalletPass))
throw JSONRPCError(RPC_WALLET_PASSPHRASE_INCORRECT, "Error: The wallet passphrase entered was incorrect.");
}
else
throw runtime_error(
"walletpassphrase <passphrase> <timeout>\n"
"Stores the wallet decryption key in memory for <timeout> seconds.");
// Background thread refills the keypool now that keys can be decrypted.
NewThread(ThreadTopUpKeyPool, NULL);
// The relock thread takes ownership of (and deletes) pnSleepTime.
int64* pnSleepTime = new int64(params[1].get_int64());
NewThread(ThreadCleanWalletPassphrase, pnSleepTime);
// ppcoin: if user OS account compromised prevent trivial sendmoney commands
if (params.size() > 2)
fWalletUnlockMintOnly = params[2].get_bool();
else
fWalletUnlockMintOnly = false;
return Value::null;
}
// RPC "walletpassphrasechange": re-encrypt the wallet's master key under a
// new passphrase. Both passphrases must be non-empty; the old one must be
// correct.
Value walletpassphrasechange(const Array& params, bool fHelp)
{
if (pwalletMain->IsCrypted() && (fHelp || params.size() != 2))
throw runtime_error(
"walletpassphrasechange <oldpassphrase> <newpassphrase>\n"
"Changes the wallet passphrase from <oldpassphrase> to <newpassphrase>.");
if (fHelp)
return true;
if (!pwalletMain->IsCrypted())
throw JSONRPCError(RPC_WALLET_WRONG_ENC_STATE, "Error: running with an unencrypted wallet, but walletpassphrasechange was called.");
// TODO: get rid of these .c_str() calls by implementing SecureString::operator=(std::string)
// Alternately, find a way to make params[0] mlock()'d to begin with.
SecureString strOldWalletPass;
strOldWalletPass.reserve(100);
strOldWalletPass = params[0].get_str().c_str();
SecureString strNewWalletPass;
strNewWalletPass.reserve(100);
strNewWalletPass = params[1].get_str().c_str();
if (strOldWalletPass.length() < 1 || strNewWalletPass.length() < 1)
throw runtime_error(
"walletpassphrasechange <oldpassphrase> <newpassphrase>\n"
"Changes the wallet passphrase from <oldpassphrase> to <newpassphrase>.");
if (!pwalletMain->ChangeWalletPassphrase(strOldWalletPass, strNewWalletPass))
throw JSONRPCError(RPC_WALLET_PASSPHRASE_INCORRECT, "Error: The wallet passphrase entered was incorrect.");
return Value::null;
}
// RPC "walletlock": wipe the decryption key from memory and cancel any
// pending timed relock by zeroing nWalletUnlockTime under its lock.
Value walletlock(const Array& params, bool fHelp)
{
if (pwalletMain->IsCrypted() && (fHelp || params.size() != 0))
throw runtime_error(
"walletlock\n"
"Removes the wallet encryption key from memory, locking the wallet.\n"
"After calling this method, you will need to call walletpassphrase again\n"
"before being able to call any methods which require the wallet to be unlocked.");
if (fHelp)
return true;
if (!pwalletMain->IsCrypted())
throw JSONRPCError(RPC_WALLET_WRONG_ENC_STATE, "Error: running with an unencrypted wallet, but walletlock was called.");
{
// Zeroing nWalletUnlockTime also tells ThreadCleanWalletPassphrase to stop.
LOCK(cs_nWalletUnlockTime);
pwalletMain->Lock();
nWalletUnlockTime = 0;
}
return Value::null;
}
// RPC "encryptwallet": encrypt an as-yet-unencrypted wallet with the given
// passphrase, then shut the server down (see comment below on why).
Value encryptwallet(const Array& params, bool fHelp)
{
if (!pwalletMain->IsCrypted() && (fHelp || params.size() != 1))
throw runtime_error(
"encryptwallet <passphrase>\n"
"Encrypts the wallet with <passphrase>.");
if (fHelp)
return true;
if (pwalletMain->IsCrypted())
throw JSONRPCError(RPC_WALLET_WRONG_ENC_STATE, "Error: running with an encrypted wallet, but encryptwallet was called.");
// TODO: get rid of this .c_str() by implementing SecureString::operator=(std::string)
// Alternately, find a way to make params[0] mlock()'d to begin with.
SecureString strWalletPass;
strWalletPass.reserve(100);
strWalletPass = params[0].get_str().c_str();
if (strWalletPass.length() < 1)
throw runtime_error(
"encryptwallet <passphrase>\n"
"Encrypts the wallet with <passphrase>.");
if (!pwalletMain->EncryptWallet(strWalletPass))
throw JSONRPCError(RPC_WALLET_ENCRYPTION_FAILED, "Error: Failed to encrypt the wallet.");
// BDB seems to have a bad habit of writing old data into
// slack space in .dat files; that is bad if the old data is
// unencrypted private keys. So:
StartShutdown();
return "wallet encrypted; Shitcoin server stopping, restart to run with encrypted wallet. The keypool has been flushed, you need to make a new backup.";
}
// boost::apply_visitor visitor used by validateaddress/validatepubkey to
// describe each kind of CTxDestination as a JSON object fragment.
class DescribeAddressVisitor : public boost::static_visitor<Object>
{
public:
// No destination: nothing to describe.
Object operator()(const CNoDestination &dest) const { return Object(); }
// Plain key address: report the public key and its compression flag.
Object operator()(const CKeyID &keyID) const {
Object obj;
CPubKey vchPubKey;
// NOTE(review): GetPubKey's return value is not checked here — on
// failure vchPubKey is presumably left default; confirm callers only
// reach this for keys the wallet holds.
pwalletMain->GetPubKey(keyID, vchPubKey);
obj.push_back(Pair("isscript", false));
obj.push_back(Pair("pubkey", HexStr(vchPubKey.Raw())));
obj.push_back(Pair("iscompressed", vchPubKey.IsCompressed()));
return obj;
}
// P2SH address: decode the redeem script and report its type/addresses.
Object operator()(const CScriptID &scriptID) const {
Object obj;
obj.push_back(Pair("isscript", true));
CScript subscript;
pwalletMain->GetCScript(scriptID, subscript);
std::vector<CTxDestination> addresses;
txnouttype whichType;
int nRequired;
ExtractDestinations(subscript, whichType, addresses, nRequired);
obj.push_back(Pair("script", GetTxnOutputType(whichType)));
Array a;
BOOST_FOREACH(const CTxDestination& addr, addresses)
a.push_back(CBitcoinAddress(addr).ToString());
obj.push_back(Pair("addresses", a));
if (whichType == TX_MULTISIG)
obj.push_back(Pair("sigsrequired", nRequired));
return obj;
}
};
// RPC "validateaddress": report whether the string is a valid address and,
// if it belongs to this wallet, key/script details and its account label.
Value validateaddress(const Array& params, bool fHelp)
{
if (fHelp || params.size() != 1)
throw runtime_error(
"validateaddress <Shitcoinaddress>\n"
"Return information about <Shitcoinaddress>.");
CBitcoinAddress address(params[0].get_str());
bool isValid = address.IsValid();
Object ret;
ret.push_back(Pair("isvalid", isValid));
if (isValid)
{
CTxDestination dest = address.Get();
string currentAddress = address.ToString();
ret.push_back(Pair("address", currentAddress));
bool fMine = IsMine(*pwalletMain, dest);
ret.push_back(Pair("ismine", fMine));
if (fMine) {
// Merge in key/script details for addresses we own.
Object detail = boost::apply_visitor(DescribeAddressVisitor(), dest);
ret.insert(ret.end(), detail.begin(), detail.end());
}
if (pwalletMain->mapAddressBook.count(dest))
ret.push_back(Pair("account", pwalletMain->mapAddressBook[dest]));
}
return ret;
}
// RPC "validatepubkey": validate a hex-encoded public key and report its
// derived address, ownership, and compression flag.
// NOTE(review): the size check admits a second parameter that is never
// used, while the help text documents only one — confirm intent.
Value validatepubkey(const Array& params, bool fHelp)
{
if (fHelp || !params.size() || params.size() > 2)
throw runtime_error(
"validatepubkey <Shitcoinpubkey>\n"
"Return information about <Shitcoinpubkey>.");
std::vector<unsigned char> vchPubKey = ParseHex(params[0].get_str());
CPubKey pubKey(vchPubKey);
bool isValid = pubKey.IsValid();
bool isCompressed = pubKey.IsCompressed();
// Derive the address (hash160 of the key) the pubkey corresponds to.
CKeyID keyID = pubKey.GetID();
CBitcoinAddress address;
address.Set(keyID);
Object ret;
ret.push_back(Pair("isvalid", isValid));
if (isValid)
{
CTxDestination dest = address.Get();
string currentAddress = address.ToString();
ret.push_back(Pair("address", currentAddress));
bool fMine = IsMine(*pwalletMain, dest);
ret.push_back(Pair("ismine", fMine));
ret.push_back(Pair("iscompressed", isCompressed));
if (fMine) {
// Merge in key details for keys we own.
Object detail = boost::apply_visitor(DescribeAddressVisitor(), dest);
ret.insert(ret.end(), detail.begin(), detail.end());
}
if (pwalletMain->mapAddressBook.count(dest))
ret.push_back(Pair("account", pwalletMain->mapAddressBook[dest]));
}
return ret;
}
// ppcoin: reserve balance from being staked for network protection
// RPC "reservebalance": set or clear the -reservebalance soft setting
// (stored in mapArgs, not persisted), then echo the current setting.
Value reservebalance(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 2)
throw runtime_error(
"reservebalance [<reserve> [amount]]\n"
"<reserve> is true or false to turn balance reserve on or off.\n"
"<amount> is a real and rounded to cent.\n"
"Set reserve amount not participating in network protection.\n"
"If no parameters provided current setting is printed.\n");
if (params.size() > 0)
{
bool fReserve = params[0].get_bool();
if (fReserve)
{
if (params.size() == 1)
throw runtime_error("must provide amount to reserve balance.\n");
int64 nAmount = AmountFromValue(params[1]);
nAmount = (nAmount / CENT) * CENT; // round to cent
if (nAmount < 0)
throw runtime_error("amount cannot be negative.\n");
mapArgs["-reservebalance"] = FormatMoney(nAmount).c_str();
}
else
{
if (params.size() > 1)
throw runtime_error("cannot specify amount to turn off reserve.\n");
// Turning the reserve off is modelled as reserving zero.
mapArgs["-reservebalance"] = "0";
}
}
// Always report the (possibly just-updated) current setting.
Object result;
int64 nReserveBalance = 0;
if (mapArgs.count("-reservebalance") && !ParseMoney(mapArgs["-reservebalance"], nReserveBalance))
throw runtime_error("invalid reserve balance amount\n");
result.push_back(Pair("reserve", (nReserveBalance > 0)));
result.push_back(Pair("amount", ValueFromAmount(nReserveBalance)));
return result;
}
// ppcoin: check wallet integrity
// RPC "checkwallet": scan for mismatched spent-coin flags without fixing
// anything (FixSpentCoins in check-only mode) and report what was found.
Value checkwallet(const Array& params, bool fHelp)
{
if (params.size() > 0 || fHelp)
throw runtime_error(
"checkwallet\n"
"Check wallet for integrity.\n");
int nBadSpends;
int64 nAmountInQuestion;
// Final 'true' argument = check only, do not repair.
pwalletMain->FixSpentCoins(nBadSpends, nAmountInQuestion, true);
Object result;
if (nBadSpends == 0)
result.push_back(Pair("wallet check passed", true));
else
{
result.push_back(Pair("mismatched spent coins", nBadSpends));
result.push_back(Pair("amount in question", ValueFromAmount(nAmountInQuestion)));
}
return result;
}
// ppcoin: repair wallet
// RPC "repairwallet": same scan as checkwallet, but FixSpentCoins runs in
// repair mode and corrects the mismatched spent-coin flags it finds.
Value repairwallet(const Array& params, bool fHelp)
{
if (params.size() > 0 || fHelp)
throw runtime_error(
"repairwallet\n"
"Repair wallet if checkwallet reports any problem.\n");
int nBadSpends;
int64 nAmountRepaired;
pwalletMain->FixSpentCoins(nBadSpends, nAmountRepaired);
Object result;
if (nBadSpends == 0)
result.push_back(Pair("wallet check passed", true));
else
{
result.push_back(Pair("mismatched spent coins", nBadSpends));
result.push_back(Pair("amount affected by repair", ValueFromAmount(nAmountRepaired)));
}
return result;
}
// Shitcoin: resend unconfirmed wallet transactions
// RPC "resendtx": rebroadcast the wallet's unconfirmed transactions to peers.
Value resendtx(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 1)
throw runtime_error(
"resendtx\n"
"Re-send unconfirmed transactions.\n"
);
ResendWalletTransactions();
return Value::null;
}
// ppcoin: make a public-private key pair
// RPC "makekeypair": generate a fresh (uncompressed) keypair and return both
// halves hex-encoded. [prefix], if given, is matched against the start of
// the hex public key; generation retries (bounded) until it matches.
Value makekeypair(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 1)
throw runtime_error(
"makekeypair [prefix]\n"
"Make a public/private key pair.\n"
"[prefix] is optional preferred prefix for the public key.\n");
string strPrefix = "";
if (params.size() > 0)
strPrefix = params[0].get_str();
CKey key;
// Fix: strPrefix was previously parsed but never used. Retry key
// generation until the hex pubkey starts with the requested prefix,
// bounded so an unlucky/impossible prefix cannot spin forever.
// An empty prefix matches on the first iteration.
int nCount = 0;
do
{
key.MakeNewKey(false);
nCount++;
} while (nCount < 10000 && strPrefix != HexStr(key.GetPubKey().Raw()).substr(0, strPrefix.size()));
// Give up (return null) if no matching key was found within the bound.
if (strPrefix != HexStr(key.GetPubKey().Raw()).substr(0, strPrefix.size()))
return Value::null;
CPrivKey vchPrivKey = key.GetPrivKey();
Object result;
result.push_back(Pair("PrivateKey", HexStr<CPrivKey::iterator>(vchPrivKey.begin(), vchPrivKey.end())));
result.push_back(Pair("PublicKey", HexStr(key.GetPubKey().Raw())));
return result;
}
| {
"content_hash": "f1d7f57e11bdfe271c030b7c7f72b4b1",
"timestamp": "",
"source": "github",
"line_count": 1731,
"max_line_length": 157,
"avg_line_length": 34.9052570768342,
"alnum_prop": 0.6244021118485295,
"repo_name": "shitcoinproject/Shitcoin",
"id": "e9d212d1af819a620560d05a243aebe83692a055",
"size": "60759",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/rpcwallet.cpp",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "61562"
},
{
"name": "C",
"bytes": "7080"
},
{
"name": "C++",
"bytes": "1666678"
},
{
"name": "Makefile",
"bytes": "4826"
},
{
"name": "NSIS",
"bytes": "6041"
},
{
"name": "Objective-C",
"bytes": "858"
},
{
"name": "Objective-C++",
"bytes": "3537"
},
{
"name": "Python",
"bytes": "41580"
},
{
"name": "QMake",
"bytes": "12238"
},
{
"name": "Roff",
"bytes": "12684"
},
{
"name": "Shell",
"bytes": "1026"
}
],
"symlink_target": ""
} |
import * as glob from "glob";
import * as path from "path";
import * as fs from "fs";
// Root of the component sources; theme/ is excluded from the migration.
const componentsRoot = path.join("src", "lib");
const componentDirectories = glob.sync(path.join(componentsRoot, "!(theme)", "/"));

/**
 * One-off demo module rename:
 * - Iterate through example subfolders in themis_components
 * - coffee.coffee ->
 *   - Update from angular.module('thDemo', ['ThemisComponents'])
 *     to angular.module("{component}Demo")
 * - html.html ->
 *   - Update ng-app attribute to the module defined above
 */
function updateDemoModules() {
  // For each component
  componentDirectories.forEach(componentPath => {
    const componentName = path.basename(componentPath);
    const exampleDirectory = path.join(componentsRoot, componentName, "examples");
    if (!fs.existsSync(exampleDirectory) || !fs.statSync(exampleDirectory).isDirectory()) {
      return;
    }

    // For each example, rewrite the demo template and controller in place.
    const exampleDirectories = glob.sync(path.join(exampleDirectory, "*", "/"));
    exampleDirectories.forEach((examplePath, index) => {
      const htmlPath = path.join(examplePath, "html.html");
      const coffeePath = path.join(examplePath, "coffee.coffee");
      // Controller names are numbered per example (1-based) within a component.
      const controllerName = `${componentName}DemoCtrl${index + 1}`;

      // Template: point ng-app at the per-component demo module and rename
      // the controller reference to the numbered one.
      if (fs.existsSync(htmlPath)) {
        const htmlFile = fs.readFileSync(htmlPath, "utf8");
        const updatedHtmlFile = htmlFile
          .replace(/ng-app=\"[a-zA-Z]+\"/, `ng-app="${componentName}Demo"`)
          .replace(/ng-controller=\"(DemoCtrl|DemoController)\s/,
            `ng-controller="${controllerName} `);
        if (htmlFile !== updatedHtmlFile) {
          fs.writeFile(htmlPath, updatedHtmlFile, err => {
            if (err) {
              console.log(err);
              // Fix: don't fall through and report success on a failed write.
              return;
            }
            console.log(`${htmlPath} updated.`);
          });
        } else {
          console.log(`*** ${htmlPath} was NOT updated. ***`);
        }
      }

      // Controller: rename the module registration and the controller itself.
      if (fs.existsSync(coffeePath)) {
        const coffeeFile = fs.readFileSync(coffeePath, "utf8");
        const updatedCoffeeFile = coffeeFile
          .replace(/angular\.module(\(|\s)('|")[a-zA-Z]+('|"), \[('|")ThemisComponents('|")\]\)?/,
            `angular.module("${componentName}Demo")`)
          .replace(/\.controller(\(|\s)('|")(DemoCtrl|DemoController)('|")(\)|\s)?/,
            `.controller "${controllerName}"`);
        if (coffeeFile !== updatedCoffeeFile) {
          fs.writeFile(coffeePath, updatedCoffeeFile, err => {
            if (err) {
              console.log(err);
              // Fix: don't fall through and report success on a failed write.
              return;
            }
            console.log(`${coffeePath} updated.`);
          });
        } else {
          console.log(`*** ${coffeePath} was NOT updated.***`);
        }
      }
    });
  });
}

updateDemoModules();
| {
"content_hash": "0271654a79ca2055b0cf648b75361cff",
"timestamp": "",
"source": "github",
"line_count": 78,
"max_line_length": 98,
"avg_line_length": 35.52564102564103,
"alnum_prop": 0.583182966438109,
"repo_name": "seanhealy/lib-themisui",
"id": "2435aebdbc92daf990a8070d5880b944bc46a0da",
"size": "2771",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tools/generators/_archived/exampleModulesGenerator.ts",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "104986"
},
{
"name": "CoffeeScript",
"bytes": "497906"
},
{
"name": "HTML",
"bytes": "106359"
},
{
"name": "JavaScript",
"bytes": "9812"
},
{
"name": "TypeScript",
"bytes": "4604"
}
],
"symlink_target": ""
} |
package org.spongepowered.common.item.inventory.lens.comp;
/**
 * A lens over a two-dimensional grid inventory that exposes its rows and
 * columns as child lenses.
 *
 * @param <TInventory> the inventory type this lens operates on
 * @param <TStack> the stack type contained in the inventory
 */
public interface GridInventoryLens<TInventory, TStack> extends Inventory2DLens<TInventory, TStack> {

    /**
     * Returns the lens for the row at the given index.
     *
     * @param row row index — presumably zero-based; confirm against implementations
     * @return the lens covering that row
     */
    // Idiom fix: interface methods are implicitly public and abstract, so the
    // redundant modifiers are dropped.
    InventoryRowLens<TInventory, TStack> getRow(int row);

    /**
     * Returns the lens for the column at the given index.
     *
     * @param column column index — presumably zero-based; confirm against implementations
     * @return the lens covering that column
     */
    InventoryColumnLens<TInventory, TStack> getColumn(int column);

}
| {
"content_hash": "8a3732df45b57289ac6716ce83d48a41",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 100,
"avg_line_length": 33.2,
"alnum_prop": 0.786144578313253,
"repo_name": "kashike/SpongeCommon",
"id": "43bb15b0bca1360dc73d70f1cb030c3e3a12e841",
"size": "1579",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "src/main/java/org/spongepowered/common/item/inventory/lens/comp/GridInventoryLens.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "7079904"
},
{
"name": "Shell",
"bytes": "81"
}
],
"symlink_target": ""
} |
# Widens users.uniqueid and adds unique indexes on uniqueid and email.
class AddUserIndexes < ActiveRecord::Migration
  def self.up
    # Increase username limit from 15 to 100 characters.
    change_column(:users, :uniqueid, :string, :limit => 100)
    add_index(:users, :uniqueid, :unique => true)
    add_index(:users, :email, :unique => true)
  end

  def self.down
    remove_index(:users, :uniqueid)
    remove_index(:users, :email)
    # Bug fix: also revert the column widening performed in +up+ so the
    # migration is fully reversible (the original +down+ left the column
    # at 100 characters). Note: rolling back may truncate/reject values
    # longer than 15 characters, matching the pre-migration schema.
    change_column(:users, :uniqueid, :string, :limit => 15)
  end
end
| {
"content_hash": "650d5164467d6bd06387c8ebd01fae1d",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 60,
"avg_line_length": 27.071428571428573,
"alnum_prop": 0.6701846965699209,
"repo_name": "mikehelmick/CascadeLMS",
"id": "db788b031d8e6d11c961c7d839da242c50d0855d",
"size": "379",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "db/migrate/20130209204418_add_user_indexes.rb",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ApacheConf",
"bytes": "1338"
},
{
"name": "CSS",
"bytes": "62822"
},
{
"name": "HTML",
"bytes": "863454"
},
{
"name": "JavaScript",
"bytes": "354708"
},
{
"name": "Ruby",
"bytes": "1094030"
},
{
"name": "Shell",
"bytes": "861"
}
],
"symlink_target": ""
} |
var passport = require("passport");
var GoogleAuth = require("passport-google-oauth20").Strategy;
var config = require("../config");
var mongoose=require('mongoose')
const User = mongoose.model('users')
// Store only the Mongo document id in the session cookie.
passport.serializeUser(function(user, done) {
  done(null, user.id);
});

// Re-hydrate the full user document from the id stored in the session.
// Bug fix: route query failures to done(err) — the original promise chain
// had no rejection handler, so a failed lookup left the request hanging
// with an unhandled rejection.
passport.deserializeUser(function(id, done) {
  User.findById(id)
    .then(function(user) {
      done(null, user);
    })
    .catch(function(err) {
      done(err);
    });
});

// define gogole strategy with passport
passport.use(
  new GoogleAuth(
    {
      clientID: config.clientID,
      clientSecret: config.clientSecret,
      callbackURL: "/auth/google/callback",
      proxy: true
    },
    // callback after getting authenticated by google
    async function(accessToken, refreshToken, profile, done) {
      try {
        // Reuse the existing account for this Google id, or create one.
        const existingUser = await User.findOne({ googleID: profile.id });
        if (existingUser) {
          done(null, existingUser);
        } else {
          const user = await new User({ googleID: profile.id }).save();
          done(null, user);
        }
      } catch (err) {
        // Bug fix: surface database errors to passport instead of an
        // unhandled promise rejection from the async callback.
        done(err);
      }
    }
  )
);
"content_hash": "3ad489521cd3df5e7eccee921bf118b9",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 110,
"avg_line_length": 26.973684210526315,
"alnum_prop": 0.64,
"repo_name": "sapta94/SurveyApp",
"id": "eca69288a51eaa5a00d75c67d7fea56c27deca13",
"size": "1025",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "services/passport.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1681"
},
{
"name": "JavaScript",
"bytes": "18689"
}
],
"symlink_target": ""
} |
package opennlp.model;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * Collecting event and context counts by making two passes over the events. The
 * first pass determines which contexts will be used by the model, and the
 * second pass creates the events in memory containing only the contexts which
 * will be used. This greatly reduces the amount of memory required for storing
 * the events. During the first pass a temporary event file is created which
 * is read during the second pass.
 */
public class TwoPassDataIndexer extends AbstractDataIndexer {

  /**
   * One argument constructor for DataIndexer which calls the two argument
   * constructor assuming no cutoff.
   *
   * @param eventStream An Event[] which contains the a list of all the Events
   *               seen in the training data.
   */
  public TwoPassDataIndexer(EventStream eventStream) throws IOException {
    this(eventStream, 0);
  }

  /**
   * Two argument constructor which sorts and merges events by default.
   *
   * @param eventStream the events seen in the training data.
   * @param cutoff minimum number of observations for a predicate to be kept.
   */
  public TwoPassDataIndexer(EventStream eventStream, int cutoff) throws IOException {
    this(eventStream, cutoff, true);
  }

  /**
   * Two argument constructor for DataIndexer.
   *
   * @param eventStream An Event[] which contains the a list of all the Events
   *               seen in the training data.
   * @param cutoff The minimum number of times a predicate must have been
   *               observed in order to be included in the model.
   * @param sort whether the collected events should be sorted and merged.
   */
  public TwoPassDataIndexer(EventStream eventStream, int cutoff, boolean sort) throws IOException {
    Map<String, Integer> predicateIndex = new HashMap<String, Integer>();
    List<ComparableEvent> eventsToCompare;
    System.out.println("Indexing events using cutoff of " + cutoff + "\n");
    System.out.print("\tComputing event counts... ");
    try {
      // First pass: stream every event out to a temporary file while
      // counting predicate occurrences.
      File tmp = File.createTempFile("events", null);
      tmp.deleteOnExit();
      Writer osw = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(tmp), "UTF8"));
      int numEvents = computeEventCounts(eventStream, osw, predicateIndex, cutoff);
      System.out.println("done. " + numEvents + " events");
      System.out.print("\tIndexing... ");
      FileEventStream fes = new FileEventStream(tmp);
      try {
        // Second pass: re-read the events, keeping only predicates that
        // survived the cutoff.
        eventsToCompare = index(numEvents, fes, predicateIndex);
      } finally {
        fes.close();
      }
      // done with predicates
      predicateIndex = null;
      tmp.delete();
      System.out.println("done.");

      if (sort) {
        System.out.print("Sorting and merging events... ");
      }
      else {
        System.out.print("Collecting events... ");
      }
      sortAndMerge(eventsToCompare, sort);
      System.out.println("Done indexing.");
    }
    catch (IOException e) {
      // NOTE(review): failures are only logged here, leaving the indexer
      // partially initialized even though the constructor declares
      // IOException. Preserved as-is for backward compatibility.
      System.err.println(e);
    }
  }

  /**
   * Reads events from <tt>eventStream</tt> into a linked list. The
   * predicates associated with each event are counted and any which
   * occur at least <tt>cutoff</tt> times are added to the
   * <tt>predicatesInOut</tt> map along with a unique integer index.
   *
   * @param eventStream an <code>EventStream</code> value
   * @param eventStore a writer to which the events are written to for later processing.
   * @param predicatesInOut a <code>TObjectIntHashMap</code> value
   * @param cutoff an <code>int</code> value
   * @return the number of events read from the stream.
   */
  private int computeEventCounts(EventStream eventStream, Writer eventStore, Map<String, Integer> predicatesInOut, int cutoff) throws IOException {
    Map<String, Integer> counter = new HashMap<String, Integer>();
    int eventCount = 0;
    Set<String> predicateSet = new HashSet<String>();
    try {
      while (eventStream.hasNext()) {
        Event ev = eventStream.next();
        eventCount++;
        eventStore.write(FileEventStream.toLine(ev));
        String[] ec = ev.getContext();
        update(ec, predicateSet, counter, cutoff);
      }
    } finally {
      // Bug fix: close the temporary event store even when reading or
      // writing fails, so the temp-file handle is not leaked.
      eventStore.close();
    }
    // Assign a dense index to every predicate that survived the cutoff.
    predCounts = new int[predicateSet.size()];
    int index = 0;
    for (Iterator<String> pi = predicateSet.iterator(); pi.hasNext(); index++) {
      String predicate = pi.next();
      predCounts[index] = counter.get(predicate);
      predicatesInOut.put(predicate, index);
    }
    return eventCount;
  }

  /**
   * Converts the raw events into {@link ComparableEvent}s containing only
   * the indexed predicates, assigning outcome ids as new outcomes appear.
   *
   * @param numEvents number of events expected on the stream (used to size the result list).
   * @param es the stream of events written during the first pass.
   * @param predicateIndex predicate name to index mapping built during the first pass.
   * @return the indexed events; events with no surviving predicates are dropped.
   */
  private List<ComparableEvent> index(int numEvents, EventStream es, Map<String, Integer> predicateIndex) throws IOException {
    Map<String, Integer> omap = new HashMap<String, Integer>();

    int outcomeCount = 0;
    List<ComparableEvent> eventsToCompare = new ArrayList<ComparableEvent>(numEvents);
    List<Integer> indexedContext = new ArrayList<Integer>();

    while (es.hasNext()) {
      Event ev = es.next();
      String[] econtext = ev.getContext();
      ComparableEvent ce;

      int ocID;
      String oc = ev.getOutcome();

      // Intern the outcome label to a small integer id.
      if (omap.containsKey(oc)) {
        ocID = omap.get(oc);
      }
      else {
        ocID = outcomeCount++;
        omap.put(oc, ocID);
      }

      // Keep only predicates that survived the cutoff in the first pass.
      for (String pred : econtext) {
        if (predicateIndex.containsKey(pred)) {
          indexedContext.add(predicateIndex.get(pred));
        }
      }

      // drop events with no active features
      if (indexedContext.size() > 0) {
        int[] cons = new int[indexedContext.size()];
        for (int ci = 0; ci < cons.length; ci++) {
          cons[ci] = indexedContext.get(ci);
        }
        ce = new ComparableEvent(ocID, cons);
        eventsToCompare.add(ce);
      }
      else {
        System.err.println("Dropped event " + ev.getOutcome() + ":" + Arrays.asList(ev.getContext()));
      }
      // recycle the TIntArrayList
      indexedContext.clear();
    }
    outcomeLabels = toIndexedStringArray(omap);
    predLabels = toIndexedStringArray(predicateIndex);
    return eventsToCompare;
  }

}
| {
"content_hash": "7d1405e8c0cee22b896c424b3a8156e8",
"timestamp": "",
"source": "github",
"line_count": 175,
"max_line_length": 146,
"avg_line_length": 34.18857142857143,
"alnum_prop": 0.6647166973090423,
"repo_name": "SowaLabs/OpenNLP",
"id": "fe2b3c39be1ad2935a6e62cd990baddbaef03be2",
"size": "6792",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "opennlp-maxent/src/main/java/opennlp/model/TwoPassDataIndexer.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "1740"
},
{
"name": "Java",
"bytes": "2683666"
},
{
"name": "Shell",
"bytes": "4872"
},
{
"name": "XSLT",
"bytes": "1163"
}
],
"symlink_target": ""
} |
package com.facebook.buck.distributed;
import static org.easymock.EasyMock.anyLong;
import static org.easymock.EasyMock.anyObject;
import static org.easymock.EasyMock.eq;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.expectLastCall;
import static org.easymock.EasyMock.not;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;
import com.facebook.buck.distributed.thrift.BuildJobStateFileHashEntry;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.MoreExecutors;
import java.io.IOException;
import java.nio.file.Path;
import java.util.Optional;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.easymock.EasyMock;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
/**
 * Unit test for {@code MultiSourceContentsProvider}. Verifies that a file is
 * served by the first provider that has it (inline, then local FS, then
 * server), that a server hit is written back into the local FS cache, and
 * that the stats tracker records exactly one local and one remote
 * materialization for this scenario.
 */
public class MultiSourceContentsProviderTest {

  // Upper bound on how long each materialization future may take in this test.
  private static final long FUTURE_GET_TIMEOUT_SECONDS = 5;

  @Rule public TemporaryFolder tempDir = new TemporaryFolder();

  // EasyMock stand-ins for the three content sources and the stats collector.
  private InlineContentsProvider mockInlineProvider;
  private LocalFsContentsProvider mockLocalFsContentsProvider;
  private ServerContentsProvider mockServerContentsProvider;
  private FileMaterializationStatsTracker mockStatsTracker;

  @Before
  public void setUp() {
    mockInlineProvider = EasyMock.createMock(InlineContentsProvider.class);
    mockLocalFsContentsProvider = EasyMock.createMock(LocalFsContentsProvider.class);
    mockServerContentsProvider = EasyMock.createMock(ServerContentsProvider.class);
    mockStatsTracker = EasyMock.createMock(FileMaterializationStatsTracker.class);
  }

  /**
   * Scenario: entry1 is served by the inline provider, entry2 by the local FS
   * provider, entry3 by the server (and cached locally), entry4 by nobody.
   * The expected call counts encode the fall-through order: the inline
   * provider sees all four entries, the local FS provider the three inline
   * misses, and the server provider the remaining two.
   */
  @Test
  public void testOrderOfContentProvidersAndStatsTracking()
      throws IOException, ExecutionException, InterruptedException, TimeoutException {
    Path targetAbsPath = tempDir.getRoot().toPath().resolve("my_file.txt");
    // Four distinct hash entries, one per provider-outcome being exercised.
    BuildJobStateFileHashEntry entry1 = new BuildJobStateFileHashEntry().setSha1("1234");
    BuildJobStateFileHashEntry entry2 = new BuildJobStateFileHashEntry().setSha1("2345");
    BuildJobStateFileHashEntry entry3 = new BuildJobStateFileHashEntry().setSha1("3456");
    BuildJobStateFileHashEntry entry4 = new BuildJobStateFileHashEntry().setSha1("4567");

    // Let entry1 be a hit in the InlineProvider, and the rest be misses.
    expect(mockInlineProvider.materializeFileContentsAsync(entry1, targetAbsPath))
        .andReturn(Futures.immediateFuture(true))
        .once();
    expect(
            mockInlineProvider.materializeFileContentsAsync(
                anyObject(BuildJobStateFileHashEntry.class), eq(targetAbsPath)))
        .andReturn(Futures.immediateFuture(false))
        .times(3);

    // Let entry2 be a hit in the LocalFsProvider, and the rest be misses.
    expect(mockLocalFsContentsProvider.materializeFileContentsAsync(entry2, targetAbsPath))
        .andReturn(Futures.immediateFuture(true))
        .once();
    expect(
            mockLocalFsContentsProvider.materializeFileContentsAsync(
                not(eq(entry2)), eq(targetAbsPath)))
        .andReturn(Futures.immediateFuture(false))
        .times(2);

    // Let entry3 be a hit in the ServerContentsProvider, and the rest be misses.
    // As a result, entry3 will also be cached into the LocalFsProvider.
    expect(mockServerContentsProvider.materializeFileContentsAsync(entry3, targetAbsPath))
        .andReturn(Futures.immediateFuture(true))
        .once();
    mockLocalFsContentsProvider.writeFileAndGetInputStream(entry3, targetAbsPath);
    expectLastCall().once();
    expect(
            mockServerContentsProvider.materializeFileContentsAsync(
                not(eq(entry3)), eq(targetAbsPath)))
        .andReturn(Futures.immediateFuture(false))
        .times(1);

    // Closing the composite provider must close all three delegates.
    mockInlineProvider.close();
    expectLastCall().once();
    mockLocalFsContentsProvider.close();
    expectLastCall().once();
    mockServerContentsProvider.close();
    expectLastCall().once();

    // Only one file from LocalFsProvider.
    mockStatsTracker.recordLocalFileMaterialized();
    expectLastCall().once();
    // Only one file from ServerContentsProvider.
    mockStatsTracker.recordRemoteFileMaterialized(anyLong());
    expectLastCall().once();

    replay(mockInlineProvider);
    replay(mockLocalFsContentsProvider);
    replay(mockServerContentsProvider);
    replay(mockStatsTracker);

    // try-with-resources triggers the close() expectations above.
    try (MultiSourceContentsProvider provider =
        new MultiSourceContentsProvider(
            mockInlineProvider,
            Optional.of(mockLocalFsContentsProvider),
            mockServerContentsProvider,
            MoreExecutors.newDirectExecutorService(),
            mockStatsTracker)) {
      provider
          .materializeFileContentsAsync(entry1, targetAbsPath)
          .get(FUTURE_GET_TIMEOUT_SECONDS, TimeUnit.SECONDS);
      provider
          .materializeFileContentsAsync(entry2, targetAbsPath)
          .get(FUTURE_GET_TIMEOUT_SECONDS, TimeUnit.SECONDS);
      provider
          .materializeFileContentsAsync(entry3, targetAbsPath)
          .get(FUTURE_GET_TIMEOUT_SECONDS, TimeUnit.SECONDS);
      provider
          .materializeFileContentsAsync(entry4, targetAbsPath)
          .get(FUTURE_GET_TIMEOUT_SECONDS, TimeUnit.SECONDS);
    }

    verify(mockInlineProvider);
    verify(mockLocalFsContentsProvider);
    verify(mockServerContentsProvider);
    verify(mockStatsTracker);
  }
}
| {
"content_hash": "0ff7dcb9069f2aaa61f04d748867148b",
"timestamp": "",
"source": "github",
"line_count": 135,
"max_line_length": 91,
"avg_line_length": 40.4,
"alnum_prop": 0.753025302530253,
"repo_name": "rmaz/buck",
"id": "91c49d2c5c85b120d9c7f9af25c0edb478ebe00a",
"size": "6059",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "test/com/facebook/buck/distributed/MultiSourceContentsProviderTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "1585"
},
{
"name": "Batchfile",
"bytes": "3875"
},
{
"name": "C",
"bytes": "281295"
},
{
"name": "C#",
"bytes": "237"
},
{
"name": "C++",
"bytes": "18966"
},
{
"name": "CSS",
"bytes": "56106"
},
{
"name": "D",
"bytes": "1017"
},
{
"name": "Dockerfile",
"bytes": "2081"
},
{
"name": "Go",
"bytes": "10020"
},
{
"name": "Groovy",
"bytes": "3362"
},
{
"name": "HTML",
"bytes": "11252"
},
{
"name": "Haskell",
"bytes": "1008"
},
{
"name": "IDL",
"bytes": "480"
},
{
"name": "Java",
"bytes": "29307150"
},
{
"name": "JavaScript",
"bytes": "938678"
},
{
"name": "Kotlin",
"bytes": "25755"
},
{
"name": "Lex",
"bytes": "12772"
},
{
"name": "MATLAB",
"bytes": "47"
},
{
"name": "Makefile",
"bytes": "1916"
},
{
"name": "OCaml",
"bytes": "4935"
},
{
"name": "Objective-C",
"bytes": "176972"
},
{
"name": "Objective-C++",
"bytes": "34"
},
{
"name": "PowerShell",
"bytes": "2244"
},
{
"name": "Prolog",
"bytes": "2087"
},
{
"name": "Python",
"bytes": "2075938"
},
{
"name": "Roff",
"bytes": "1207"
},
{
"name": "Rust",
"bytes": "5716"
},
{
"name": "Scala",
"bytes": "5082"
},
{
"name": "Shell",
"bytes": "77999"
},
{
"name": "Smalltalk",
"bytes": "194"
},
{
"name": "Swift",
"bytes": "11393"
},
{
"name": "Thrift",
"bytes": "48632"
},
{
"name": "Yacc",
"bytes": "323"
}
],
"symlink_target": ""
} |
"""
Mobile IP.
"""
from scapy.fields import ByteEnumField, ByteField, IPField, LongField, \
ShortField, XByteField
from scapy.packet import Packet, bind_layers, bind_bottom_up
from scapy.layers.inet import IP, UDP
class MobileIP(Packet):
    # Base Mobile IP registration header: a single message-type byte.
    # Type 1 = Registration Request (RRQ), 3 = Registration Reply (RRP).
    name = "Mobile IP (RFC3344)"
    fields_desc = [ByteEnumField("type", 1, {1: "RRQ", 3: "RRP"})]
class MobileIPRRQ(Packet):
    # Registration Request (type 1): flags byte, requested registration
    # lifetime, the mobile node's home address, its home agent address,
    # the care-of address being registered, and a 64-bit identification
    # field used to match requests with replies.
    name = "Mobile IP Registration Request (RFC3344)"
    fields_desc = [XByteField("flags", 0),
                   ShortField("lifetime", 180),
                   IPField("homeaddr", "0.0.0.0"),
                   IPField("haaddr", "0.0.0.0"),
                   IPField("coaddr", "0.0.0.0"),
                   LongField("id", 0), ]
class MobileIPRRP(Packet):
    # Registration Reply (type 3): result code, granted lifetime, home
    # address, home agent address, and the identification echoed from the
    # matching request. No care-of address field, unlike the request.
    name = "Mobile IP Registration Reply (RFC3344)"
    fields_desc = [ByteField("code", 0),
                   ShortField("lifetime", 180),
                   IPField("homeaddr", "0.0.0.0"),
                   IPField("haaddr", "0.0.0.0"),
                   LongField("id", 0), ]
class MobileIPTunnelData(Packet):
    # Tunnel Data message (RFC 3519): next-header protocol number
    # (4 = encapsulated IP, see the bind_layers call below) plus a
    # reserved short.
    name = "Mobile IP Tunnel Data Message (RFC3519)"
    fields_desc = [ByteField("nexthdr", 4),
                   ShortField("res", 0)]
# Mobile IP registration traffic rides UDP port 434: dissect packets with
# either source or destination port 434, and default to 434/434 when building.
bind_bottom_up(UDP, MobileIP, dport=434)
bind_bottom_up(UDP, MobileIP, sport=434)
bind_layers(UDP, MobileIP, sport=434, dport=434)
# Dispatch the payload on the message-type byte: 1 = Registration Request,
# 3 = Registration Reply, 4 = Tunnel Data (RFC 3519). A Tunnel Data message
# with nexthdr 4 carries a plain IP packet (IP-in-IP).
bind_layers(MobileIP, MobileIPRRQ, type=1)
bind_layers(MobileIP, MobileIPRRP, type=3)
bind_layers(MobileIP, MobileIPTunnelData, type=4)
bind_layers(MobileIPTunnelData, IP, nexthdr=4)
| {
"content_hash": "9234699bd69429599dd49c3f5c855c20",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 72,
"avg_line_length": 32.12765957446808,
"alnum_prop": 0.6059602649006622,
"repo_name": "4shadoww/hakkuframework",
"id": "67c2ce2059d841ab1a91963e2f7d5e0ca8ed93a9",
"size": "1704",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "lib/scapy/layers/mobileip.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "7992059"
}
],
"symlink_target": ""
} |
<!doctype html>
<html>
<head>
<title>aline.ly</title>
<link rel="stylesheet" href="https://fonts.googleapis.com/icon?family=Material+Icons|Didact+Gothic">
<link rel="stylesheet" href="https://code.getmdl.io/1.3.0/material.light_blue-green.min.css">
<script defer src="https://code.getmdl.io/1.3.0/material.min.js"></script>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/2.1.3/jquery.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/clipboard@1/dist/clipboard.min.js"></script>
<link rel="stylesheet" type="text/css" href="/assets/styles.css">
<script src="https://www.gstatic.com/firebasejs/4.3.1/firebase.js"></script>
<script>
const PHYSICAL_PERSON_STR = '__PHYSICAL_PERSON__';

$(document).ready(function() {
  // Returns the decoded value of a query-string parameter, or null when
  // the parameter is absent.
  function getQueryVariable(variable) {
    const query = window.location.search.substring(1);
    const vars = query.split('&');
    for (let i = 0; i < vars.length; i++) {
      const pair = vars[i].split('=');
      if (decodeURIComponent(pair[0]) == variable) {
        return decodeURIComponent(pair[1]);
      }
    }
    console.log('Query variable %s not found', variable);
    return null;
  }

  const currentLine = getQueryVariable('line_id');

  // Already inside a ready handler, so set the copy-link target directly
  // (the original registered a redundant nested ready handler here).
  $('.btn').attr('data-clipboard-text', `aline.ly/${currentLine}`);
  new Clipboard('.btn');

  // Initialize Firebase
  var config = {
    apiKey: "AIzaSyCXnMBORL9lMwczlXaSheTBcLhs2xE6HQ4",
    authDomain: "mhax-34a9e.firebaseapp.com",
    databaseURL: "https://mhax-34a9e.firebaseio.com",
    projectId: "mhax-34a9e",
    storageBucket: "mhax-34a9e.appspot.com",
    messagingSenderId: "919352994315"
  };
  firebase.initializeApp(config);

  const auth = firebase.auth();
  const db = firebase.database();
  const lineRef = db.ref('/server').child('lines').child(currentLine);
  const lines = lineRef.child('in_line');
  const upNext = lineRef.child('up_next');

  auth.signInAnonymously().then((result) => {
    console.log(auth.currentUser.uid);

    // Admin login: posts the entered password and, on success, swaps the
    // login form for the control panel.
    window.login = function() {
      const inputFieldVal = document.getElementById('lineidinput').value;
      $.ajax({
        type: "POST",
        url: '/api/admin/join',
        data: {
          password: inputFieldVal,
          line_code: currentLine,
          user_id: auth.currentUser.uid
        },
        success: function (response) {
          if (response.status === 'success') {
            const section1 = document.getElementById('section1');
            const section2 = document.getElementById('section2');
            section2.style.display = 'none';
            section1.style.display = 'block';
          }
        },
        dataType: 'json'
      });
    };

    // Appends a walk-up (non-app) person to the queue, keyed by timestamp.
    window.addPhysicalPerson = function() {
      lines.child(PHYSICAL_PERSON_STR).set(+new Date());
      console.log('added a physical person to the queue');
    };

    // Keep the three verification color cards in sync with the line record.
    lineRef.on('value', (finalSnap) => {
      const lineObjValue = finalSnap.val();
      const color1 = lineObjValue.colors['0'];
      const color2 = lineObjValue.colors['1'];
      const color3 = lineObjValue.colors['2'];
      document.getElementById('color-card-1').style['background-color'] = color1;
      document.getElementById('color-card-2').style['background-color'] = color2;
      document.getElementById('color-card-3').style['background-color'] = color3;
    });

    let isVisible = false;
    // Bug fix: `number` is incremented below (number++), so it must be
    // declared with `let` — the original `const` threw a TypeError the
    // second time a physical person came up while the banner was visible.
    let number = 1;

    // Flash a "let N in" banner whenever a physical person is up next.
    upNext.on('value', (snapshot) => {
      const upNextVal = snapshot.val();
      if (upNextVal.hasOwnProperty(PHYSICAL_PERSON_STR)) {
        if (isVisible) {
          // Banner already showing: bump the count in place.
          number++;
          const notificationField = document.getElementById('notification');
          notificationField.innerHTML = `Let ${number} in!`;
        } else {
          isVisible = true;
          const unhide = function() {
            $('#notification').fadeOut('fast', function() {
              isVisible = false;
            });
          };
          $('#notification').fadeIn('fast', function() {});
          setTimeout(unhide, 3000);
        }
      }
    });
  }).catch((error) => {
    console.log(error);
  });
});
</script>
<style>
.color-card { height: 150px; max-width: 150px; }
#color-card-1 { background-color: #AD1457; }
#color-card-2 { background-color: #E040FB; }
#color-card-3 { background-color: #2E7D32; }
#section1 { display: none }
#section2 { display: block }
</style>
</head>
<body>
<div class="home-layout-transparent mdl-layout mdl-js-layout">
<header class="mdl-layout__header mdl-layout__header--transparent">
</header>
<div class="mdl-layout__drawer">
<span class="mdl-layout-title">aline.ly</span>
<nav class="mdl-navigation">
<a class="mdl-navigation__link" href="https://devpost.com/software/aline">About</a>
<a class="mdl-navigation__link" href="/index.html">Join Line</a>
<a class="mdl-navigation__link" href="/new.html">Create Line</a>
</nav>
</div>
<main class="mdl-layout__content">
<div id="section1">
<div class="mdl-grid">
<div class="mdl-layout-spacer"></div>
<div class="mdl-cell mdl-cell--stretch mdl-layout-title" style="text-align: center">
<h3>Verification Colors</h3>
<p>Be sure that people who check in have colors synced with these ones!</p>
</div>
<div class="mdl-layout-spacer"></div>
</div>
<div class="mdl-grid">
<div class="mdl-layout-spacer"></div>
<div class="mdl-cell mdl-cell--4-col mdl-shadow--4dp color-card" id="color-card-1"></div>
<div class="mdl-cell mdl-cell--4-col mdl-shadow--4dp color-card" id="color-card-2"></div>
<div class="mdl-cell mdl-cell--4-col mdl-shadow--4dp color-card" id="color-card-3"></div>
<div class="mdl-layout-spacer"></div>
</div>
<div class="mdl-grid">
<div class="mdl-layout-spacer"></div>
<button onclick="addPhysicalPerson()" class="mdl-button mdl-button--raised mdl-button--colored mdl-js-button mdl-js-ripple-effect">Add Physical Person</button>
<div class="mdl-layout-spacer"></div>
</div>
<div id="number-button-container" class="mdl-grid">
<div class="mdl-layout-spacer"></div>
<button class="btn mdl-button mdl-button--raised mdl-button--colored mdl-js-button mdl-js-ripple-effect">Copy Link</button>
<div class="mdl-layout-spacer"></div>
</div>
<div class="mdl-grid">
<div class="mdl-layout-spacer"></div>
<div class="mdl-cell mdl-cell--stretch mdl-layout-title" style="text-align: center">
<h2 id="notification" style="display: none">Let 1 person in!</h2>
</div>
<div class="mdl-layout-spacer"></div>
</div>
</div>
<div id="section2" >
<form action="#">
<div class="mdl-textfield mdl-js-textfield" style="position: absolute; margin: auto; left: 0; right: 0; top: 30%; ">
<input class="mdl-textfield__input" type="password" id="lineidinput">
<label class="mdl-textfield__label" for="sample1">Enter your line password</label>
<button onclick="login()" type="button" class="mdl-button mdl-button--colored mdl-js-button mdl-js-ripple-effect" style="top: 50px;">Login</button>
</div>
</form>
<div class="mdl-layout-spacer"></div>
</div>
</main>
</div>
</body>
</html>
| {
"content_hash": "c631e41b6c6a678dddec6101b05204da",
"timestamp": "",
"source": "github",
"line_count": 209,
"max_line_length": 171,
"avg_line_length": 41.27272727272727,
"alnum_prop": 0.5238812891258985,
"repo_name": "sl/aline.ly",
"id": "b533ea7c3b51f40e86b6e66ca994df3c347c20b3",
"size": "8626",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "client/control.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "528"
},
{
"name": "HTML",
"bytes": "34544"
},
{
"name": "JavaScript",
"bytes": "33185"
}
],
"symlink_target": ""
} |
// Copyright (C) 2016 IBM Corp. All Rights Reserved.
// See the LICENCE.txt file distributed with this work for additional
// information regarding copyright ownership.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.ibm.watson.developer_cloud.concept_insights.v2.model
import com.ibm.watson.developer_cloud.service.GenericModel
/**
 * A named part of a multipart payload: raw bytes plus their content type.
 *
 * Created by Martin Harvan on 11/04/16.
 *
 * @param contentType content type of the data (presumably a MIME type — confirm against callers)
 * @param data raw byte content of the part
 * @param name identifier of the part
 */
case class Part(
  contentType: String,
  data: Array[Byte],
  name: String
) extends GenericModel
"content_hash": "64da4663a350525c55b78d84360f9e54",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 75,
"avg_line_length": 36.96296296296296,
"alnum_prop": 0.7565130260521042,
"repo_name": "kane77/watson-scala-wrapper",
"id": "cf4f6123d626c155186f8b12a5d16c783a9ed6e8",
"size": "998",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/main/scala/com/ibm/watson/developer_cloud/concept_insights/v2/model/Part.scala",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Scala",
"bytes": "49376"
}
],
"symlink_target": ""
} |
// Copyright 2022 The Terasology Foundation
// SPDX-License-Identifier: Apache-2.0
package org.terasology.engine;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.terasology.engine.audio.AudioManager;
import org.terasology.engine.audio.StaticSound;
import org.terasology.engine.audio.StreamingSound;
import org.terasology.engine.audio.nullAudio.NullAudioManager;
import org.terasology.engine.audio.nullAudio.NullSound;
import org.terasology.engine.audio.nullAudio.NullStreamingSound;
import org.terasology.engine.config.Config;
import org.terasology.engine.config.PlayerConfig;
import org.terasology.engine.context.Context;
import org.terasology.engine.core.ComponentSystemManager;
import org.terasology.engine.core.EngineTime;
import org.terasology.engine.core.PathManager;
import org.terasology.engine.core.Time;
import org.terasology.engine.core.bootstrap.EntitySystemSetupUtil;
import org.terasology.engine.core.modes.loadProcesses.LoadPrefabs;
import org.terasology.engine.core.module.ExternalApiWhitelist;
import org.terasology.engine.core.module.ModuleManager;
import org.terasology.engine.core.subsystem.headless.assets.HeadlessMaterial;
import org.terasology.engine.core.subsystem.headless.assets.HeadlessMesh;
import org.terasology.engine.core.subsystem.headless.assets.HeadlessShader;
import org.terasology.engine.core.subsystem.headless.assets.HeadlessSkeletalMesh;
import org.terasology.engine.core.subsystem.headless.assets.HeadlessTexture;
import org.terasology.engine.entitySystem.entity.internal.EngineEntityManager;
import org.terasology.engine.entitySystem.prefab.Prefab;
import org.terasology.engine.entitySystem.prefab.internal.PojoPrefab;
import org.terasology.engine.identity.storageServiceClient.StorageServiceWorker;
import org.terasology.engine.logic.behavior.asset.BehaviorTree;
import org.terasology.engine.network.NetworkSystem;
import org.terasology.engine.network.internal.NetworkSystemImpl;
import org.terasology.engine.persistence.StorageManager;
import org.terasology.engine.persistence.internal.ReadWriteStorageManager;
import org.terasology.engine.persistence.typeHandling.extensionTypes.BlockFamilyTypeHandler;
import org.terasology.engine.persistence.typeHandling.extensionTypes.BlockTypeHandler;
import org.terasology.engine.persistence.typeHandling.extensionTypes.CollisionGroupTypeHandler;
import org.terasology.engine.physics.CollisionGroup;
import org.terasology.engine.physics.CollisionGroupManager;
import org.terasology.engine.recording.RecordAndReplayCurrentStatus;
import org.terasology.engine.recording.RecordAndReplaySerializer;
import org.terasology.engine.recording.RecordAndReplayUtils;
import org.terasology.engine.rendering.assets.animation.MeshAnimation;
import org.terasology.engine.rendering.assets.animation.MeshAnimationImpl;
import org.terasology.engine.rendering.assets.atlas.Atlas;
import org.terasology.engine.rendering.assets.font.Font;
import org.terasology.engine.rendering.assets.font.FontImpl;
import org.terasology.engine.rendering.assets.material.Material;
import org.terasology.engine.rendering.assets.mesh.Mesh;
import org.terasology.engine.rendering.assets.shader.Shader;
import org.terasology.engine.rendering.assets.skeletalmesh.SkeletalMesh;
import org.terasology.engine.rendering.assets.texture.PNGTextureFormat;
import org.terasology.engine.rendering.assets.texture.Texture;
import org.terasology.engine.rendering.assets.texture.TextureData;
import org.terasology.engine.rendering.assets.texture.subtexture.Subtexture;
import org.terasology.engine.testUtil.ModuleManagerFactory;
import org.terasology.engine.world.WorldProvider;
import org.terasology.engine.world.block.Block;
import org.terasology.engine.world.block.BlockManager;
import org.terasology.engine.world.block.family.BlockFamily;
import org.terasology.engine.world.block.family.BlockFamilyLibrary;
import org.terasology.engine.world.block.internal.BlockManagerImpl;
import org.terasology.engine.world.block.loader.BlockFamilyDefinition;
import org.terasology.engine.world.block.loader.BlockFamilyDefinitionData;
import org.terasology.engine.world.block.loader.BlockFamilyDefinitionFormat;
import org.terasology.engine.world.block.shapes.BlockShape;
import org.terasology.engine.world.block.shapes.BlockShapeImpl;
import org.terasology.engine.world.block.sounds.BlockSounds;
import org.terasology.engine.world.block.tiles.BlockTile;
import org.terasology.engine.world.block.tiles.NullWorldAtlas;
import org.terasology.engine.world.block.tiles.WorldAtlas;
import org.terasology.engine.world.chunks.blockdata.ExtraBlockDataManager;
import org.terasology.engine.world.internal.WorldInfo;
import org.terasology.engine.world.sun.BasicCelestialModel;
import org.terasology.engine.world.sun.CelestialSystem;
import org.terasology.engine.world.sun.DefaultCelestialSystem;
import org.terasology.engine.world.time.WorldTime;
import org.terasology.engine.world.time.WorldTimeImpl;
import org.terasology.gestalt.assets.AssetType;
import org.terasology.gestalt.assets.management.AssetManager;
import org.terasology.gestalt.assets.module.ModuleAwareAssetTypeManager;
import org.terasology.gestalt.assets.module.ModuleAwareAssetTypeManagerImpl;
import org.terasology.gestalt.module.ModuleEnvironment;
import org.terasology.gestalt.naming.Name;
import org.terasology.nui.asset.UIElement;
import org.terasology.nui.skin.UISkinAsset;
import org.terasology.persistence.typeHandling.TypeHandlerLibrary;
import org.terasology.reflection.ModuleTypeRegistry;
import org.terasology.reflection.TypeRegistry;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Set;
import java.util.stream.Collectors;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
 * Sets up a headless ( = no graphics ) environment.
 * Based on TerasologyTestingEnvironment code.
 * <p>
 * <b>Deprecated</b> for use outside of {@code engine-tests}; modules should use ModuleTestingEnvironment.
 */
public class HeadlessEnvironment extends Environment {

    // Fixed world time instance handed out by the mocked WorldProvider (see setupWorldProvider()).
    private static final WorldTime WORLD_TIME = new WorldTimeImpl();
    // NOTE(review): not referenced anywhere in this class as shown; kept for subclasses/debugging — confirm before removing.
    private static final Logger logger = LoggerFactory.getLogger(HeadlessEnvironment.class);

    /**
     * Sets up a headless ( = no graphics ) environment.
     *
     * @param modules a set of module names that should be loaded (latest version)
     */
    public HeadlessEnvironment(Name... modules) {
        super(modules);
    }

    /**
     * Registers a {@link BlockFamilyLibrary} and a read-write {@link StorageManager} rooted at the
     * save path {@code "world1"}.
     * <p>
     * All collaborators (entity manager, block manager, record-and-replay support, extra block data)
     * are pulled from the context, so the corresponding setup steps must have run first.
     *
     * @throws IOException if the storage manager cannot be created on disk
     */
    @Override
    protected void setupStorageManager() throws IOException {
        ModuleManager moduleManager = context.get(ModuleManager.class);
        EngineEntityManager engineEntityManager = context.get(EngineEntityManager.class);
        BlockManager blockManager = context.get(BlockManager.class);
        RecordAndReplaySerializer recordAndReplaySerializer = context.get(RecordAndReplaySerializer.class);
        Path savePath = PathManager.getInstance().getSavePath("world1");
        RecordAndReplayUtils recordAndReplayUtils = new RecordAndReplayUtils();
        RecordAndReplayCurrentStatus recordAndReplayCurrentStatus = context.get(RecordAndReplayCurrentStatus.class);
        ModuleEnvironment environment = context.get(ModuleManager.class).getEnvironment();
        context.put(BlockFamilyLibrary.class, new BlockFamilyLibrary(environment, context));
        ExtraBlockDataManager extraDataManager = context.get(ExtraBlockDataManager.class);
        context.put(StorageManager.class, new ReadWriteStorageManager(savePath, moduleManager.getEnvironment(),
                engineEntityManager, blockManager, extraDataManager, recordAndReplaySerializer,
                recordAndReplayUtils, recordAndReplayCurrentStatus));
    }

    /**
     * Registers a {@link NetworkSystemImpl} backed by a Mockito-mocked {@link EngineTime};
     * the mock is also published as the context's {@link Time}.
     */
    @Override
    protected void setupNetwork() {
        EngineTime mockTime = mock(EngineTime.class);
        context.put(Time.class, mockTime);
        NetworkSystem networkSystem = new NetworkSystemImpl(mockTime, getContext());
        context.put(NetworkSystem.class, networkSystem);
    }

    /**
     * Delegates registration of the entity-management classes to {@link EntitySystemSetupUtil}.
     */
    @Override
    protected void setupEntitySystem() {
        EntitySystemSetupUtil.addEntityManagementRelatedClasses(context);
    }

    /**
     * Registers a {@link CollisionGroupManager} and a matching type handler so that
     * {@link CollisionGroup} values can be (de)serialized.
     */
    @Override
    protected void setupCollisionManager() {
        CollisionGroupManager collisionGroupManager = new CollisionGroupManager();
        context.put(CollisionGroupManager.class, collisionGroupManager);
        context.get(TypeHandlerLibrary.class).addTypeHandler(CollisionGroup.class, new CollisionGroupTypeHandler(collisionGroupManager));
    }

    /**
     * Registers a {@link BlockManagerImpl} over a {@link NullWorldAtlas} (no real texture atlas in
     * a headless run) and adds type handlers for serializing {@link Block} and {@link BlockFamily}.
     *
     * @param assetManager the asset manager the block manager loads block definitions through
     */
    @Override
    protected void setupBlockManager(AssetManager assetManager) {
        WorldAtlas worldAtlas = new NullWorldAtlas();
        BlockManagerImpl blockManager = new BlockManagerImpl(worldAtlas, assetManager);
        context.put(BlockManager.class, blockManager);
        TypeHandlerLibrary typeHandlerLibrary = context.get(TypeHandlerLibrary.class);
        typeHandlerLibrary.addTypeHandler(BlockFamily.class, new BlockFamilyTypeHandler(blockManager));
        typeHandlerLibrary.addTypeHandler(Block.class, new BlockTypeHandler(blockManager));
    }

    /**
     * Registers an {@link ExtraBlockDataManager} built from the given context.
     */
    @Override
    protected void setupExtraDataManager(Context context) {
        context.put(ExtraBlockDataManager.class, new ExtraBlockDataManager(context));
    }

    /**
     * Creates an asset type manager with no asset types registered, switches it to the current
     * module environment, and publishes it (and its {@link AssetManager}) in the context.
     *
     * @return the asset manager backed by the empty type manager
     */
    @Override
    protected AssetManager setupEmptyAssetManager() {
        ModuleAwareAssetTypeManager assetTypeManager = new ModuleAwareAssetTypeManagerImpl();
        assetTypeManager.switchEnvironment(context.get(ModuleManager.class).getEnvironment());
        context.put(ModuleAwareAssetTypeManager.class, assetTypeManager);
        context.put(AssetManager.class, assetTypeManager.getAssetManager());
        return assetTypeManager.getAssetManager();
    }

    /**
     * Registers the full set of asset types needed by engine tests. Graphics and audio assets are
     * backed by headless stand-ins (HeadlessTexture, HeadlessShader, HeadlessMaterial, HeadlessMesh,
     * HeadlessSkeletalMesh, NullSound, NullStreamingSound), so no GPU or audio device is required.
     *
     * @return the asset manager with all asset types registered
     */
    @Override
    protected AssetManager setupAssetManager() {
        ModuleAwareAssetTypeManager assetTypeManager = new ModuleAwareAssetTypeManagerImpl();
        // cast lambdas explicitly to avoid inconsistent compiler behavior wrt. type inference
        assetTypeManager.createAssetType(Prefab.class,
                PojoPrefab::new, "prefabs");
        assetTypeManager.createAssetType(BlockShape.class,
                BlockShapeImpl::new, "shapes");
        assetTypeManager.createAssetType(BlockSounds.class,
                BlockSounds::new, "blockSounds");
        assetTypeManager.createAssetType(BlockTile.class,
                BlockTile::new, "blockTiles");
        // Block family definitions need an explicit file format registered on their data producer.
        AssetType<BlockFamilyDefinition, BlockFamilyDefinitionData> blockFamilyDefinitionDataAssetType =
                assetTypeManager.createAssetType(BlockFamilyDefinition.class,
                        BlockFamilyDefinition::new, "blocks");
        assetTypeManager.getAssetFileDataProducer(blockFamilyDefinitionDataAssetType).addAssetFormat(
                new BlockFamilyDefinitionFormat(assetTypeManager.getAssetManager()));
        assetTypeManager.createAssetType(StaticSound.class, NullSound::new, "sounds");
        assetTypeManager.createAssetType(StreamingSound.class, NullStreamingSound::new, "music");
        assetTypeManager.createAssetType(UISkinAsset.class,
                UISkinAsset::new, "skins");
        assetTypeManager.createAssetType(BehaviorTree.class,
                BehaviorTree::new, "behaviors");
        assetTypeManager.createAssetType(UIElement.class,
                UIElement::new, "ui");
        assetTypeManager.createAssetType(Font.class,
                FontImpl::new, "fonts");
        // Textures are loaded from both the "textures" and "fonts" folders; the two PNG formats
        // below select the filter mode by the folder the file came from.
        AssetType<Texture, TextureData> textureDataAssetType = assetTypeManager.createAssetType(Texture.class,
                HeadlessTexture::create, "textures", "fonts");
        assetTypeManager.getAssetFileDataProducer(textureDataAssetType).addAssetFormat(
                new PNGTextureFormat(Texture.FilterMode.NEAREST,
                        path -> path.getPath().get(1).equals("textures")));
        assetTypeManager.getAssetFileDataProducer(textureDataAssetType).addAssetFormat(
                new PNGTextureFormat(Texture.FilterMode.LINEAR, path -> path.getPath().get(1).equals("fonts")));
        assetTypeManager.createAssetType(Shader.class,
                HeadlessShader::new, "shaders");
        assetTypeManager.createAssetType(Material.class,
                HeadlessMaterial::new, "materials");
        assetTypeManager.createAssetType(Mesh.class,
                HeadlessMesh::new, "mesh");
        assetTypeManager.createAssetType(SkeletalMesh.class,
                HeadlessSkeletalMesh::new, "skeletalMesh");
        assetTypeManager.createAssetType(MeshAnimation.class,
                MeshAnimationImpl::new, "animations");
        assetTypeManager.createAssetType(Atlas.class,
                Atlas::new, "atlas");
        assetTypeManager.createAssetType(Subtexture.class,
                Subtexture::new);
        assetTypeManager.switchEnvironment(context.get(ModuleManager.class).getEnvironment());
        context.put(ModuleAwareAssetTypeManager.class, assetTypeManager);
        context.put(AssetManager.class, assetTypeManager.getAssetManager());
        return assetTypeManager.getAssetManager();
    }

    /**
     * Registers a {@link NullAudioManager}; no audio device is used in a headless environment.
     */
    @Override
    protected void setupAudio() {
        NullAudioManager audioManager = new NullAudioManager();
        context.put(AudioManager.class, audioManager);
    }

    /**
     * Loads the default configuration and registers Mockito mocks for the storage service worker
     * and player config.
     */
    @Override
    protected void setupConfig() {
        Config config = new Config(context);
        config.loadDefaults();
        context.put(Config.class, config);
        context.put(StorageServiceWorker.class, mock(StorageServiceWorker.class));
        context.put(PlayerConfig.class, mock(PlayerConfig.class));
    }

    /**
     * Creates the module manager, resolves and loads the requested modules, and registers the
     * type registries plus the reflection-based libraries.
     * <p>
     * The external-API whitelist must be populated before the module manager is created, which is
     * why the static assignment comes first.
     *
     * @param moduleNames the modules to resolve and load
     * @throws Exception if module resolution or environment creation fails
     */
    @Override
    protected void setupModuleManager(Set<Name> moduleNames) throws Exception {
        TypeRegistry.WHITELISTED_CLASSES = ExternalApiWhitelist.CLASSES.stream().map(Class::getName).collect(Collectors.toSet());
        ModuleManager moduleManager = ModuleManagerFactory.create();
        ModuleTypeRegistry typeRegistry = new ModuleTypeRegistry(moduleManager.getEnvironment());
        context.put(TypeRegistry.class, typeRegistry);
        context.put(ModuleTypeRegistry.class, typeRegistry);
        moduleManager.resolveAndLoadEnvironment(moduleNames);
        context.put(ModuleManager.class, moduleManager);
        EntitySystemSetupUtil.addReflectionBasedLibraries(context);
    }

    /**
     * Points the path manager at a freshly created temporary home directory, scheduled for
     * deletion when the JVM exits.
     *
     * @throws IOException if the temporary directory cannot be created
     */
    @Override
    protected void setupPathManager() throws IOException {
        Path tempHome = Files.createTempDirectory("terasology-env");
        tempHome.toFile().deleteOnExit();
        PathManager.getInstance().useOverrideHomePath(tempHome);
    }

    /**
     * Creates, initialises, and registers the {@link ComponentSystemManager}.
     */
    @Override
    protected void setupComponentManager() {
        ComponentSystemManager componentSystemManager = new ComponentSystemManager(context);
        componentSystemManager.initialise();
        context.put(ComponentSystemManager.class, componentSystemManager);
    }

    /**
     * Registers a Mockito-mocked {@link WorldProvider} that reports a fresh {@link WorldInfo}
     * and the shared {@link #WORLD_TIME} instance.
     */
    @Override
    protected void setupWorldProvider() {
        WorldProvider worldProvider = mock(WorldProvider.class);
        when(worldProvider.getWorldInfo()).thenReturn(new WorldInfo());
        when(worldProvider.getTime()).thenReturn(WORLD_TIME);
        context.put(WorldProvider.class, worldProvider);
    }

    /**
     * Registers a {@link DefaultCelestialSystem} driven by a {@link BasicCelestialModel}.
     */
    @Override
    protected void setupCelestialSystem() {
        DefaultCelestialSystem celestialSystem = new DefaultCelestialSystem(new BasicCelestialModel(), context);
        context.put(CelestialSystem.class, celestialSystem);
    }

    /**
     * Runs the {@link LoadPrefabs} load step synchronously to completion, blocking until
     * {@code step()} reports it is done.
     */
    @Override
    protected void loadPrefabs() {
        LoadPrefabs prefabLoadStep = new LoadPrefabs(context);
        boolean complete = false;
        prefabLoadStep.begin();
        while (!complete) {
            complete = prefabLoadStep.step();
        }
    }

    /**
     * Shuts down the storage manager (if one was registered) so its task-master thread pool is
     * released, then performs the superclass cleanup.
     *
     * @throws Exception if the superclass close fails
     */
    @Override
    public void close() throws Exception {
        // it would be nice, if elements in the context implemented (Auto)Closeable

        // The StorageManager creates a thread pool (through TaskMaster)
        // which isn't closed automatically
        StorageManager storageManager = context.get(StorageManager.class);
        if (storageManager != null) {
            storageManager.finishSavingAndShutdown();
        }

        super.close();
    }
}
| {
"content_hash": "044ce979c9a8adfc221389b16508301e",
"timestamp": "",
"source": "github",
"line_count": 337,
"max_line_length": 149,
"avg_line_length": 47.65875370919881,
"alnum_prop": 0.7676981508000748,
"repo_name": "MovingBlocks/Terasology",
"id": "52b5239827a6b67753cc34ef04f214183a10a907",
"size": "16061",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "engine-tests/src/main/java/org/terasology/engine/HeadlessEnvironment.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "4149"
},
{
"name": "Dockerfile",
"bytes": "292"
},
{
"name": "GLSL",
"bytes": "23786"
},
{
"name": "Groovy",
"bytes": "55531"
},
{
"name": "Java",
"bytes": "7272467"
},
{
"name": "Kotlin",
"bytes": "49638"
},
{
"name": "Shell",
"bytes": "11802"
}
],
"symlink_target": ""
} |
/**
* Created by Binh Vu (github: yobavu) on 7/15/17.
*/
package com.yobavu.jtwitch.oauth;
import javax.ws.rs.client.ClientRequestContext;
import javax.ws.rs.client.ClientRequestFilter;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MultivaluedMap;
import java.io.IOException;
/**
 * Jersey client request filter that authenticates every outgoing request against the
 * Twitch API by attaching the API-version {@code Accept} header, the {@code Client-ID}
 * header, and an OAuth 2 {@code Authorization} header.
 */
public class OAuth2Authenticator implements ClientRequestFilter {
    private final String apiVersion;
    private final String clientId;
    private final String accessToken;

    /**
     * @param apiVersion  media-type value sent in the {@code Accept} header to pin the API version
     * @param clientId    application client id sent in the {@code Client-ID} header
     * @param accessToken OAuth access token sent in the {@code Authorization} header
     */
    public OAuth2Authenticator(String apiVersion, String clientId, String accessToken) {
        this.apiVersion = apiVersion;
        this.clientId = clientId;
        this.accessToken = accessToken;
    }

    /**
     * Adds the three authentication headers to the outgoing request.
     *
     * @param requestContext the request being decorated
     * @throws IOException declared by {@link ClientRequestFilter#filter}; never thrown here
     */
    @Override
    public void filter(ClientRequestContext requestContext) throws IOException {
        final MultivaluedMap<String, Object> requestHeaders = requestContext.getHeaders();
        requestHeaders.add(HttpHeaders.ACCEPT, apiVersion);
        requestHeaders.add("Client-ID", clientId);
        requestHeaders.add(HttpHeaders.AUTHORIZATION, "OAuth " + accessToken);
    }
}
| {
"content_hash": "ac45b46742fd081b2643686047357546",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 88,
"avg_line_length": 32.44117647058823,
"alnum_prop": 0.7325475974614687,
"repo_name": "yobavu/jTwitch",
"id": "3a7d22cd6dac98a71ea45c7a5cccda162ea69404",
"size": "1103",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/com/yobavu/jtwitch/oauth/OAuth2Authenticator.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "104288"
}
],
"symlink_target": ""
} |
//===--- RecursiveASTVisitor.h - Recursive AST Visitor ----------*- C++ -*-===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines the RecursiveASTVisitor interface, which recursively
// traverses the entire AST.
//
//===----------------------------------------------------------------------===//
#ifndef LLVM_CLANG_AST_RECURSIVEASTVISITOR_H
#define LLVM_CLANG_AST_RECURSIVEASTVISITOR_H
#include "clang/AST/Attr.h"
#include "clang/AST/Decl.h"
#include "clang/AST/DeclarationName.h"
#include "clang/AST/DeclBase.h"
#include "clang/AST/DeclCXX.h"
#include "clang/AST/DeclFriend.h"
#include "clang/AST/DeclObjC.h"
#include "clang/AST/DeclOpenMP.h"
#include "clang/AST/DeclTemplate.h"
#include "clang/AST/Expr.h"
#include "clang/AST/ExprCXX.h"
#include "clang/AST/ExprObjC.h"
#include "clang/AST/ExprOpenMP.h"
#include "clang/AST/LambdaCapture.h"
#include "clang/AST/NestedNameSpecifier.h"
#include "clang/AST/OpenMPClause.h"
#include "clang/AST/Stmt.h"
#include "clang/AST/StmtCXX.h"
#include "clang/AST/StmtObjC.h"
#include "clang/AST/StmtOpenMP.h"
#include "clang/AST/TemplateBase.h"
#include "clang/AST/TemplateName.h"
#include "clang/AST/Type.h"
#include "clang/AST/TypeLoc.h"
#include "clang/Basic/LLVM.h"
#include "clang/Basic/OpenMPKinds.h"
#include "clang/Basic/Specifiers.h"
#include "llvm/ADT/PointerIntPair.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/Support/Casting.h"
#include <algorithm>
#include <cstddef>
#include <type_traits>
// The following three macros are used for meta programming. The code
// using them is responsible for defining macro OPERATOR().
// All unary operators.
#define UNARYOP_LIST() \
OPERATOR(PostInc) OPERATOR(PostDec) OPERATOR(PreInc) OPERATOR(PreDec) \
OPERATOR(AddrOf) OPERATOR(Deref) OPERATOR(Plus) OPERATOR(Minus) \
OPERATOR(Not) OPERATOR(LNot) OPERATOR(Real) OPERATOR(Imag) \
OPERATOR(Extension) OPERATOR(Coawait)
// All binary operators (excluding compound assign operators).
#define BINOP_LIST() \
OPERATOR(PtrMemD) OPERATOR(PtrMemI) OPERATOR(Mul) OPERATOR(Div) \
OPERATOR(Rem) OPERATOR(Add) OPERATOR(Sub) OPERATOR(Shl) OPERATOR(Shr) \
OPERATOR(LT) OPERATOR(GT) OPERATOR(LE) OPERATOR(GE) OPERATOR(EQ) \
OPERATOR(NE) OPERATOR(Cmp) OPERATOR(And) OPERATOR(Xor) OPERATOR(Or) \
OPERATOR(LAnd) OPERATOR(LOr) OPERATOR(Assign) OPERATOR(Comma)
// All compound assign operators.
#define CAO_LIST() \
OPERATOR(Mul) OPERATOR(Div) OPERATOR(Rem) OPERATOR(Add) OPERATOR(Sub) \
OPERATOR(Shl) OPERATOR(Shr) OPERATOR(And) OPERATOR(Or) OPERATOR(Xor)
namespace clang {
// A helper macro to implement short-circuiting when recursing. It
// invokes CALL_EXPR, which must be a method call, on the derived
// object (s.t. a user of RecursiveASTVisitor can override the method
// in CALL_EXPR).
#define TRY_TO(CALL_EXPR) \
do { \
if (!getDerived().CALL_EXPR) \
return false; \
} while (false)
/// A class that does preorder or postorder
/// depth-first traversal on the entire Clang AST and visits each node.
///
/// This class performs three distinct tasks:
/// 1. traverse the AST (i.e. go to each node);
/// 2. at a given node, walk up the class hierarchy, starting from
/// the node's dynamic type, until the top-most class (e.g. Stmt,
/// Decl, or Type) is reached.
/// 3. given a (node, class) combination, where 'class' is some base
/// class of the dynamic type of 'node', call a user-overridable
/// function to actually visit the node.
///
/// These tasks are done by three groups of methods, respectively:
/// 1. TraverseDecl(Decl *x) does task #1. It is the entry point
/// for traversing an AST rooted at x. This method simply
/// dispatches (i.e. forwards) to TraverseFoo(Foo *x) where Foo
/// is the dynamic type of *x, which calls WalkUpFromFoo(x) and
/// then recursively visits the child nodes of x.
/// TraverseStmt(Stmt *x) and TraverseType(QualType x) work
/// similarly.
/// 2. WalkUpFromFoo(Foo *x) does task #2. It does not try to visit
/// any child node of x. Instead, it first calls WalkUpFromBar(x)
/// where Bar is the direct parent class of Foo (unless Foo has
/// no parent), and then calls VisitFoo(x) (see the next list item).
/// 3. VisitFoo(Foo *x) does task #3.
///
/// These three method groups are tiered (Traverse* > WalkUpFrom* >
/// Visit*). A method (e.g. Traverse*) may call methods from the same
/// tier (e.g. other Traverse*) or one tier lower (e.g. WalkUpFrom*).
/// It may not call methods from a higher tier.
///
/// Note that since WalkUpFromFoo() calls WalkUpFromBar() (where Bar
/// is Foo's super class) before calling VisitFoo(), the result is
/// that the Visit*() methods for a given node are called in the
/// top-down order (e.g. for a node of type NamespaceDecl, the order will
/// be VisitDecl(), VisitNamedDecl(), and then VisitNamespaceDecl()).
///
/// This scheme guarantees that all Visit*() calls for the same AST
/// node are grouped together. In other words, Visit*() methods for
/// different nodes are never interleaved.
///
/// Clients of this visitor should subclass the visitor (providing
/// themselves as the template argument, using the curiously recurring
/// template pattern) and override any of the Traverse*, WalkUpFrom*,
/// and Visit* methods for declarations, types, statements,
/// expressions, or other AST nodes where the visitor should customize
/// behavior. Most users only need to override Visit*. Advanced
/// users may override Traverse* and WalkUpFrom* to implement custom
/// traversal strategies. Returning false from one of these overridden
/// functions will abort the entire traversal.
///
/// By default, this visitor tries to visit every part of the explicit
/// source code exactly once. The default policy towards templates
/// is to descend into the 'pattern' class or function body, not any
/// explicit or implicit instantiations. Explicit specializations
/// are still visited, and the patterns of partial specializations
/// are visited separately. This behavior can be changed by
/// overriding shouldVisitTemplateInstantiations() in the derived class
/// to return true, in which case all known implicit and explicit
/// instantiations will be visited at the same time as the pattern
/// from which they were produced.
///
/// By default, this visitor preorder traverses the AST. If postorder traversal
/// is needed, the \c shouldTraversePostOrder method needs to be overridden
/// to return \c true.
template <typename Derived> class RecursiveASTVisitor {
public:
/// A queue used for performing data recursion over statements.
/// Parameters involving this type are used to implement data
/// recursion over Stmts and Exprs within this class, and should
/// typically not be explicitly specified by derived classes.
/// The bool bit indicates whether the statement has been traversed or not.
typedef SmallVectorImpl<llvm::PointerIntPair<Stmt *, 1, bool>>
DataRecursionQueue;
/// Return a reference to the derived class.
Derived &getDerived() { return *static_cast<Derived *>(this); }
/// Return whether this visitor should recurse into
/// template instantiations.
bool shouldVisitTemplateInstantiations() const { return false; }
/// Return whether this visitor should recurse into the types of
/// TypeLocs.
bool shouldWalkTypesOfTypeLocs() const { return true; }
/// Return whether this visitor should recurse into implicit
/// code, e.g., implicit constructors and destructors.
bool shouldVisitImplicitCode() const { return false; }
/// Return whether this visitor should traverse post-order.
bool shouldTraversePostOrder() const { return false; }
/// Recursively visit a statement or expression, by
/// dispatching to Traverse*() based on the argument's dynamic type.
///
/// \returns false if the visitation was terminated early, true
/// otherwise (including when the argument is nullptr).
bool TraverseStmt(Stmt *S, DataRecursionQueue *Queue = nullptr);
/// Invoked before visiting a statement or expression via data recursion.
///
/// \returns false to skip visiting the node, true otherwise.
bool dataTraverseStmtPre(Stmt *S) { return true; }
/// Invoked after visiting a statement or expression via data recursion.
/// This is not invoked if the previously invoked \c dataTraverseStmtPre
/// returned false.
///
/// \returns false if the visitation was terminated early, true otherwise.
bool dataTraverseStmtPost(Stmt *S) { return true; }
/// Recursively visit a type, by dispatching to
/// Traverse*Type() based on the argument's getTypeClass() property.
///
/// \returns false if the visitation was terminated early, true
/// otherwise (including when the argument is a Null type).
bool TraverseType(QualType T);
/// Recursively visit a type with location, by dispatching to
/// Traverse*TypeLoc() based on the argument type's getTypeClass() property.
///
/// \returns false if the visitation was terminated early, true
/// otherwise (including when the argument is a Null type location).
bool TraverseTypeLoc(TypeLoc TL);
/// Recursively visit an attribute, by dispatching to
/// Traverse*Attr() based on the argument's dynamic type.
///
/// \returns false if the visitation was terminated early, true
/// otherwise (including when the argument is a Null type location).
bool TraverseAttr(Attr *At);
/// Recursively visit a declaration, by dispatching to
/// Traverse*Decl() based on the argument's dynamic type.
///
/// \returns false if the visitation was terminated early, true
/// otherwise (including when the argument is NULL).
bool TraverseDecl(Decl *D);
/// Recursively visit a C++ nested-name-specifier.
///
/// \returns false if the visitation was terminated early, true otherwise.
bool TraverseNestedNameSpecifier(NestedNameSpecifier *NNS);
/// Recursively visit a C++ nested-name-specifier with location
/// information.
///
/// \returns false if the visitation was terminated early, true otherwise.
bool TraverseNestedNameSpecifierLoc(NestedNameSpecifierLoc NNS);
/// Recursively visit a name with its location information.
///
/// \returns false if the visitation was terminated early, true otherwise.
bool TraverseDeclarationNameInfo(DeclarationNameInfo NameInfo);
/// Recursively visit a template name and dispatch to the
/// appropriate method.
///
/// \returns false if the visitation was terminated early, true otherwise.
bool TraverseTemplateName(TemplateName Template);
/// Recursively visit a template argument and dispatch to the
/// appropriate method for the argument type.
///
/// \returns false if the visitation was terminated early, true otherwise.
// FIXME: migrate callers to TemplateArgumentLoc instead.
bool TraverseTemplateArgument(const TemplateArgument &Arg);
/// Recursively visit a template argument location and dispatch to the
/// appropriate method for the argument type.
///
/// \returns false if the visitation was terminated early, true otherwise.
bool TraverseTemplateArgumentLoc(const TemplateArgumentLoc &ArgLoc);
/// Recursively visit a set of template arguments.
/// This can be overridden by a subclass, but it's not expected that
/// will be needed -- this visitor always dispatches to another.
///
/// \returns false if the visitation was terminated early, true otherwise.
// FIXME: take a TemplateArgumentLoc* (or TemplateArgumentListInfo) instead.
bool TraverseTemplateArguments(const TemplateArgument *Args,
unsigned NumArgs);
/// Recursively visit a base specifier. This can be overridden by a
/// subclass.
///
/// \returns false if the visitation was terminated early, true otherwise.
bool TraverseCXXBaseSpecifier(const CXXBaseSpecifier &Base);
/// Recursively visit a constructor initializer. This
/// automatically dispatches to another visitor for the initializer
/// expression, but not for the name of the initializer, so may
/// be overridden for clients that need access to the name.
///
/// \returns false if the visitation was terminated early, true otherwise.
bool TraverseConstructorInitializer(CXXCtorInitializer *Init);
/// Recursively visit a lambda capture. \c Init is the expression that
/// will be used to initialize the capture.
///
/// \returns false if the visitation was terminated early, true otherwise.
bool TraverseLambdaCapture(LambdaExpr *LE, const LambdaCapture *C,
Expr *Init);
/// Recursively visit the body of a lambda expression.
///
/// This provides a hook for visitors that need more context when visiting
/// \c LE->getBody().
///
/// \returns false if the visitation was terminated early, true otherwise.
bool TraverseLambdaBody(LambdaExpr *LE, DataRecursionQueue *Queue = nullptr);
/// Recursively visit the syntactic or semantic form of an
/// initialization list.
///
/// \returns false if the visitation was terminated early, true otherwise.
bool TraverseSynOrSemInitListExpr(InitListExpr *S,
DataRecursionQueue *Queue = nullptr);
// ---- Methods on Attrs ----
// Visit an attribute.
bool VisitAttr(Attr *A) { return true; }
// Declare Traverse* and empty Visit* for all Attr classes.
#define ATTR_VISITOR_DECLS_ONLY
#include "clang/AST/AttrVisitor.inc"
#undef ATTR_VISITOR_DECLS_ONLY
// ---- Methods on Stmts ----
Stmt::child_range getStmtChildren(Stmt *S) { return S->children(); }
private:
template<typename T, typename U>
struct has_same_member_pointer_type : std::false_type {};
template<typename T, typename U, typename R, typename... P>
struct has_same_member_pointer_type<R (T::*)(P...), R (U::*)(P...)>
: std::true_type {};
// Traverse the given statement. If the most-derived traverse function takes a
// data recursion queue, pass it on; otherwise, discard it. Note that the
// first branch of this conditional must compile whether or not the derived
// class can take a queue, so if we're taking the second arm, make the first
// arm call our function rather than the derived class version.
#define TRAVERSE_STMT_BASE(NAME, CLASS, VAR, QUEUE) \
(has_same_member_pointer_type<decltype( \
&RecursiveASTVisitor::Traverse##NAME), \
decltype(&Derived::Traverse##NAME)>::value \
? static_cast<typename std::conditional< \
has_same_member_pointer_type< \
decltype(&RecursiveASTVisitor::Traverse##NAME), \
decltype(&Derived::Traverse##NAME)>::value, \
Derived &, RecursiveASTVisitor &>::type>(*this) \
.Traverse##NAME(static_cast<CLASS *>(VAR), QUEUE) \
: getDerived().Traverse##NAME(static_cast<CLASS *>(VAR)))
// Try to traverse the given statement, or enqueue it if we're performing data
// recursion in the middle of traversing another statement. Can only be called
// from within a DEF_TRAVERSE_STMT body or similar context.
#define TRY_TO_TRAVERSE_OR_ENQUEUE_STMT(S) \
do { \
if (!TRAVERSE_STMT_BASE(Stmt, Stmt, S, Queue)) \
return false; \
} while (false)
public:
// Declare Traverse*() for all concrete Stmt classes.
#define ABSTRACT_STMT(STMT)
#define STMT(CLASS, PARENT) \
bool Traverse##CLASS(CLASS *S, DataRecursionQueue *Queue = nullptr);
#include "clang/AST/StmtNodes.inc"
// The above header #undefs ABSTRACT_STMT and STMT upon exit.
// Define WalkUpFrom*() and empty Visit*() for all Stmt classes.
bool WalkUpFromStmt(Stmt *S) { return getDerived().VisitStmt(S); }
bool VisitStmt(Stmt *S) { return true; }
#define STMT(CLASS, PARENT) \
bool WalkUpFrom##CLASS(CLASS *S) { \
TRY_TO(WalkUpFrom##PARENT(S)); \
TRY_TO(Visit##CLASS(S)); \
return true; \
} \
bool Visit##CLASS(CLASS *S) { return true; }
#include "clang/AST/StmtNodes.inc"
// Define Traverse*(), WalkUpFrom*(), and Visit*() for unary
// operator methods. Unary operators are not classes in themselves
// (they're all opcodes in UnaryOperator) but do have visitors.
#define OPERATOR(NAME) \
bool TraverseUnary##NAME(UnaryOperator *S, \
DataRecursionQueue *Queue = nullptr) { \
if (!getDerived().shouldTraversePostOrder()) \
TRY_TO(WalkUpFromUnary##NAME(S)); \
TRY_TO_TRAVERSE_OR_ENQUEUE_STMT(S->getSubExpr()); \
return true; \
} \
bool WalkUpFromUnary##NAME(UnaryOperator *S) { \
TRY_TO(WalkUpFromUnaryOperator(S)); \
TRY_TO(VisitUnary##NAME(S)); \
return true; \
} \
bool VisitUnary##NAME(UnaryOperator *S) { return true; }
UNARYOP_LIST()
#undef OPERATOR
// Define Traverse*(), WalkUpFrom*(), and Visit*() for binary
// operator methods. Binary operators are not classes in themselves
// (they're all opcodes in BinaryOperator) but do have visitors.
#define GENERAL_BINOP_FALLBACK(NAME, BINOP_TYPE) \
bool TraverseBin##NAME(BINOP_TYPE *S, DataRecursionQueue *Queue = nullptr) { \
if (!getDerived().shouldTraversePostOrder()) \
TRY_TO(WalkUpFromBin##NAME(S)); \
TRY_TO_TRAVERSE_OR_ENQUEUE_STMT(S->getLHS()); \
TRY_TO_TRAVERSE_OR_ENQUEUE_STMT(S->getRHS()); \
return true; \
} \
bool WalkUpFromBin##NAME(BINOP_TYPE *S) { \
TRY_TO(WalkUpFrom##BINOP_TYPE(S)); \
TRY_TO(VisitBin##NAME(S)); \
return true; \
} \
bool VisitBin##NAME(BINOP_TYPE *S) { return true; }
#define OPERATOR(NAME) GENERAL_BINOP_FALLBACK(NAME, BinaryOperator)
BINOP_LIST()
#undef OPERATOR
// Define Traverse*(), WalkUpFrom*(), and Visit*() for compound
// assignment methods. Compound assignment operators are not
// classes in themselves (they're all opcodes in
// CompoundAssignOperator) but do have visitors.
#define OPERATOR(NAME) \
GENERAL_BINOP_FALLBACK(NAME##Assign, CompoundAssignOperator)
CAO_LIST()
#undef OPERATOR
#undef GENERAL_BINOP_FALLBACK
// ---- Methods on Types ----
// FIXME: revamp to take TypeLoc's rather than Types.
// Declare Traverse*() for all concrete Type classes.
#define ABSTRACT_TYPE(CLASS, BASE)
#define TYPE(CLASS, BASE) bool Traverse##CLASS##Type(CLASS##Type *T);
#include "clang/AST/TypeNodes.def"
// The above header #undefs ABSTRACT_TYPE and TYPE upon exit.
// Define WalkUpFrom*() and empty Visit*() for all Type classes.
bool WalkUpFromType(Type *T) { return getDerived().VisitType(T); }
bool VisitType(Type *T) { return true; }
#define TYPE(CLASS, BASE) \
bool WalkUpFrom##CLASS##Type(CLASS##Type *T) { \
TRY_TO(WalkUpFrom##BASE(T)); \
TRY_TO(Visit##CLASS##Type(T)); \
return true; \
} \
bool Visit##CLASS##Type(CLASS##Type *T) { return true; }
#include "clang/AST/TypeNodes.def"
// ---- Methods on TypeLocs ----
// FIXME: this currently just calls the matching Type methods
// Declare Traverse*() for all concrete TypeLoc classes.
#define ABSTRACT_TYPELOC(CLASS, BASE)
#define TYPELOC(CLASS, BASE) bool Traverse##CLASS##TypeLoc(CLASS##TypeLoc TL);
#include "clang/AST/TypeLocNodes.def"
// The above header #undefs ABSTRACT_TYPELOC and TYPELOC upon exit.
// Define WalkUpFrom*() and empty Visit*() for all TypeLoc classes.
bool WalkUpFromTypeLoc(TypeLoc TL) { return getDerived().VisitTypeLoc(TL); }
bool VisitTypeLoc(TypeLoc TL) { return true; }
// QualifiedTypeLoc and UnqualTypeLoc are not declared in
// TypeNodes.def and thus need to be handled specially.
// Both WalkUpFrom* overloads below forward to VisitUnqualTypeLoc, so
// clients observe a single, unqualified view of the type location.
bool WalkUpFromQualifiedTypeLoc(QualifiedTypeLoc TL) {
  return getDerived().VisitUnqualTypeLoc(TL.getUnqualifiedLoc());
}
bool VisitQualifiedTypeLoc(QualifiedTypeLoc TL) { return true; }
bool WalkUpFromUnqualTypeLoc(UnqualTypeLoc TL) {
  return getDerived().VisitUnqualTypeLoc(TL.getUnqualifiedLoc());
}
bool VisitUnqualTypeLoc(UnqualTypeLoc TL) { return true; }
// Note that BASE includes trailing 'Type' which CLASS doesn't.
#define TYPE(CLASS, BASE) \
  bool WalkUpFrom##CLASS##TypeLoc(CLASS##TypeLoc TL) { \
    TRY_TO(WalkUpFrom##BASE##Loc(TL)); \
    TRY_TO(Visit##CLASS##TypeLoc(TL)); \
    return true; \
  } \
  bool Visit##CLASS##TypeLoc(CLASS##TypeLoc TL) { return true; }
#include "clang/AST/TypeNodes.def"
// ---- Methods on Decls ----
// Declare Traverse*() for all concrete Decl classes.
#define ABSTRACT_DECL(DECL)
#define DECL(CLASS, BASE) bool Traverse##CLASS##Decl(CLASS##Decl *D);
#include "clang/AST/DeclNodes.inc"
// The above header #undefs ABSTRACT_DECL and DECL upon exit.
// Define WalkUpFrom*() and empty Visit*() for all Decl classes.
bool WalkUpFromDecl(Decl *D) { return getDerived().VisitDecl(D); }
bool VisitDecl(Decl *D) { return true; }
#define DECL(CLASS, BASE) \
  bool WalkUpFrom##CLASS##Decl(CLASS##Decl *D) { \
    TRY_TO(WalkUpFrom##BASE(D)); \
    TRY_TO(Visit##CLASS##Decl(D)); \
    return true; \
  } \
  bool Visit##CLASS##Decl(CLASS##Decl *D) { return true; }
#include "clang/AST/DeclNodes.inc"
// Returns true for child decls that are reached through some other AST
// node and must therefore be skipped when walking a DeclContext's
// children (see the out-of-line definition below).
bool canIgnoreChildDeclWhileTraversingDeclContext(const Decl *Child);
private:
// These are helper methods used by more than one Traverse* method.
bool TraverseTemplateParameterListHelper(TemplateParameterList *TPL);
// Traverses template parameter lists of either a DeclaratorDecl or TagDecl.
template <typename T>
bool TraverseDeclTemplateParameterLists(T *D);
// Declare TraverseTemplateInstantiations for Class/Var/Function
// template decls.
#define DEF_TRAVERSE_TMPL_INST(TMPLDECLKIND) \
  bool TraverseTemplateInstantiations(TMPLDECLKIND##TemplateDecl *D);
DEF_TRAVERSE_TMPL_INST(Class)
DEF_TRAVERSE_TMPL_INST(Var)
DEF_TRAVERSE_TMPL_INST(Function)
#undef DEF_TRAVERSE_TMPL_INST
bool TraverseTemplateArgumentLocsHelper(const TemplateArgumentLoc *TAL,
                                        unsigned Count);
bool TraverseArrayTypeLocHelper(ArrayTypeLoc TL);
bool TraverseRecordHelper(RecordDecl *D);
bool TraverseCXXRecordHelper(CXXRecordDecl *D);
bool TraverseDeclaratorHelper(DeclaratorDecl *D);
bool TraverseDeclContextHelper(DeclContext *DC);
bool TraverseFunctionHelper(FunctionDecl *D);
bool TraverseVarHelper(VarDecl *D);
bool TraverseOMPExecutableDirective(OMPExecutableDirective *S);
bool TraverseOMPLoopDirective(OMPLoopDirective *S);
bool TraverseOMPClause(OMPClause *C);
#define OPENMP_CLAUSE(Name, Class) bool Visit##Class(Class *C);
#include "clang/Basic/OpenMPKinds.def"
/// Process clauses with list of variables.
template <typename T> bool VisitOMPClauseList(T *Node);
/// Process clauses with pre-initializers.
bool VisitOMPClauseWithPreInit(OMPClauseWithPreInit *Node);
bool VisitOMPClauseWithPostUpdate(OMPClauseWithPostUpdate *Node);
bool dataTraverseNode(Stmt *S, DataRecursionQueue *Queue);
bool PostVisitStmt(Stmt *S);
};
template <typename Derived>
bool RecursiveASTVisitor<Derived>::dataTraverseNode(Stmt *S,
                                                    DataRecursionQueue *Queue) {
// Expands to a call to the matching Traverse* method, forwarding the
// data recursion queue so children are enqueued rather than recursed
// into.
#define DISPATCH_STMT(NAME, CLASS, VAR) \
  return TRAVERSE_STMT_BASE(NAME, CLASS, VAR, Queue);
  // If we have a binary expr, dispatch to the subcode of the binop. A smart
  // optimizer (e.g. LLVM) will fold this comparison into the switch stmt
  // below.
  if (BinaryOperator *BinOp = dyn_cast<BinaryOperator>(S)) {
    switch (BinOp->getOpcode()) {
#define OPERATOR(NAME) \
  case BO_##NAME: \
    DISPATCH_STMT(Bin##NAME, BinaryOperator, S);
      BINOP_LIST()
#undef OPERATOR
#undef BINOP_LIST
#define OPERATOR(NAME) \
  case BO_##NAME##Assign: \
    DISPATCH_STMT(Bin##NAME##Assign, CompoundAssignOperator, S);
      CAO_LIST()
#undef OPERATOR
#undef CAO_LIST
    }
  } else if (UnaryOperator *UnOp = dyn_cast<UnaryOperator>(S)) {
    switch (UnOp->getOpcode()) {
#define OPERATOR(NAME) \
  case UO_##NAME: \
    DISPATCH_STMT(Unary##NAME, UnaryOperator, S);
      UNARYOP_LIST()
#undef OPERATOR
#undef UNARYOP_LIST
    }
  }
  // Top switch stmt: dispatch to TraverseFooStmt for each concrete FooStmt.
  switch (S->getStmtClass()) {
  case Stmt::NoStmtClass:
    break;
#define ABSTRACT_STMT(STMT)
#define STMT(CLASS, PARENT) \
  case Stmt::CLASS##Class: \
    DISPATCH_STMT(CLASS, CLASS, S);
#include "clang/AST/StmtNodes.inc"
  }
  return true;
}
#undef DISPATCH_STMT
template <typename Derived>
bool RecursiveASTVisitor<Derived>::PostVisitStmt(Stmt *S) {
  // Runs the WalkUpFrom*/Visit* hooks for S; called from TraverseStmt
  // after S's children are done, when post-order traversal is enabled.
  switch (S->getStmtClass()) {
  case Stmt::NoStmtClass:
    break;
#define ABSTRACT_STMT(STMT)
#define STMT(CLASS, PARENT) \
  case Stmt::CLASS##Class: \
    TRY_TO(WalkUpFrom##CLASS(static_cast<CLASS *>(S))); break;
// InitListExpr has two forms (syntactic and semantic); walk up from
// whichever of the two forms exist so visitors see both.
#define INITLISTEXPR(CLASS, PARENT) \
  case Stmt::CLASS##Class: \
    { \
      auto ILE = static_cast<CLASS *>(S); \
      if (auto Syn = ILE->isSemanticForm() ? ILE->getSyntacticForm() : ILE) \
        TRY_TO(WalkUpFrom##CLASS(Syn)); \
      if (auto Sem = ILE->isSemanticForm() ? ILE : ILE->getSemanticForm()) \
        TRY_TO(WalkUpFrom##CLASS(Sem)); \
      break; \
    }
#include "clang/AST/StmtNodes.inc"
  }
  return true;
}
#undef DISPATCH_STMT
template <typename Derived>
bool RecursiveASTVisitor<Derived>::TraverseStmt(Stmt *S,
                                                DataRecursionQueue *Queue) {
  if (!S)
    return true;
  // If a caller-owned queue is active, just record S on it; the owner
  // of that queue drives the actual traversal.
  if (Queue) {
    Queue->push_back({S, false});
    return true;
  }
  // Otherwise run an explicit-stack (data recursive) traversal. Each
  // entry pairs a Stmt with a "visited" bit: false = not yet expanded,
  // true = children already enqueued.
  SmallVector<llvm::PointerIntPair<Stmt *, 1, bool>, 8> LocalQueue;
  LocalQueue.push_back({S, false});
  while (!LocalQueue.empty()) {
    auto &CurrSAndVisited = LocalQueue.back();
    Stmt *CurrS = CurrSAndVisited.getPointer();
    bool Visited = CurrSAndVisited.getInt();
    if (Visited) {
      // Second time we pop this node: its children are finished.
      LocalQueue.pop_back();
      TRY_TO(dataTraverseStmtPost(CurrS));
      if (getDerived().shouldTraversePostOrder()) {
        TRY_TO(PostVisitStmt(CurrS));
      }
      continue;
    }
    if (getDerived().dataTraverseStmtPre(CurrS)) {
      // Mark as visited *before* expanding: dataTraverseNode may grow
      // LocalQueue, which can invalidate the CurrSAndVisited reference.
      CurrSAndVisited.setInt(true);
      size_t N = LocalQueue.size();
      TRY_TO(dataTraverseNode(CurrS, &LocalQueue));
      // Process new children in the order they were added.
      std::reverse(LocalQueue.begin() + N, LocalQueue.end());
    } else {
      LocalQueue.pop_back();
    }
  }
  return true;
}
// Expands to a call to the Traverse* method matching the node's
// dynamic class.
#define DISPATCH(NAME, CLASS, VAR) \
  return getDerived().Traverse##NAME(static_cast<CLASS *>(VAR))
template <typename Derived>
bool RecursiveASTVisitor<Derived>::TraverseType(QualType T) {
  if (T.isNull())
    return true;
  switch (T->getTypeClass()) {
#define ABSTRACT_TYPE(CLASS, BASE)
#define TYPE(CLASS, BASE) \
  case Type::CLASS: \
    DISPATCH(CLASS##Type, CLASS##Type, const_cast<Type *>(T.getTypePtr()));
#include "clang/AST/TypeNodes.def"
  }
  return true;
}
template <typename Derived>
bool RecursiveASTVisitor<Derived>::TraverseTypeLoc(TypeLoc TL) {
  // Dispatch to the Traverse*TypeLoc method matching TL's class.
  if (TL.isNull())
    return true;
  switch (TL.getTypeLocClass()) {
#define ABSTRACT_TYPELOC(CLASS, BASE)
#define TYPELOC(CLASS, BASE) \
  case TypeLoc::CLASS: \
    return getDerived().Traverse##CLASS##TypeLoc(TL.castAs<CLASS##TypeLoc>());
#include "clang/AST/TypeLocNodes.def"
  }
  return true;
}
// Define the Traverse*Attr(Attr* A) methods
#define VISITORCLASS RecursiveASTVisitor
#include "clang/AST/AttrVisitor.inc"
#undef VISITORCLASS
template <typename Derived>
bool RecursiveASTVisitor<Derived>::TraverseDecl(Decl *D) {
  if (!D)
    return true;
  // As a syntax visitor, by default we want to ignore declarations for
  // implicit declarations (ones not typed explicitly by the user).
  if (!getDerived().shouldVisitImplicitCode() && D->isImplicit())
    return true;
  // Dispatch to the concrete Traverse*Decl; a false return aborts the
  // whole traversal.
  switch (D->getKind()) {
#define ABSTRACT_DECL(DECL)
#define DECL(CLASS, BASE) \
  case Decl::CLASS: \
    if (!getDerived().Traverse##CLASS##Decl(static_cast<CLASS##Decl *>(D))) \
      return false; \
    break;
#include "clang/AST/DeclNodes.inc"
  }
  // Visit any attributes attached to this declaration.
  for (auto *I : D->attrs()) {
    if (!getDerived().TraverseAttr(I))
      return false;
  }
  return true;
}
#undef DISPATCH
template <typename Derived>
bool RecursiveASTVisitor<Derived>::TraverseNestedNameSpecifier(
    NestedNameSpecifier *NNS) {
  // Walk a nested-name-specifier from its outermost prefix inward.
  // Only the type-specifier component kinds carry a type to traverse;
  // identifier/namespace/global/super components have no children here.
  if (!NNS)
    return true;
  if (NestedNameSpecifier *Prefix = NNS->getPrefix())
    TRY_TO(TraverseNestedNameSpecifier(Prefix));
  const auto Kind = NNS->getKind();
  if (Kind == NestedNameSpecifier::TypeSpec ||
      Kind == NestedNameSpecifier::TypeSpecWithTemplate)
    TRY_TO(TraverseType(QualType(NNS->getAsType(), 0)));
  return true;
}
template <typename Derived>
bool RecursiveASTVisitor<Derived>::TraverseNestedNameSpecifierLoc(
    NestedNameSpecifierLoc NNS) {
  // Walk a nested-name-specifier (with source locations) from its
  // outermost prefix inward; only type-specifier components carry a
  // TypeLoc to traverse.
  if (!NNS)
    return true;
  if (NestedNameSpecifierLoc Prefix = NNS.getPrefix())
    TRY_TO(TraverseNestedNameSpecifierLoc(Prefix));
  const auto Kind = NNS.getNestedNameSpecifier()->getKind();
  if (Kind == NestedNameSpecifier::TypeSpec ||
      Kind == NestedNameSpecifier::TypeSpecWithTemplate)
    TRY_TO(TraverseTypeLoc(NNS.getTypeLoc()));
  return true;
}
template <typename Derived>
bool RecursiveASTVisitor<Derived>::TraverseDeclarationNameInfo(
    DeclarationNameInfo NameInfo) {
  switch (NameInfo.getName().getNameKind()) {
  case DeclarationName::CXXConstructorName:
  case DeclarationName::CXXDestructorName:
  case DeclarationName::CXXConversionFunctionName:
    // These name kinds embed a type; traverse its written location if
    // one was recorded.
    if (TypeSourceInfo *TSInfo = NameInfo.getNamedTypeInfo())
      TRY_TO(TraverseTypeLoc(TSInfo->getTypeLoc()));
    break;
  case DeclarationName::CXXDeductionGuideName:
    TRY_TO(TraverseTemplateName(
        TemplateName(NameInfo.getName().getCXXDeductionGuideTemplate())));
    break;
  case DeclarationName::Identifier:
  case DeclarationName::ObjCZeroArgSelector:
  case DeclarationName::ObjCOneArgSelector:
  case DeclarationName::ObjCMultiArgSelector:
  case DeclarationName::CXXOperatorName:
  case DeclarationName::CXXLiteralOperatorName:
  case DeclarationName::CXXUsingDirective:
    // Plain names carry no nested AST nodes to traverse.
    break;
  }
  return true;
}
template <typename Derived>
bool RecursiveASTVisitor<Derived>::TraverseTemplateName(TemplateName Template) {
  // Only dependent and qualified template names carry a qualifier to
  // traverse; every other template-name form has nothing to visit here.
  if (DependentTemplateName *DTN = Template.getAsDependentTemplateName()) {
    TRY_TO(TraverseNestedNameSpecifier(DTN->getQualifier()));
    return true;
  }
  if (QualifiedTemplateName *QTN = Template.getAsQualifiedTemplateName())
    TRY_TO(TraverseNestedNameSpecifier(QTN->getQualifier()));
  return true;
}
template <typename Derived>
bool RecursiveASTVisitor<Derived>::TraverseTemplateArgument(
    const TemplateArgument &Arg) {
  switch (Arg.getKind()) {
  case TemplateArgument::Null:
  case TemplateArgument::Declaration:
  case TemplateArgument::Integral:
  case TemplateArgument::NullPtr:
    // Leaf argument kinds: nothing nested to traverse.
    return true;
  case TemplateArgument::Type:
    return getDerived().TraverseType(Arg.getAsType());
  case TemplateArgument::Template:
  case TemplateArgument::TemplateExpansion:
    return getDerived().TraverseTemplateName(
        Arg.getAsTemplateOrTemplatePattern());
  case TemplateArgument::Expression:
    return getDerived().TraverseStmt(Arg.getAsExpr());
  case TemplateArgument::Pack:
    // A pack is traversed as the flat sequence of its elements.
    return getDerived().TraverseTemplateArguments(Arg.pack_begin(),
                                                  Arg.pack_size());
  }
  return true;
}
// FIXME: no template name location?
// FIXME: no source locations for a template argument pack?
template <typename Derived>
bool RecursiveASTVisitor<Derived>::TraverseTemplateArgumentLoc(
    const TemplateArgumentLoc &ArgLoc) {
  const TemplateArgument &Arg = ArgLoc.getArgument();
  switch (Arg.getKind()) {
  case TemplateArgument::Null:
  case TemplateArgument::Declaration:
  case TemplateArgument::Integral:
  case TemplateArgument::NullPtr:
    // Leaf argument kinds: nothing nested to traverse.
    return true;
  case TemplateArgument::Type: {
    // Prefer the written TypeLoc when available; otherwise fall back
    // to the bare type.
    // FIXME: how can TSI ever be NULL?
    if (TypeSourceInfo *TSI = ArgLoc.getTypeSourceInfo())
      return getDerived().TraverseTypeLoc(TSI->getTypeLoc());
    else
      return getDerived().TraverseType(Arg.getAsType());
  }
  case TemplateArgument::Template:
  case TemplateArgument::TemplateExpansion:
    if (ArgLoc.getTemplateQualifierLoc())
      TRY_TO(getDerived().TraverseNestedNameSpecifierLoc(
          ArgLoc.getTemplateQualifierLoc()));
    return getDerived().TraverseTemplateName(
        Arg.getAsTemplateOrTemplatePattern());
  case TemplateArgument::Expression:
    return getDerived().TraverseStmt(ArgLoc.getSourceExpression());
  case TemplateArgument::Pack:
    return getDerived().TraverseTemplateArguments(Arg.pack_begin(),
                                                  Arg.pack_size());
  }
  return true;
}
template <typename Derived>
bool RecursiveASTVisitor<Derived>::TraverseTemplateArguments(
    const TemplateArgument *Args, unsigned NumArgs) {
  // Traverse each argument of the array in order; TRY_TO aborts the
  // whole traversal as soon as one visit returns false.
  const TemplateArgument *End = Args + NumArgs;
  for (const TemplateArgument *Arg = Args; Arg != End; ++Arg) {
    TRY_TO(TraverseTemplateArgument(*Arg));
  }
  return true;
}
template <typename Derived>
bool RecursiveASTVisitor<Derived>::TraverseConstructorInitializer(
    CXXCtorInitializer *Init) {
  // Visit the initializer's written type, if any, then the initializer
  // expression itself. Unwritten initializers are only visited when the
  // derived visitor opts into implicit code.
  TypeSourceInfo *SrcInfo = Init->getTypeSourceInfo();
  if (SrcInfo)
    TRY_TO(TraverseTypeLoc(SrcInfo->getTypeLoc()));
  if (getDerived().shouldVisitImplicitCode() || Init->isWritten())
    TRY_TO(TraverseStmt(Init->getInit()));
  return true;
}
template <typename Derived>
bool
RecursiveASTVisitor<Derived>::TraverseLambdaCapture(LambdaExpr *LE,
                                                    const LambdaCapture *C,
                                                    Expr *Init) {
  // An init-capture declares a new variable, so it is traversed as a
  // decl; every other capture form just has its initializer walked.
  if (!LE->isInitCapture(C)) {
    TRY_TO(TraverseStmt(Init));
    return true;
  }
  TRY_TO(TraverseDecl(C->getCapturedVar()));
  return true;
}
template <typename Derived>
bool RecursiveASTVisitor<Derived>::TraverseLambdaBody(
    LambdaExpr *LE, DataRecursionQueue *Queue) {
  // Traverse the lambda's body statement, enqueueing it on the data
  // recursion queue when one is active.
  TRY_TO_TRAVERSE_OR_ENQUEUE_STMT(LE->getBody());
  return true;
}
// ----------------- Type traversal -----------------
// This macro makes available a variable T, the passed-in type.
// In pre-order mode the WalkUpFrom* hooks run before the children in
// CODE; in post-order mode they run after.
#define DEF_TRAVERSE_TYPE(TYPE, CODE) \
  template <typename Derived> \
  bool RecursiveASTVisitor<Derived>::Traverse##TYPE(TYPE *T) { \
    if (!getDerived().shouldTraversePostOrder()) \
      TRY_TO(WalkUpFrom##TYPE(T)); \
    { CODE; } \
    if (getDerived().shouldTraversePostOrder()) \
      TRY_TO(WalkUpFrom##TYPE(T)); \
    return true; \
  }
DEF_TRAVERSE_TYPE(BuiltinType, {})
DEF_TRAVERSE_TYPE(ComplexType, { TRY_TO(TraverseType(T->getElementType())); })
DEF_TRAVERSE_TYPE(PointerType, { TRY_TO(TraverseType(T->getPointeeType())); })
DEF_TRAVERSE_TYPE(BlockPointerType,
                  { TRY_TO(TraverseType(T->getPointeeType())); })
DEF_TRAVERSE_TYPE(LValueReferenceType,
                  { TRY_TO(TraverseType(T->getPointeeType())); })
DEF_TRAVERSE_TYPE(RValueReferenceType,
                  { TRY_TO(TraverseType(T->getPointeeType())); })
DEF_TRAVERSE_TYPE(MemberPointerType, {
  TRY_TO(TraverseType(QualType(T->getClass(), 0)));
  TRY_TO(TraverseType(T->getPointeeType()));
})
DEF_TRAVERSE_TYPE(AdjustedType, { TRY_TO(TraverseType(T->getOriginalType())); })
DEF_TRAVERSE_TYPE(DecayedType, { TRY_TO(TraverseType(T->getOriginalType())); })
DEF_TRAVERSE_TYPE(ConstantArrayType,
                  { TRY_TO(TraverseType(T->getElementType())); })
DEF_TRAVERSE_TYPE(IncompleteArrayType,
                  { TRY_TO(TraverseType(T->getElementType())); })
DEF_TRAVERSE_TYPE(VariableArrayType, {
  TRY_TO(TraverseType(T->getElementType()));
  TRY_TO(TraverseStmt(T->getSizeExpr()));
})
DEF_TRAVERSE_TYPE(DependentSizedArrayType, {
  TRY_TO(TraverseType(T->getElementType()));
  if (T->getSizeExpr())
    TRY_TO(TraverseStmt(T->getSizeExpr()));
})
DEF_TRAVERSE_TYPE(DependentAddressSpaceType, {
  TRY_TO(TraverseStmt(T->getAddrSpaceExpr()));
  TRY_TO(TraverseType(T->getPointeeType()));
})
DEF_TRAVERSE_TYPE(DependentVectorType, {
  if (T->getSizeExpr())
    TRY_TO(TraverseStmt(T->getSizeExpr()));
  TRY_TO(TraverseType(T->getElementType()));
})
DEF_TRAVERSE_TYPE(DependentSizedExtVectorType, {
  if (T->getSizeExpr())
    TRY_TO(TraverseStmt(T->getSizeExpr()));
  TRY_TO(TraverseType(T->getElementType()));
})
DEF_TRAVERSE_TYPE(VectorType, { TRY_TO(TraverseType(T->getElementType())); })
DEF_TRAVERSE_TYPE(ExtVectorType, { TRY_TO(TraverseType(T->getElementType())); })
DEF_TRAVERSE_TYPE(FunctionNoProtoType,
                  { TRY_TO(TraverseType(T->getReturnType())); })
// Return type, parameter types, exception-spec types, and any noexcept
// expression are all children of a function prototype.
DEF_TRAVERSE_TYPE(FunctionProtoType, {
  TRY_TO(TraverseType(T->getReturnType()));
  for (const auto &A : T->param_types()) {
    TRY_TO(TraverseType(A));
  }
  for (const auto &E : T->exceptions()) {
    TRY_TO(TraverseType(E));
  }
  if (Expr *NE = T->getNoexceptExpr())
    TRY_TO(TraverseStmt(NE));
})
DEF_TRAVERSE_TYPE(UnresolvedUsingType, {})
DEF_TRAVERSE_TYPE(TypedefType, {})
DEF_TRAVERSE_TYPE(TypeOfExprType,
                  { TRY_TO(TraverseStmt(T->getUnderlyingExpr())); })
DEF_TRAVERSE_TYPE(TypeOfType, { TRY_TO(TraverseType(T->getUnderlyingType())); })
DEF_TRAVERSE_TYPE(DecltypeType,
                  { TRY_TO(TraverseStmt(T->getUnderlyingExpr())); })
DEF_TRAVERSE_TYPE(UnaryTransformType, {
  TRY_TO(TraverseType(T->getBaseType()));
  TRY_TO(TraverseType(T->getUnderlyingType()));
})
DEF_TRAVERSE_TYPE(AutoType, { TRY_TO(TraverseType(T->getDeducedType())); })
DEF_TRAVERSE_TYPE(DeducedTemplateSpecializationType, {
  TRY_TO(TraverseTemplateName(T->getTemplateName()));
  TRY_TO(TraverseType(T->getDeducedType()));
})
DEF_TRAVERSE_TYPE(RecordType, {})
DEF_TRAVERSE_TYPE(EnumType, {})
DEF_TRAVERSE_TYPE(TemplateTypeParmType, {})
DEF_TRAVERSE_TYPE(SubstTemplateTypeParmType, {
  TRY_TO(TraverseType(T->getReplacementType()));
})
DEF_TRAVERSE_TYPE(SubstTemplateTypeParmPackType, {
  TRY_TO(TraverseTemplateArgument(T->getArgumentPack()));
})
DEF_TRAVERSE_TYPE(TemplateSpecializationType, {
  TRY_TO(TraverseTemplateName(T->getTemplateName()));
  TRY_TO(TraverseTemplateArguments(T->getArgs(), T->getNumArgs()));
})
DEF_TRAVERSE_TYPE(InjectedClassNameType, {})
DEF_TRAVERSE_TYPE(AttributedType,
                  { TRY_TO(TraverseType(T->getModifiedType())); })
DEF_TRAVERSE_TYPE(ParenType, { TRY_TO(TraverseType(T->getInnerType())); })
DEF_TRAVERSE_TYPE(ElaboratedType, {
  if (T->getQualifier()) {
    TRY_TO(TraverseNestedNameSpecifier(T->getQualifier()));
  }
  TRY_TO(TraverseType(T->getNamedType()));
})
DEF_TRAVERSE_TYPE(DependentNameType,
                  { TRY_TO(TraverseNestedNameSpecifier(T->getQualifier())); })
DEF_TRAVERSE_TYPE(DependentTemplateSpecializationType, {
  TRY_TO(TraverseNestedNameSpecifier(T->getQualifier()));
  TRY_TO(TraverseTemplateArguments(T->getArgs(), T->getNumArgs()));
})
DEF_TRAVERSE_TYPE(PackExpansionType, { TRY_TO(TraverseType(T->getPattern())); })
DEF_TRAVERSE_TYPE(ObjCTypeParamType, {})
DEF_TRAVERSE_TYPE(ObjCInterfaceType, {})
DEF_TRAVERSE_TYPE(ObjCObjectType, {
  // We have to watch out here because an ObjCInterfaceType's base
  // type is itself.
  if (T->getBaseType().getTypePtr() != T)
    TRY_TO(TraverseType(T->getBaseType()));
  for (auto typeArg : T->getTypeArgsAsWritten()) {
    TRY_TO(TraverseType(typeArg));
  }
})
DEF_TRAVERSE_TYPE(ObjCObjectPointerType,
                  { TRY_TO(TraverseType(T->getPointeeType())); })
DEF_TRAVERSE_TYPE(AtomicType, { TRY_TO(TraverseType(T->getValueType())); })
DEF_TRAVERSE_TYPE(PipeType, { TRY_TO(TraverseType(T->getElementType())); })
#undef DEF_TRAVERSE_TYPE
// ----------------- TypeLoc traversal -----------------
// This macro makes available a variable TL, the passed-in TypeLoc.
// If requested, it calls WalkUpFrom* for the Type in the given TypeLoc,
// in addition to WalkUpFrom* for the TypeLoc itself, such that existing
// clients that override the WalkUpFrom*Type() and/or Visit*Type() methods
// continue to work.
#define DEF_TRAVERSE_TYPELOC(TYPE, CODE) \
  template <typename Derived> \
  bool RecursiveASTVisitor<Derived>::Traverse##TYPE##Loc(TYPE##Loc TL) { \
    if (getDerived().shouldWalkTypesOfTypeLocs()) \
      TRY_TO(WalkUpFrom##TYPE(const_cast<TYPE *>(TL.getTypePtr()))); \
    TRY_TO(WalkUpFrom##TYPE##Loc(TL)); \
    { CODE; } \
    return true; \
  }
template <typename Derived>
bool
RecursiveASTVisitor<Derived>::TraverseQualifiedTypeLoc(QualifiedTypeLoc TL) {
  // Move this over to the 'main' typeloc tree. Note that this is a
  // move -- we pretend that we were really looking at the unqualified
  // typeloc all along -- rather than a recursion, so we don't follow
  // the normal CRTP plan of going through
  // getDerived().TraverseTypeLoc. If we did, we'd be traversing
  // twice for the same type (once as a QualifiedTypeLoc version of
  // the type, once as an UnqualifiedTypeLoc version of the type),
  // which in effect means we'd call VisitTypeLoc twice with the
  // 'same' type. This solves that problem, at the cost of never
  // seeing the qualified version of the type (unless the client
  // subclasses TraverseQualifiedTypeLoc themselves). It's not a
  // perfect solution. A perfect solution probably requires making
  // QualifiedTypeLoc a wrapper around TypeLoc -- like QualType is a
  // wrapper around Type* -- rather than being its own class in the
  // type hierarchy.
  return TraverseTypeLoc(TL.getUnqualifiedLoc());
}
DEF_TRAVERSE_TYPELOC(BuiltinType, {})
// FIXME: ComplexTypeLoc is unfinished
DEF_TRAVERSE_TYPELOC(ComplexType, {
  TRY_TO(TraverseType(TL.getTypePtr()->getElementType()));
})
DEF_TRAVERSE_TYPELOC(PointerType,
                     { TRY_TO(TraverseTypeLoc(TL.getPointeeLoc())); })
DEF_TRAVERSE_TYPELOC(BlockPointerType,
                     { TRY_TO(TraverseTypeLoc(TL.getPointeeLoc())); })
DEF_TRAVERSE_TYPELOC(LValueReferenceType,
                     { TRY_TO(TraverseTypeLoc(TL.getPointeeLoc())); })
DEF_TRAVERSE_TYPELOC(RValueReferenceType,
                     { TRY_TO(TraverseTypeLoc(TL.getPointeeLoc())); })
// FIXME: location of base class?
// We traverse this in the type case as well, but how is it not reached through
// the pointee type?
DEF_TRAVERSE_TYPELOC(MemberPointerType, {
  TRY_TO(TraverseType(QualType(TL.getTypePtr()->getClass(), 0)));
  TRY_TO(TraverseTypeLoc(TL.getPointeeLoc()));
})
DEF_TRAVERSE_TYPELOC(AdjustedType,
                     { TRY_TO(TraverseTypeLoc(TL.getOriginalLoc())); })
DEF_TRAVERSE_TYPELOC(DecayedType,
                     { TRY_TO(TraverseTypeLoc(TL.getOriginalLoc())); })
template <typename Derived>
bool RecursiveASTVisitor<Derived>::TraverseArrayTypeLocHelper(ArrayTypeLoc TL) {
  // This isn't available for ArrayType, but is for the ArrayTypeLoc.
  TRY_TO(TraverseStmt(TL.getSizeExpr()));
  return true;
}
// Array TypeLocs carry the size expression (the bare ArrayType does
// not), so each variant routes through TraverseArrayTypeLocHelper.
DEF_TRAVERSE_TYPELOC(ConstantArrayType, {
  TRY_TO(TraverseTypeLoc(TL.getElementLoc()));
  return TraverseArrayTypeLocHelper(TL);
})
DEF_TRAVERSE_TYPELOC(IncompleteArrayType, {
  TRY_TO(TraverseTypeLoc(TL.getElementLoc()));
  return TraverseArrayTypeLocHelper(TL);
})
DEF_TRAVERSE_TYPELOC(VariableArrayType, {
  TRY_TO(TraverseTypeLoc(TL.getElementLoc()));
  return TraverseArrayTypeLocHelper(TL);
})
DEF_TRAVERSE_TYPELOC(DependentSizedArrayType, {
  TRY_TO(TraverseTypeLoc(TL.getElementLoc()));
  return TraverseArrayTypeLocHelper(TL);
})
DEF_TRAVERSE_TYPELOC(DependentAddressSpaceType, {
  TRY_TO(TraverseStmt(TL.getTypePtr()->getAddrSpaceExpr()));
  TRY_TO(TraverseType(TL.getTypePtr()->getPointeeType()));
})
// FIXME: order? why not size expr first?
// FIXME: base VectorTypeLoc is unfinished
DEF_TRAVERSE_TYPELOC(DependentSizedExtVectorType, {
  if (TL.getTypePtr()->getSizeExpr())
    TRY_TO(TraverseStmt(TL.getTypePtr()->getSizeExpr()));
  TRY_TO(TraverseType(TL.getTypePtr()->getElementType()));
})
// FIXME: VectorTypeLoc is unfinished
DEF_TRAVERSE_TYPELOC(VectorType, {
  TRY_TO(TraverseType(TL.getTypePtr()->getElementType()));
})
DEF_TRAVERSE_TYPELOC(DependentVectorType, {
  if (TL.getTypePtr()->getSizeExpr())
    TRY_TO(TraverseStmt(TL.getTypePtr()->getSizeExpr()));
  TRY_TO(TraverseType(TL.getTypePtr()->getElementType()));
})
// FIXME: size and attributes
// FIXME: base VectorTypeLoc is unfinished
DEF_TRAVERSE_TYPELOC(ExtVectorType, {
  TRY_TO(TraverseType(TL.getTypePtr()->getElementType()));
})
DEF_TRAVERSE_TYPELOC(FunctionNoProtoType,
                     { TRY_TO(TraverseTypeLoc(TL.getReturnLoc())); })
// FIXME: location of exception specifications (attributes?)
DEF_TRAVERSE_TYPELOC(FunctionProtoType, {
  TRY_TO(TraverseTypeLoc(TL.getReturnLoc()));
  const FunctionProtoType *T = TL.getTypePtr();
  // Prefer the parameter decls recorded on the TypeLoc; fall back to
  // the bare parameter types when a decl is missing.
  for (unsigned I = 0, E = TL.getNumParams(); I != E; ++I) {
    if (TL.getParam(I)) {
      TRY_TO(TraverseDecl(TL.getParam(I)));
    } else if (I < T->getNumParams()) {
      TRY_TO(TraverseType(T->getParamType(I)));
    }
  }
  for (const auto &E : T->exceptions()) {
    TRY_TO(TraverseType(E));
  }
  if (Expr *NE = T->getNoexceptExpr())
    TRY_TO(TraverseStmt(NE));
})
DEF_TRAVERSE_TYPELOC(UnresolvedUsingType, {})
DEF_TRAVERSE_TYPELOC(TypedefType, {})
DEF_TRAVERSE_TYPELOC(TypeOfExprType,
                     { TRY_TO(TraverseStmt(TL.getUnderlyingExpr())); })
DEF_TRAVERSE_TYPELOC(TypeOfType, {
  TRY_TO(TraverseTypeLoc(TL.getUnderlyingTInfo()->getTypeLoc()));
})
// FIXME: location of underlying expr
DEF_TRAVERSE_TYPELOC(DecltypeType, {
  TRY_TO(TraverseStmt(TL.getTypePtr()->getUnderlyingExpr()));
})
DEF_TRAVERSE_TYPELOC(UnaryTransformType, {
  TRY_TO(TraverseTypeLoc(TL.getUnderlyingTInfo()->getTypeLoc()));
})
DEF_TRAVERSE_TYPELOC(AutoType, {
  TRY_TO(TraverseType(TL.getTypePtr()->getDeducedType()));
})
DEF_TRAVERSE_TYPELOC(DeducedTemplateSpecializationType, {
  TRY_TO(TraverseTemplateName(TL.getTypePtr()->getTemplateName()));
  TRY_TO(TraverseType(TL.getTypePtr()->getDeducedType()));
})
DEF_TRAVERSE_TYPELOC(RecordType, {})
DEF_TRAVERSE_TYPELOC(EnumType, {})
DEF_TRAVERSE_TYPELOC(TemplateTypeParmType, {})
DEF_TRAVERSE_TYPELOC(SubstTemplateTypeParmType, {
  TRY_TO(TraverseType(TL.getTypePtr()->getReplacementType()));
})
DEF_TRAVERSE_TYPELOC(SubstTemplateTypeParmPackType, {
  TRY_TO(TraverseTemplateArgument(TL.getTypePtr()->getArgumentPack()));
})
// FIXME: use the loc for the template name?
DEF_TRAVERSE_TYPELOC(TemplateSpecializationType, {
  TRY_TO(TraverseTemplateName(TL.getTypePtr()->getTemplateName()));
  for (unsigned I = 0, E = TL.getNumArgs(); I != E; ++I) {
    TRY_TO(TraverseTemplateArgumentLoc(TL.getArgLoc(I)));
  }
})
DEF_TRAVERSE_TYPELOC(InjectedClassNameType, {})
DEF_TRAVERSE_TYPELOC(ParenType, { TRY_TO(TraverseTypeLoc(TL.getInnerLoc())); })
DEF_TRAVERSE_TYPELOC(AttributedType,
                     { TRY_TO(TraverseTypeLoc(TL.getModifiedLoc())); })
DEF_TRAVERSE_TYPELOC(ElaboratedType, {
  if (TL.getQualifierLoc()) {
    TRY_TO(TraverseNestedNameSpecifierLoc(TL.getQualifierLoc()));
  }
  TRY_TO(TraverseTypeLoc(TL.getNamedTypeLoc()));
})
DEF_TRAVERSE_TYPELOC(DependentNameType, {
  TRY_TO(TraverseNestedNameSpecifierLoc(TL.getQualifierLoc()));
})
DEF_TRAVERSE_TYPELOC(DependentTemplateSpecializationType, {
  if (TL.getQualifierLoc()) {
    TRY_TO(TraverseNestedNameSpecifierLoc(TL.getQualifierLoc()));
  }
  for (unsigned I = 0, E = TL.getNumArgs(); I != E; ++I) {
    TRY_TO(TraverseTemplateArgumentLoc(TL.getArgLoc(I)));
  }
})
DEF_TRAVERSE_TYPELOC(PackExpansionType,
                     { TRY_TO(TraverseTypeLoc(TL.getPatternLoc())); })
DEF_TRAVERSE_TYPELOC(ObjCTypeParamType, {})
DEF_TRAVERSE_TYPELOC(ObjCInterfaceType, {})
DEF_TRAVERSE_TYPELOC(ObjCObjectType, {
  // We have to watch out here because an ObjCInterfaceType's base
  // type is itself.
  if (TL.getTypePtr()->getBaseType().getTypePtr() != TL.getTypePtr())
    TRY_TO(TraverseTypeLoc(TL.getBaseLoc()));
  for (unsigned i = 0, n = TL.getNumTypeArgs(); i != n; ++i)
    TRY_TO(TraverseTypeLoc(TL.getTypeArgTInfo(i)->getTypeLoc()));
})
DEF_TRAVERSE_TYPELOC(ObjCObjectPointerType,
                     { TRY_TO(TraverseTypeLoc(TL.getPointeeLoc())); })
DEF_TRAVERSE_TYPELOC(AtomicType, { TRY_TO(TraverseTypeLoc(TL.getValueLoc())); })
DEF_TRAVERSE_TYPELOC(PipeType, { TRY_TO(TraverseTypeLoc(TL.getValueLoc())); })
#undef DEF_TRAVERSE_TYPELOC
// ----------------- Decl traversal -----------------
//
// For a Decl, we automate (in the DEF_TRAVERSE_DECL macro) traversing
// the children that come from the DeclContext associated with it.
// Therefore each Traverse* only needs to worry about children other
// than those.
template <typename Derived>
bool RecursiveASTVisitor<Derived>::canIgnoreChildDeclWhileTraversingDeclContext(
    const Decl *Child) {
  // BlockDecls and CapturedDecls are traversed through BlockExprs and
  // CapturedStmts respectively, so a DeclContext walk must not visit
  // them a second time.
  if (isa<BlockDecl>(Child))
    return true;
  return isa<CapturedDecl>(Child);
}
template <typename Derived>
bool RecursiveASTVisitor<Derived>::TraverseDeclContextHelper(DeclContext *DC) {
  // Walk every child declaration of DC, skipping the ones that are
  // reached through other AST nodes instead
  // (see canIgnoreChildDeclWhileTraversingDeclContext).
  if (!DC)
    return true;
  for (Decl *Child : DC->decls()) {
    if (canIgnoreChildDeclWhileTraversingDeclContext(Child))
      continue;
    TRY_TO(TraverseDecl(Child));
  }
  return true;
}
// This macro makes available a variable D, the passed-in decl.
// It also provides two locals the per-decl CODE may set:
//   ShouldVisitChildren - set false when CODE already visited the body,
//                         to suppress the DeclContext walk below.
//   ReturnValue         - the result propagated to the caller.
#define DEF_TRAVERSE_DECL(DECL, CODE) \
  template <typename Derived> \
  bool RecursiveASTVisitor<Derived>::Traverse##DECL(DECL *D) { \
    bool ShouldVisitChildren = true; \
    bool ReturnValue = true; \
    if (!getDerived().shouldTraversePostOrder()) \
      TRY_TO(WalkUpFrom##DECL(D)); \
    { CODE; } \
    if (ReturnValue && ShouldVisitChildren) \
      TRY_TO(TraverseDeclContextHelper(dyn_cast<DeclContext>(D))); \
    if (ReturnValue && getDerived().shouldTraversePostOrder()) \
      TRY_TO(WalkUpFrom##DECL(D)); \
    return ReturnValue; \
  }
DEF_TRAVERSE_DECL(AccessSpecDecl, {})
DEF_TRAVERSE_DECL(BlockDecl, {
  if (TypeSourceInfo *TInfo = D->getSignatureAsWritten())
    TRY_TO(TraverseTypeLoc(TInfo->getTypeLoc()));
  TRY_TO(TraverseStmt(D->getBody()));
  for (const auto &I : D->captures()) {
    if (I.hasCopyExpr()) {
      TRY_TO(TraverseStmt(I.getCopyExpr()));
    }
  }
  // The body was traversed above; don't walk it again as a DeclContext.
  ShouldVisitChildren = false;
})
DEF_TRAVERSE_DECL(CapturedDecl, {
  TRY_TO(TraverseStmt(D->getBody()));
  ShouldVisitChildren = false;
})
DEF_TRAVERSE_DECL(EmptyDecl, {})
DEF_TRAVERSE_DECL(FileScopeAsmDecl,
                  { TRY_TO(TraverseStmt(D->getAsmString())); })
DEF_TRAVERSE_DECL(ImportDecl, {})
DEF_TRAVERSE_DECL(FriendDecl, {
  // Friend is either decl or a type.
  if (D->getFriendType())
    TRY_TO(TraverseTypeLoc(D->getFriendType()->getTypeLoc()));
  else
    TRY_TO(TraverseDecl(D->getFriendDecl()));
})
DEF_TRAVERSE_DECL(FriendTemplateDecl, {
  if (D->getFriendType())
    TRY_TO(TraverseTypeLoc(D->getFriendType()->getTypeLoc()));
  else
    TRY_TO(TraverseDecl(D->getFriendDecl()));
  for (unsigned I = 0, E = D->getNumTemplateParameters(); I < E; ++I) {
    TemplateParameterList *TPL = D->getTemplateParameterList(I);
    for (TemplateParameterList::iterator ITPL = TPL->begin(), ETPL = TPL->end();
         ITPL != ETPL; ++ITPL) {
      TRY_TO(TraverseDecl(*ITPL));
    }
  }
})
DEF_TRAVERSE_DECL(ClassScopeFunctionSpecializationDecl, {
  TRY_TO(TraverseDecl(D->getSpecialization()));
  if (D->hasExplicitTemplateArgs()) {
    const TemplateArgumentListInfo &args = D->templateArgs();
    TRY_TO(TraverseTemplateArgumentLocsHelper(args.getArgumentArray(),
                                              args.size()));
  }
})
DEF_TRAVERSE_DECL(LinkageSpecDecl, {})
DEF_TRAVERSE_DECL(ExportDecl, {})
DEF_TRAVERSE_DECL(ObjCPropertyImplDecl, {// FIXME: implement this
})
DEF_TRAVERSE_DECL(StaticAssertDecl, {
  TRY_TO(TraverseStmt(D->getAssertExpr()));
  TRY_TO(TraverseStmt(D->getMessage()));
})
DEF_TRAVERSE_DECL(
    TranslationUnitDecl,
    {// Code in an unnamed namespace shows up automatically in
     // decls_begin()/decls_end(). Thus we don't need to recurse on
     // D->getAnonymousNamespace().
    })
DEF_TRAVERSE_DECL(PragmaCommentDecl, {})
DEF_TRAVERSE_DECL(PragmaDetectMismatchDecl, {})
DEF_TRAVERSE_DECL(ExternCContextDecl, {})
DEF_TRAVERSE_DECL(NamespaceAliasDecl, {
  TRY_TO(TraverseNestedNameSpecifierLoc(D->getQualifierLoc()));
  // We shouldn't traverse an aliased namespace, since it will be
  // defined (and, therefore, traversed) somewhere else.
  ShouldVisitChildren = false;
})
DEF_TRAVERSE_DECL(LabelDecl, {// There is no code in a LabelDecl.
})
DEF_TRAVERSE_DECL(
    NamespaceDecl,
    {// Code in an unnamed namespace shows up automatically in
     // decls_begin()/decls_end(). Thus we don't need to recurse on
     // D->getAnonymousNamespace().
    })
DEF_TRAVERSE_DECL(ObjCCompatibleAliasDecl, {// FIXME: implement
})
DEF_TRAVERSE_DECL(ObjCCategoryDecl, {// FIXME: implement
  if (ObjCTypeParamList *typeParamList = D->getTypeParamList()) {
    for (auto typeParam : *typeParamList) {
      TRY_TO(TraverseObjCTypeParamDecl(typeParam));
    }
  }
})
DEF_TRAVERSE_DECL(ObjCCategoryImplDecl, {// FIXME: implement
})
DEF_TRAVERSE_DECL(ObjCImplementationDecl, {// FIXME: implement
})
DEF_TRAVERSE_DECL(ObjCInterfaceDecl, {// FIXME: implement
  if (ObjCTypeParamList *typeParamList = D->getTypeParamListAsWritten()) {
    for (auto typeParam : *typeParamList) {
      TRY_TO(TraverseObjCTypeParamDecl(typeParam));
    }
  }
  if (TypeSourceInfo *superTInfo = D->getSuperClassTInfo()) {
    TRY_TO(TraverseTypeLoc(superTInfo->getTypeLoc()));
  }
})
DEF_TRAVERSE_DECL(ObjCProtocolDecl, {// FIXME: implement
})
DEF_TRAVERSE_DECL(ObjCMethodDecl, {
  if (D->getReturnTypeSourceInfo()) {
    TRY_TO(TraverseTypeLoc(D->getReturnTypeSourceInfo()->getTypeLoc()));
  }
  for (ParmVarDecl *Parameter : D->parameters()) {
    TRY_TO(TraverseDecl(Parameter));
  }
  if (D->isThisDeclarationADefinition()) {
    TRY_TO(TraverseStmt(D->getBody()));
  }
  ShouldVisitChildren = false;
})
DEF_TRAVERSE_DECL(ObjCTypeParamDecl, {
  if (D->hasExplicitBound()) {
    TRY_TO(TraverseTypeLoc(D->getTypeSourceInfo()->getTypeLoc()));
    // We shouldn't traverse D->getTypeForDecl(); it's a result of
    // declaring the type alias, not something that was written in the
    // source.
  }
})
DEF_TRAVERSE_DECL(ObjCPropertyDecl, {
  if (D->getTypeSourceInfo())
    TRY_TO(TraverseTypeLoc(D->getTypeSourceInfo()->getTypeLoc()));
  else
    TRY_TO(TraverseType(D->getType()));
  ShouldVisitChildren = false;
})
DEF_TRAVERSE_DECL(UsingDecl, {
  TRY_TO(TraverseNestedNameSpecifierLoc(D->getQualifierLoc()));
  TRY_TO(TraverseDeclarationNameInfo(D->getNameInfo()));
})
DEF_TRAVERSE_DECL(UsingPackDecl, {})
DEF_TRAVERSE_DECL(UsingDirectiveDecl, {
  TRY_TO(TraverseNestedNameSpecifierLoc(D->getQualifierLoc()));
})
DEF_TRAVERSE_DECL(UsingShadowDecl, {})
DEF_TRAVERSE_DECL(ConstructorUsingShadowDecl, {})
DEF_TRAVERSE_DECL(OMPThreadPrivateDecl, {
  for (auto *I : D->varlists()) {
    TRY_TO(TraverseStmt(I));
  }
})
DEF_TRAVERSE_DECL(OMPDeclareReductionDecl, {
  TRY_TO(TraverseStmt(D->getCombiner()));
  if (auto *Initializer = D->getInitializer())
    TRY_TO(TraverseStmt(Initializer));
  TRY_TO(TraverseType(D->getType()));
  return true;
})
DEF_TRAVERSE_DECL(OMPCapturedExprDecl, { TRY_TO(TraverseVarHelper(D)); })
// A helper method for TemplateDecl's children: visits each parameter
// declaration in the given template parameter list, if any.
template <typename Derived>
bool RecursiveASTVisitor<Derived>::TraverseTemplateParameterListHelper(
    TemplateParameterList *TPL) {
  if (!TPL)
    return true;
  for (NamedDecl *ParamDecl : *TPL)
    TRY_TO(TraverseDecl(ParamDecl));
  return true;
}
// Traverses every template parameter list attached to D (a declaration
// can carry several for out-of-line member definitions).
template <typename Derived>
template <typename T>
bool RecursiveASTVisitor<Derived>::TraverseDeclTemplateParameterLists(T *D) {
  for (unsigned i = 0; i < D->getNumTemplateParameterLists(); i++) {
    TemplateParameterList *TPL = D->getTemplateParameterList(i);
    // Use TRY_TO so a derived visitor returning false aborts the whole
    // traversal; previously the helper's result was silently discarded,
    // unlike every other call site of this helper in this file.
    TRY_TO(TraverseTemplateParameterListHelper(TPL));
  }
  return true;
}
// Traverses the implicit instantiations of a class template.  Explicit
// specializations and instantiations have dedicated AST nodes and are
// visited where those nodes appear, so they are skipped here.
template <typename Derived>
bool RecursiveASTVisitor<Derived>::TraverseTemplateInstantiations(
    ClassTemplateDecl *D) {
  for (auto *SD : D->specializations()) {
    for (auto *RD : SD->redecls()) {
      // We don't want to visit injected-class-names in this traversal.
      if (cast<CXXRecordDecl>(RD)->isInjectedClassName())
        continue;
      switch (
          cast<ClassTemplateSpecializationDecl>(RD)->getSpecializationKind()) {
      // Visit the implicit instantiations with the requested pattern.
      case TSK_Undeclared:
      case TSK_ImplicitInstantiation:
        TRY_TO(TraverseDecl(RD));
        break;
      // We don't need to do anything on an explicit instantiation
      // or explicit specialization because there will be an explicit
      // node for it elsewhere.
      case TSK_ExplicitInstantiationDeclaration:
      case TSK_ExplicitInstantiationDefinition:
      case TSK_ExplicitSpecialization:
        break;
      }
    }
  }
  return true;
}
// Traverses the implicit instantiations of a variable template; mirrors
// the ClassTemplateDecl overload above (explicit specializations and
// instantiations are handled at their own AST nodes).
template <typename Derived>
bool RecursiveASTVisitor<Derived>::TraverseTemplateInstantiations(
    VarTemplateDecl *D) {
  for (auto *SD : D->specializations()) {
    for (auto *RD : SD->redecls()) {
      switch (
          cast<VarTemplateSpecializationDecl>(RD)->getSpecializationKind()) {
      case TSK_Undeclared:
      case TSK_ImplicitInstantiation:
        TRY_TO(TraverseDecl(RD));
        break;
      case TSK_ExplicitInstantiationDeclaration:
      case TSK_ExplicitInstantiationDefinition:
      case TSK_ExplicitSpecialization:
        break;
      }
    }
  }
  return true;
}
// A helper method for traversing the implicit (and, for now, explicit)
// instantiations of a function template while skipping its explicit
// specializations.
template <typename Derived>
bool RecursiveASTVisitor<Derived>::TraverseTemplateInstantiations(
    FunctionTemplateDecl *D) {
  for (auto *FD : D->specializations()) {
    for (auto *RD : FD->redecls()) {
      switch (RD->getTemplateSpecializationKind()) {
      case TSK_Undeclared:
      case TSK_ImplicitInstantiation:
        // We don't know what kind of FunctionDecl this is.
        TRY_TO(TraverseDecl(RD));
        break;
      // FIXME: For now traverse explicit instantiations here. Change that
      // once they are represented as dedicated nodes in the AST.
      case TSK_ExplicitInstantiationDeclaration:
      case TSK_ExplicitInstantiationDefinition:
        TRY_TO(TraverseDecl(RD));
        break;
      case TSK_ExplicitSpecialization:
        break;
      }
    }
  }
  return true;
}
// This macro unifies the traversal of class, variable and function
// template declarations.
#define DEF_TRAVERSE_TMPL_DECL(TMPLDECLKIND)                                   \
  DEF_TRAVERSE_DECL(TMPLDECLKIND##TemplateDecl, {                              \
    TRY_TO(TraverseTemplateParameterListHelper(D->getTemplateParameters()));   \
    TRY_TO(TraverseDecl(D->getTemplatedDecl()));                               \
                                                                               \
    /* By default, we do not traverse the instantiations of                    \
       class templates since they do not appear in the user code. The          \
       following code optionally traverses them.                               \
                                                                               \
       We only traverse the class instantiations when we see the canonical     \
       declaration of the template, to ensure we only visit them once. */      \
    if (getDerived().shouldVisitTemplateInstantiations() &&                    \
        D == D->getCanonicalDecl())                                            \
      TRY_TO(TraverseTemplateInstantiations(D));                               \
                                                                               \
    /* Note that getInstantiatedFromMemberTemplate() is just a link            \
       from a template instantiation back to the template from which           \
       it was instantiated, and thus should not be traversed. */               \
  })
// Instantiate the unified traversal for the three template-decl kinds.
DEF_TRAVERSE_TMPL_DECL(Class)
DEF_TRAVERSE_TMPL_DECL(Var)
DEF_TRAVERSE_TMPL_DECL(Function)
// Traversals for template parameters and type-introducing declarations.
DEF_TRAVERSE_DECL(TemplateTemplateParmDecl, {
  // D is the "T" in something like
  //   template <template <typename> class T> class container { };
  TRY_TO(TraverseDecl(D->getTemplatedDecl()));
  if (D->hasDefaultArgument() && !D->defaultArgumentWasInherited()) {
    TRY_TO(TraverseTemplateArgumentLoc(D->getDefaultArgument()));
  }
  TRY_TO(TraverseTemplateParameterListHelper(D->getTemplateParameters()));
})
DEF_TRAVERSE_DECL(BuiltinTemplateDecl, {
  TRY_TO(TraverseTemplateParameterListHelper(D->getTemplateParameters()));
})
DEF_TRAVERSE_DECL(TemplateTypeParmDecl, {
  // D is the "T" in something like "template<typename T> class vector;"
  if (D->getTypeForDecl())
    TRY_TO(TraverseType(QualType(D->getTypeForDecl(), 0)));
  if (D->hasDefaultArgument() && !D->defaultArgumentWasInherited())
    TRY_TO(TraverseTypeLoc(D->getDefaultArgumentInfo()->getTypeLoc()));
})
DEF_TRAVERSE_DECL(TypedefDecl, {
  TRY_TO(TraverseTypeLoc(D->getTypeSourceInfo()->getTypeLoc()));
  // We shouldn't traverse D->getTypeForDecl(); it's a result of
  // declaring the typedef, not something that was written in the
  // source.
})
DEF_TRAVERSE_DECL(TypeAliasDecl, {
  TRY_TO(TraverseTypeLoc(D->getTypeSourceInfo()->getTypeLoc()));
  // We shouldn't traverse D->getTypeForDecl(); it's a result of
  // declaring the type alias, not something that was written in the
  // source.
})
DEF_TRAVERSE_DECL(TypeAliasTemplateDecl, {
  TRY_TO(TraverseDecl(D->getTemplatedDecl()));
  TRY_TO(TraverseTemplateParameterListHelper(D->getTemplateParameters()));
})
DEF_TRAVERSE_DECL(UnresolvedUsingTypenameDecl, {
  // A dependent using declaration which was marked with 'typename'.
  //   template<class T> class A : public B<T> { using typename B<T>::foo; };
  TRY_TO(TraverseNestedNameSpecifierLoc(D->getQualifierLoc()));
  // We shouldn't traverse D->getTypeForDecl(); it's a result of
  // declaring the type, not something that was written in the
  // source.
})
DEF_TRAVERSE_DECL(EnumDecl, {
  TRY_TO(TraverseDeclTemplateParameterLists(D));
  if (D->getTypeForDecl())
    TRY_TO(TraverseType(QualType(D->getTypeForDecl(), 0)));
  TRY_TO(TraverseNestedNameSpecifierLoc(D->getQualifierLoc()));
  // The enumerators are already traversed by
  // decls_begin()/decls_end().
})
// Helper methods for RecordDecl and its children.
// Shared by the RecordDecl and CXXRecordDecl traversals below.
template <typename Derived>
bool RecursiveASTVisitor<Derived>::TraverseRecordHelper(RecordDecl *D) {
  // We shouldn't traverse D->getTypeForDecl(); it's a result of
  // declaring the type, not something that was written in the source.
  TRY_TO(TraverseDeclTemplateParameterLists(D));
  TRY_TO(TraverseNestedNameSpecifierLoc(D->getQualifierLoc()));
  return true;
}
// Traverses the written type of a single base-class specifier.
template <typename Derived>
bool RecursiveASTVisitor<Derived>::TraverseCXXBaseSpecifier(
    const CXXBaseSpecifier &Base) {
  TRY_TO(TraverseTypeLoc(Base.getTypeSourceInfo()->getTypeLoc()));
  return true;
}
// Traverses the common record pieces plus, for a complete definition,
// each base-class specifier.
template <typename Derived>
bool RecursiveASTVisitor<Derived>::TraverseCXXRecordHelper(CXXRecordDecl *D) {
  if (!TraverseRecordHelper(D))
    return false;
  if (D->isCompleteDefinition()) {
    for (const auto &I : D->bases()) {
      TRY_TO(TraverseCXXBaseSpecifier(I));
    }
    // We don't traverse the friends or the conversions, as they are
    // already in decls_begin()/decls_end().
  }
  return true;
}
DEF_TRAVERSE_DECL(RecordDecl, { TRY_TO(TraverseRecordHelper(D)); })
DEF_TRAVERSE_DECL(CXXRecordDecl, { TRY_TO(TraverseCXXRecordHelper(D)); })
#define DEF_TRAVERSE_TMPL_SPEC_DECL(TMPLDECLKIND)                              \
  DEF_TRAVERSE_DECL(TMPLDECLKIND##TemplateSpecializationDecl, {                \
    /* For implicit instantiations ("set<int> x;"), we don't want to           \
       recurse at all, since the instantiated template isn't written in        \
       the source code anywhere. (Note the instantiated *type* --              \
       set<int> -- is written, and will still get a callback of               \
       TemplateSpecializationType). For explicit instantiations               \
       ("template set<int>;"), we do need a callback, since this              \
       is the only callback that's made for this instantiation.               \
       We use getTypeAsWritten() to distinguish. */                            \
    if (TypeSourceInfo *TSI = D->getTypeAsWritten())                           \
      TRY_TO(TraverseTypeLoc(TSI->getTypeLoc()));                              \
                                                                               \
    TRY_TO(TraverseNestedNameSpecifierLoc(D->getQualifierLoc()));              \
    if (!getDerived().shouldVisitTemplateInstantiations() &&                   \
        D->getTemplateSpecializationKind() != TSK_ExplicitSpecialization)      \
      /* Returning from here skips traversing the                              \
         declaration context of the *TemplateSpecializationDecl               \
         (embedded in the DEF_TRAVERSE_DECL() macro)                           \
         which contains the instantiated members of the template. */           \
      return true;                                                             \
  })
// Instantiate for class and variable template specializations.
DEF_TRAVERSE_TMPL_SPEC_DECL(Class)
DEF_TRAVERSE_TMPL_SPEC_DECL(Var)
// Visits the Count template-argument locations stored contiguously
// starting at TAL.
template <typename Derived>
bool RecursiveASTVisitor<Derived>::TraverseTemplateArgumentLocsHelper(
    const TemplateArgumentLoc *TAL, unsigned Count) {
  for (const TemplateArgumentLoc *Arg = TAL, *ArgEnd = TAL + Count;
       Arg != ArgEnd; ++Arg) {
    TRY_TO(TraverseTemplateArgumentLoc(*Arg));
  }
  return true;
}
#define DEF_TRAVERSE_TMPL_PART_SPEC_DECL(TMPLDECLKIND, DECLKIND)               \
  DEF_TRAVERSE_DECL(TMPLDECLKIND##TemplatePartialSpecializationDecl, {         \
    /* The partial specialization. */                                          \
    if (TemplateParameterList *TPL = D->getTemplateParameters()) {             \
      for (TemplateParameterList::iterator I = TPL->begin(), E = TPL->end();   \
           I != E; ++I) {                                                      \
        TRY_TO(TraverseDecl(*I));                                              \
      }                                                                        \
    }                                                                          \
    /* The args that remain unspecialized. */                                  \
    TRY_TO(TraverseTemplateArgumentLocsHelper(                                 \
        D->getTemplateArgsAsWritten()->getTemplateArgs(),                      \
        D->getTemplateArgsAsWritten()->NumTemplateArgs));                      \
                                                                               \
    /* Don't need the *TemplatePartialSpecializationHelper, even               \
       though that's our parent class -- we already visit all the              \
       template args here. */                                                  \
    TRY_TO(Traverse##DECLKIND##Helper(D));                                     \
                                                                               \
    /* Instantiations will have been visited with the primary template. */     \
  })
DEF_TRAVERSE_TMPL_PART_SPEC_DECL(Class, CXXRecord)
DEF_TRAVERSE_TMPL_PART_SPEC_DECL(Var, Var)
DEF_TRAVERSE_DECL(EnumConstantDecl, { TRY_TO(TraverseStmt(D->getInitExpr())); })
DEF_TRAVERSE_DECL(UnresolvedUsingValueDecl, {
  // Like UnresolvedUsingTypenameDecl, but without the 'typename':
  //   template <class T> class A : public Base<T> { using Base<T>::foo; };
  TRY_TO(TraverseNestedNameSpecifierLoc(D->getQualifierLoc()));
  TRY_TO(TraverseDeclarationNameInfo(D->getNameInfo()));
})
DEF_TRAVERSE_DECL(IndirectFieldDecl, {})
// Common traversal for declarators: template parameter lists, the
// qualifier, and the declared type (as written when a TypeSourceInfo is
// available, otherwise the semantic type).
template <typename Derived>
bool RecursiveASTVisitor<Derived>::TraverseDeclaratorHelper(DeclaratorDecl *D) {
  TRY_TO(TraverseDeclTemplateParameterLists(D));
  TRY_TO(TraverseNestedNameSpecifierLoc(D->getQualifierLoc()));
  if (D->getTypeSourceInfo())
    TRY_TO(TraverseTypeLoc(D->getTypeSourceInfo()->getTypeLoc()));
  else
    TRY_TO(TraverseType(D->getType()));
  return true;
}
// Traversals for bindings and field-like declarations.
DEF_TRAVERSE_DECL(DecompositionDecl, {
  TRY_TO(TraverseVarHelper(D));
  for (auto *Binding : D->bindings()) {
    TRY_TO(TraverseDecl(Binding));
  }
})
DEF_TRAVERSE_DECL(BindingDecl, {
  if (getDerived().shouldVisitImplicitCode())
    TRY_TO(TraverseStmt(D->getBinding()));
})
DEF_TRAVERSE_DECL(MSPropertyDecl, { TRY_TO(TraverseDeclaratorHelper(D)); })
DEF_TRAVERSE_DECL(FieldDecl, {
  TRY_TO(TraverseDeclaratorHelper(D));
  // A field has either a bit-width or (possibly) an in-class
  // initializer, never both.
  if (D->isBitField())
    TRY_TO(TraverseStmt(D->getBitWidth()));
  else if (D->hasInClassInitializer())
    TRY_TO(TraverseStmt(D->getInClassInitializer()));
})
DEF_TRAVERSE_DECL(ObjCAtDefsFieldDecl, {
  TRY_TO(TraverseDeclaratorHelper(D));
  if (D->isBitField())
    TRY_TO(TraverseStmt(D->getBitWidth()));
  // FIXME: implement the rest.
})
DEF_TRAVERSE_DECL(ObjCIvarDecl, {
  TRY_TO(TraverseDeclaratorHelper(D));
  if (D->isBitField())
    TRY_TO(TraverseStmt(D->getBitWidth()));
  // FIXME: implement the rest.
})
// Shared traversal for FunctionDecl and its subclasses: template
// parameter lists, the qualifier, the name, any explicitly-written
// template arguments, the function type (which covers the return type
// and parameters), constructor initializers, and finally the body for
// definitions.
template <typename Derived>
bool RecursiveASTVisitor<Derived>::TraverseFunctionHelper(FunctionDecl *D) {
  TRY_TO(TraverseDeclTemplateParameterLists(D));
  TRY_TO(TraverseNestedNameSpecifierLoc(D->getQualifierLoc()));
  TRY_TO(TraverseDeclarationNameInfo(D->getNameInfo()));
  // If we're an explicit template specialization, iterate over the
  // template args that were explicitly specified.  If we were doing
  // this in typing order, we'd do it between the return type and
  // the function args, but both are handled by the FunctionTypeLoc
  // above, so we have to choose one side.  I've decided to do before.
  if (const FunctionTemplateSpecializationInfo *FTSI =
          D->getTemplateSpecializationInfo()) {
    if (FTSI->getTemplateSpecializationKind() != TSK_Undeclared &&
        FTSI->getTemplateSpecializationKind() != TSK_ImplicitInstantiation) {
      // A specialization might not have explicit template arguments if it has
      // a templated return type and concrete arguments.
      if (const ASTTemplateArgumentListInfo *TALI =
              FTSI->TemplateArgumentsAsWritten) {
        TRY_TO(TraverseTemplateArgumentLocsHelper(TALI->getTemplateArgs(),
                                                  TALI->NumTemplateArgs));
      }
    }
  }
  // Visit the function type itself, which can be either
  // FunctionNoProtoType or FunctionProtoType, or a typedef.  This
  // also covers the return type and the function parameters,
  // including exception specifications.
  if (TypeSourceInfo *TSI = D->getTypeSourceInfo()) {
    TRY_TO(TraverseTypeLoc(TSI->getTypeLoc()));
  } else if (getDerived().shouldVisitImplicitCode()) {
    // Visit parameter variable declarations of the implicit function
    // if the traverser is visiting implicit code. Parameter variable
    // declarations do not have valid TypeSourceInfo, so to visit them
    // we need to traverse the declarations explicitly.
    for (ParmVarDecl *Parameter : D->parameters()) {
      TRY_TO(TraverseDecl(Parameter));
    }
  }
  if (CXXConstructorDecl *Ctor = dyn_cast<CXXConstructorDecl>(D)) {
    // Constructor initializers.
    for (auto *I : Ctor->inits()) {
      TRY_TO(TraverseConstructorInitializer(I));
    }
  }
  if (D->isThisDeclarationADefinition()) {
    TRY_TO(TraverseStmt(D->getBody())); // Function body.
  }
  return true;
}
// All function-like declarations delegate to TraverseFunctionHelper()
// and suppress the default children() walk, which the helper covers.
DEF_TRAVERSE_DECL(FunctionDecl, {
  // We skip decls_begin/decls_end, which are already covered by
  // TraverseFunctionHelper().
  ShouldVisitChildren = false;
  ReturnValue = TraverseFunctionHelper(D);
})
DEF_TRAVERSE_DECL(CXXDeductionGuideDecl, {
  // We skip decls_begin/decls_end, which are already covered by
  // TraverseFunctionHelper().
  ShouldVisitChildren = false;
  ReturnValue = TraverseFunctionHelper(D);
})
DEF_TRAVERSE_DECL(CXXMethodDecl, {
  // We skip decls_begin/decls_end, which are already covered by
  // TraverseFunctionHelper().
  ShouldVisitChildren = false;
  ReturnValue = TraverseFunctionHelper(D);
})
DEF_TRAVERSE_DECL(CXXConstructorDecl, {
  // We skip decls_begin/decls_end, which are already covered by
  // TraverseFunctionHelper().
  ShouldVisitChildren = false;
  ReturnValue = TraverseFunctionHelper(D);
})
// CXXConversionDecl is the declaration of a type conversion operator.
// It's not a cast expression.
DEF_TRAVERSE_DECL(CXXConversionDecl, {
  // We skip decls_begin/decls_end, which are already covered by
  // TraverseFunctionHelper().
  ShouldVisitChildren = false;
  ReturnValue = TraverseFunctionHelper(D);
})
DEF_TRAVERSE_DECL(CXXDestructorDecl, {
  // We skip decls_begin/decls_end, which are already covered by
  // TraverseFunctionHelper().
  ShouldVisitChildren = false;
  ReturnValue = TraverseFunctionHelper(D);
})
// Shared by VarDecl and its subclasses: traverses the declarator pieces
// and, except for parameters, the initializer.
template <typename Derived>
bool RecursiveASTVisitor<Derived>::TraverseVarHelper(VarDecl *D) {
  TRY_TO(TraverseDeclaratorHelper(D));
  // Default params are taken care of when we traverse the ParmVarDecl.
  if (!isa<ParmVarDecl>(D) &&
      (!D->isCXXForRangeDecl() || getDerived().shouldVisitImplicitCode()))
    TRY_TO(TraverseStmt(D->getInit()));
  return true;
}
DEF_TRAVERSE_DECL(VarDecl, { TRY_TO(TraverseVarHelper(D)); })
DEF_TRAVERSE_DECL(ImplicitParamDecl, { TRY_TO(TraverseVarHelper(D)); })
DEF_TRAVERSE_DECL(NonTypeTemplateParmDecl, {
  // A non-type template parameter, e.g. "S" in template<int S> class Foo ...
  TRY_TO(TraverseDeclaratorHelper(D));
  if (D->hasDefaultArgument() && !D->defaultArgumentWasInherited())
    TRY_TO(TraverseStmt(D->getDefaultArgument()));
})
DEF_TRAVERSE_DECL(ParmVarDecl, {
  TRY_TO(TraverseVarHelper(D));
  // Traverse the uninstantiated default argument when there is one,
  // otherwise the instantiated one; unparsed defaults have no
  // expression to visit yet.
  if (D->hasDefaultArg() && D->hasUninstantiatedDefaultArg() &&
      !D->hasUnparsedDefaultArg())
    TRY_TO(TraverseStmt(D->getUninstantiatedDefaultArg()));
  if (D->hasDefaultArg() && !D->hasUninstantiatedDefaultArg() &&
      !D->hasUnparsedDefaultArg())
    TRY_TO(TraverseStmt(D->getDefaultArg()));
})
#undef DEF_TRAVERSE_DECL
// ----------------- Stmt traversal -----------------
//
// For stmts, we automate (in the DEF_TRAVERSE_STMT macro) iterating
// over the children defined in children() (every stmt defines these,
// though sometimes the range is empty).  Each individual Traverse*
// method only needs to worry about children other than those.  To see
// what children() does for a given class, see, e.g.,
//   http://clang.llvm.org/doxygen/Stmt_8cpp_source.html
// This macro makes available a variable S, the passed-in stmt.
// Within CODE, ShouldVisitChildren may be cleared to suppress the
// default children() walk, and ReturnValue may be assigned to change
// the traversal result.
#define DEF_TRAVERSE_STMT(STMT, CODE)                                          \
  template <typename Derived>                                                  \
  bool RecursiveASTVisitor<Derived>::Traverse##STMT(                           \
      STMT *S, DataRecursionQueue *Queue) {                                    \
    bool ShouldVisitChildren = true;                                           \
    bool ReturnValue = true;                                                   \
    if (!getDerived().shouldTraversePostOrder())                               \
      TRY_TO(WalkUpFrom##STMT(S));                                             \
    { CODE; }                                                                  \
    if (ShouldVisitChildren) {                                                 \
      for (Stmt * SubStmt : getDerived().getStmtChildren(S)) {                 \
        TRY_TO_TRAVERSE_OR_ENQUEUE_STMT(SubStmt);                              \
      }                                                                        \
    }                                                                          \
    if (!Queue && ReturnValue && getDerived().shouldTraversePostOrder())       \
      TRY_TO(WalkUpFrom##STMT(S));                                             \
    return ReturnValue;                                                        \
  }
// Statement traversals.
DEF_TRAVERSE_STMT(GCCAsmStmt, {
  TRY_TO_TRAVERSE_OR_ENQUEUE_STMT(S->getAsmString());
  for (unsigned I = 0, E = S->getNumInputs(); I < E; ++I) {
    TRY_TO_TRAVERSE_OR_ENQUEUE_STMT(S->getInputConstraintLiteral(I));
  }
  for (unsigned I = 0, E = S->getNumOutputs(); I < E; ++I) {
    TRY_TO_TRAVERSE_OR_ENQUEUE_STMT(S->getOutputConstraintLiteral(I));
  }
  for (unsigned I = 0, E = S->getNumClobbers(); I < E; ++I) {
    TRY_TO_TRAVERSE_OR_ENQUEUE_STMT(S->getClobberStringLiteral(I));
  }
  // children() iterates over inputExpr and outputExpr.
})
DEF_TRAVERSE_STMT(
    MSAsmStmt,
    {// FIXME: MS Asm doesn't currently parse Constraints, Clobbers, etc. Once
     // added this needs to be implemented.
    })
DEF_TRAVERSE_STMT(CXXCatchStmt, {
  TRY_TO(TraverseDecl(S->getExceptionDecl()));
  // children() iterates over the handler block.
})
DEF_TRAVERSE_STMT(DeclStmt, {
  for (auto *I : S->decls()) {
    TRY_TO(TraverseDecl(I));
  }
  // Suppress the default iteration over children() by
  // returning.  Here's why: A DeclStmt looks like 'type var [=
  // initializer]'.  The decls above already traverse over the
  // initializers, so we don't have to do it again (which
  // children() would do).
  ShouldVisitChildren = false;
})
// These non-expr stmts (most of them), do not need any action except
// iterating over the children.
DEF_TRAVERSE_STMT(BreakStmt, {})
DEF_TRAVERSE_STMT(CXXTryStmt, {})
DEF_TRAVERSE_STMT(CaseStmt, {})
DEF_TRAVERSE_STMT(CompoundStmt, {})
DEF_TRAVERSE_STMT(ContinueStmt, {})
DEF_TRAVERSE_STMT(DefaultStmt, {})
DEF_TRAVERSE_STMT(DoStmt, {})
DEF_TRAVERSE_STMT(ForStmt, {})
DEF_TRAVERSE_STMT(GotoStmt, {})
DEF_TRAVERSE_STMT(IfStmt, {})
DEF_TRAVERSE_STMT(IndirectGotoStmt, {})
DEF_TRAVERSE_STMT(LabelStmt, {})
DEF_TRAVERSE_STMT(AttributedStmt, {})
DEF_TRAVERSE_STMT(NullStmt, {})
DEF_TRAVERSE_STMT(ObjCAtCatchStmt, {})
DEF_TRAVERSE_STMT(ObjCAtFinallyStmt, {})
DEF_TRAVERSE_STMT(ObjCAtSynchronizedStmt, {})
DEF_TRAVERSE_STMT(ObjCAtThrowStmt, {})
DEF_TRAVERSE_STMT(ObjCAtTryStmt, {})
DEF_TRAVERSE_STMT(ObjCForCollectionStmt, {})
DEF_TRAVERSE_STMT(ObjCAutoreleasePoolStmt, {})
DEF_TRAVERSE_STMT(CXXForRangeStmt, {
  if (!getDerived().shouldVisitImplicitCode()) {
    TRY_TO_TRAVERSE_OR_ENQUEUE_STMT(S->getLoopVarStmt());
    TRY_TO_TRAVERSE_OR_ENQUEUE_STMT(S->getRangeInit());
    TRY_TO_TRAVERSE_OR_ENQUEUE_STMT(S->getBody());
    // Visit everything else only if shouldVisitImplicitCode().
    ShouldVisitChildren = false;
  }
})
DEF_TRAVERSE_STMT(MSDependentExistsStmt, {
  TRY_TO(TraverseNestedNameSpecifierLoc(S->getQualifierLoc()));
  TRY_TO(TraverseDeclarationNameInfo(S->getNameInfo()));
})
DEF_TRAVERSE_STMT(ReturnStmt, {})
DEF_TRAVERSE_STMT(SwitchStmt, {})
DEF_TRAVERSE_STMT(WhileStmt, {})
// Reference expressions: traverse the qualifier, name, and any
// explicit template arguments.
DEF_TRAVERSE_STMT(CXXDependentScopeMemberExpr, {
  TRY_TO(TraverseNestedNameSpecifierLoc(S->getQualifierLoc()));
  TRY_TO(TraverseDeclarationNameInfo(S->getMemberNameInfo()));
  if (S->hasExplicitTemplateArgs()) {
    TRY_TO(TraverseTemplateArgumentLocsHelper(S->getTemplateArgs(),
                                              S->getNumTemplateArgs()));
  }
})
DEF_TRAVERSE_STMT(DeclRefExpr, {
  TRY_TO(TraverseNestedNameSpecifierLoc(S->getQualifierLoc()));
  TRY_TO(TraverseDeclarationNameInfo(S->getNameInfo()));
  TRY_TO(TraverseTemplateArgumentLocsHelper(S->getTemplateArgs(),
                                            S->getNumTemplateArgs()));
})
DEF_TRAVERSE_STMT(DependentScopeDeclRefExpr, {
  TRY_TO(TraverseNestedNameSpecifierLoc(S->getQualifierLoc()));
  TRY_TO(TraverseDeclarationNameInfo(S->getNameInfo()));
  if (S->hasExplicitTemplateArgs()) {
    TRY_TO(TraverseTemplateArgumentLocsHelper(S->getTemplateArgs(),
                                              S->getNumTemplateArgs()));
  }
})
DEF_TRAVERSE_STMT(MemberExpr, {
  TRY_TO(TraverseNestedNameSpecifierLoc(S->getQualifierLoc()));
  TRY_TO(TraverseDeclarationNameInfo(S->getMemberNameInfo()));
  TRY_TO(TraverseTemplateArgumentLocsHelper(S->getTemplateArgs(),
                                            S->getNumTemplateArgs()));
})
DEF_TRAVERSE_STMT(
    ImplicitCastExpr,
    {// We don't traverse the cast type, as it's not written in the
     // source code.
    })
// Explicit casts, by contrast, traverse the type as written.
DEF_TRAVERSE_STMT(CStyleCastExpr, {
  TRY_TO(TraverseTypeLoc(S->getTypeInfoAsWritten()->getTypeLoc()));
})
DEF_TRAVERSE_STMT(CXXFunctionalCastExpr, {
  TRY_TO(TraverseTypeLoc(S->getTypeInfoAsWritten()->getTypeLoc()));
})
DEF_TRAVERSE_STMT(CXXConstCastExpr, {
  TRY_TO(TraverseTypeLoc(S->getTypeInfoAsWritten()->getTypeLoc()));
})
DEF_TRAVERSE_STMT(CXXDynamicCastExpr, {
  TRY_TO(TraverseTypeLoc(S->getTypeInfoAsWritten()->getTypeLoc()));
})
DEF_TRAVERSE_STMT(CXXReinterpretCastExpr, {
  TRY_TO(TraverseTypeLoc(S->getTypeInfoAsWritten()->getTypeLoc()));
})
DEF_TRAVERSE_STMT(CXXStaticCastExpr, {
  TRY_TO(TraverseTypeLoc(S->getTypeInfoAsWritten()->getTypeLoc()));
})
// Traverses one form (syntactic or semantic) of an InitListExpr; a
// null form is silently skipped.
template <typename Derived>
bool RecursiveASTVisitor<Derived>::TraverseSynOrSemInitListExpr(
    InitListExpr *S, DataRecursionQueue *Queue) {
  if (!S)
    return true;
  // In post-order mode the walk-up happens later, in PostVisitStmt.
  if (!getDerived().shouldTraversePostOrder())
    TRY_TO(WalkUpFromInitListExpr(S));
  // All we need are the default actions. FIXME: use a helper function.
  for (Stmt *Child : S->children()) {
    TRY_TO_TRAVERSE_OR_ENQUEUE_STMT(Child);
  }
  return true;
}
// This method is called once for each pair of syntactic and semantic
// InitListExpr, and it traverses the subtrees defined by the two forms. This
// may cause some of the children to be visited twice, if they appear both in
// the syntactic and the semantic form.
//
// There is no guarantee about which form \p S takes when this method is called.
template <typename Derived>
bool RecursiveASTVisitor<Derived>::TraverseInitListExpr(
    InitListExpr *S, DataRecursionQueue *Queue) {
  // Visit the syntactic form first, then the semantic form.
  TRY_TO(TraverseSynOrSemInitListExpr(
      S->isSemanticForm() ? S->getSyntacticForm() : S, Queue));
  TRY_TO(TraverseSynOrSemInitListExpr(
      S->isSemanticForm() ? S : S->getSemanticForm(), Queue));
  return true;
}
// GenericSelectionExpr is a special case because the types and expressions
// are interleaved.  We also need to watch out for null types (default
// generic associations).
DEF_TRAVERSE_STMT(GenericSelectionExpr, {
  TRY_TO(TraverseStmt(S->getControllingExpr()));
  for (unsigned i = 0; i != S->getNumAssocs(); ++i) {
    if (TypeSourceInfo *TS = S->getAssocTypeSourceInfo(i))
      TRY_TO(TraverseTypeLoc(TS->getTypeLoc()));
    TRY_TO_TRAVERSE_OR_ENQUEUE_STMT(S->getAssocExpr(i));
  }
  ShouldVisitChildren = false;
})
// PseudoObjectExpr is a special case because of the weirdness with
// syntactic expressions and opaque values.
DEF_TRAVERSE_STMT(PseudoObjectExpr, {
  TRY_TO_TRAVERSE_OR_ENQUEUE_STMT(S->getSyntacticForm());
  for (PseudoObjectExpr::semantics_iterator i = S->semantics_begin(),
                                            e = S->semantics_end();
       i != e; ++i) {
    Expr *sub = *i;
    if (OpaqueValueExpr *OVE = dyn_cast<OpaqueValueExpr>(sub))
      sub = OVE->getSourceExpr();
    TRY_TO_TRAVERSE_OR_ENQUEUE_STMT(sub);
  }
  ShouldVisitChildren = false;
})
DEF_TRAVERSE_STMT(CXXScalarValueInitExpr, {
  // This is called for code like 'return T()' where T is a built-in
  // (i.e. non-class) type.
  TRY_TO(TraverseTypeLoc(S->getTypeSourceInfo()->getTypeLoc()));
})
DEF_TRAVERSE_STMT(CXXNewExpr, {
  // The child-iterator will pick up the other arguments.
  TRY_TO(TraverseTypeLoc(S->getAllocatedTypeSourceInfo()->getTypeLoc()));
})
DEF_TRAVERSE_STMT(OffsetOfExpr, {
  // The child-iterator will pick up the expression representing
  // the field.
  // FIXME: for code like offsetof(Foo, a.b.c), should we get
  // making a MemberExpr callbacks for Foo.a, Foo.a.b, and Foo.a.b.c?
  TRY_TO(TraverseTypeLoc(S->getTypeSourceInfo()->getTypeLoc()));
})
DEF_TRAVERSE_STMT(UnaryExprOrTypeTraitExpr, {
  // The child-iterator will pick up the arg if it's an expression,
  // but not if it's a type.
  if (S->isArgumentType())
    TRY_TO(TraverseTypeLoc(S->getArgumentTypeInfo()->getTypeLoc()));
})
DEF_TRAVERSE_STMT(CXXTypeidExpr, {
  // The child-iterator will pick up the arg if it's an expression,
  // but not if it's a type.
  if (S->isTypeOperand())
    TRY_TO(TraverseTypeLoc(S->getTypeOperandSourceInfo()->getTypeLoc()));
})
DEF_TRAVERSE_STMT(MSPropertyRefExpr, {
  TRY_TO(TraverseNestedNameSpecifierLoc(S->getQualifierLoc()));
})
DEF_TRAVERSE_STMT(MSPropertySubscriptExpr, {})
DEF_TRAVERSE_STMT(CXXUuidofExpr, {
  // The child-iterator will pick up the arg if it's an expression,
  // but not if it's a type.
  if (S->isTypeOperand())
    TRY_TO(TraverseTypeLoc(S->getTypeOperandSourceInfo()->getTypeLoc()));
})
DEF_TRAVERSE_STMT(TypeTraitExpr, {
  for (unsigned I = 0, N = S->getNumArgs(); I != N; ++I)
    TRY_TO(TraverseTypeLoc(S->getArg(I)->getTypeLoc()));
})
DEF_TRAVERSE_STMT(ArrayTypeTraitExpr, {
  TRY_TO(TraverseTypeLoc(S->getQueriedTypeSourceInfo()->getTypeLoc()));
})
DEF_TRAVERSE_STMT(ExpressionTraitExpr,
                  { TRY_TO_TRAVERSE_OR_ENQUEUE_STMT(S->getQueriedExpression()); })
DEF_TRAVERSE_STMT(VAArgExpr, {
  // The child-iterator will pick up the expression argument.
  TRY_TO(TraverseTypeLoc(S->getWrittenTypeInfo()->getTypeLoc()));
})
DEF_TRAVERSE_STMT(CXXTemporaryObjectExpr, {
  // This is called for code like 'return T()' where T is a class type.
  TRY_TO(TraverseTypeLoc(S->getTypeSourceInfo()->getTypeLoc()));
})
// Walk only the visible parts of lambda expressions.
DEF_TRAVERSE_STMT(LambdaExpr, {
  // Implicit captures are only visited in implicit-code mode.
  for (unsigned I = 0, N = S->capture_size(); I != N; ++I) {
    const LambdaCapture *C = S->capture_begin() + I;
    if (C->isExplicit() || getDerived().shouldVisitImplicitCode()) {
      TRY_TO(TraverseLambdaCapture(S, C, S->capture_init_begin()[I]));
    }
  }
  TypeLoc TL = S->getCallOperator()->getTypeSourceInfo()->getTypeLoc();
  FunctionProtoTypeLoc Proto = TL.getAsAdjusted<FunctionProtoTypeLoc>();
  if (S->hasExplicitParameters() && S->hasExplicitResultType()) {
    // Visit the whole type.
    TRY_TO(TraverseTypeLoc(TL));
  } else {
    // Only pieces that were written by the user are visited.
    if (S->hasExplicitParameters()) {
      // Visit parameters.
      for (unsigned I = 0, N = Proto.getNumParams(); I != N; ++I) {
        TRY_TO(TraverseDecl(Proto.getParam(I)));
      }
    } else if (S->hasExplicitResultType()) {
      TRY_TO(TraverseTypeLoc(Proto.getReturnLoc()));
    }
    auto *T = Proto.getTypePtr();
    for (const auto &E : T->exceptions()) {
      TRY_TO(TraverseType(E));
    }
    if (Expr *NE = T->getNoexceptExpr())
      TRY_TO_TRAVERSE_OR_ENQUEUE_STMT(NE);
  }
  ReturnValue = TRAVERSE_STMT_BASE(LambdaBody, LambdaExpr, S, Queue);
  ShouldVisitChildren = false;
})
DEF_TRAVERSE_STMT(CXXUnresolvedConstructExpr, {
  // This is called for code like 'T()', where T is a template argument.
  TRY_TO(TraverseTypeLoc(S->getTypeSourceInfo()->getTypeLoc()));
})
// These expressions all might take explicit template arguments.
// We traverse those if so.  FIXME: implement these.
DEF_TRAVERSE_STMT(CXXConstructExpr, {})
DEF_TRAVERSE_STMT(CallExpr, {})
DEF_TRAVERSE_STMT(CXXMemberCallExpr, {})
// These exprs (most of them), do not need any action except iterating
// over the children.
DEF_TRAVERSE_STMT(AddrLabelExpr, {})
DEF_TRAVERSE_STMT(ArraySubscriptExpr, {})
DEF_TRAVERSE_STMT(OMPArraySectionExpr, {})
DEF_TRAVERSE_STMT(BlockExpr, {
  TRY_TO(TraverseDecl(S->getBlockDecl()));
  return true; // no child statements to loop through.
})
DEF_TRAVERSE_STMT(ChooseExpr, {})
DEF_TRAVERSE_STMT(CompoundLiteralExpr, {
  TRY_TO(TraverseTypeLoc(S->getTypeSourceInfo()->getTypeLoc()));
})
DEF_TRAVERSE_STMT(CXXBindTemporaryExpr, {})
DEF_TRAVERSE_STMT(CXXBoolLiteralExpr, {})
DEF_TRAVERSE_STMT(CXXDefaultArgExpr, {
  // The default argument is only visited in implicit-code mode.
  if (getDerived().shouldVisitImplicitCode())
    TRY_TO(TraverseStmt(S->getExpr()));
})
DEF_TRAVERSE_STMT(CXXDefaultInitExpr, {})
DEF_TRAVERSE_STMT(CXXDeleteExpr, {})
DEF_TRAVERSE_STMT(ExprWithCleanups, {})
DEF_TRAVERSE_STMT(CXXInheritedCtorInitExpr, {})
DEF_TRAVERSE_STMT(CXXNullPtrLiteralExpr, {})
DEF_TRAVERSE_STMT(CXXStdInitializerListExpr, {})
DEF_TRAVERSE_STMT(CXXPseudoDestructorExpr, {
  TRY_TO(TraverseNestedNameSpecifierLoc(S->getQualifierLoc()));
  if (TypeSourceInfo *ScopeInfo = S->getScopeTypeInfo())
    TRY_TO(TraverseTypeLoc(ScopeInfo->getTypeLoc()));
  if (TypeSourceInfo *DestroyedTypeInfo = S->getDestroyedTypeInfo())
    TRY_TO(TraverseTypeLoc(DestroyedTypeInfo->getTypeLoc()));
})
DEF_TRAVERSE_STMT(CXXThisExpr, {})
DEF_TRAVERSE_STMT(CXXThrowExpr, {})
DEF_TRAVERSE_STMT(UserDefinedLiteral, {})
DEF_TRAVERSE_STMT(DesignatedInitExpr, {})
DEF_TRAVERSE_STMT(DesignatedInitUpdateExpr, {})
DEF_TRAVERSE_STMT(ExtVectorElementExpr, {})
DEF_TRAVERSE_STMT(GNUNullExpr, {})
DEF_TRAVERSE_STMT(ImplicitValueInitExpr, {})
DEF_TRAVERSE_STMT(NoInitExpr, {})
DEF_TRAVERSE_STMT(ArrayInitLoopExpr, {
  // FIXME: The source expression of the OVE should be listed as
  // a child of the ArrayInitLoopExpr.
  if (OpaqueValueExpr *OVE = S->getCommonExpr())
    TRY_TO_TRAVERSE_OR_ENQUEUE_STMT(OVE->getSourceExpr())
})
DEF_TRAVERSE_STMT(ArrayInitIndexExpr, {})
// Objective-C expressions.
DEF_TRAVERSE_STMT(ObjCBoolLiteralExpr, {})
DEF_TRAVERSE_STMT(ObjCEncodeExpr, {
  if (TypeSourceInfo *TInfo = S->getEncodedTypeSourceInfo())
    TRY_TO(TraverseTypeLoc(TInfo->getTypeLoc()));
})
DEF_TRAVERSE_STMT(ObjCIsaExpr, {})
DEF_TRAVERSE_STMT(ObjCIvarRefExpr, {})
DEF_TRAVERSE_STMT(ObjCMessageExpr, {
  if (TypeSourceInfo *TInfo = S->getClassReceiverTypeInfo())
    TRY_TO(TraverseTypeLoc(TInfo->getTypeLoc()));
})
DEF_TRAVERSE_STMT(ObjCPropertyRefExpr, {})
DEF_TRAVERSE_STMT(ObjCSubscriptRefExpr, {})
DEF_TRAVERSE_STMT(ObjCProtocolExpr, {})
DEF_TRAVERSE_STMT(ObjCSelectorExpr, {})
DEF_TRAVERSE_STMT(ObjCIndirectCopyRestoreExpr, {})
DEF_TRAVERSE_STMT(ObjCBridgedCastExpr, {
  TRY_TO(TraverseTypeLoc(S->getTypeInfoAsWritten()->getTypeLoc()));
})
DEF_TRAVERSE_STMT(ObjCAvailabilityCheckExpr, {})
DEF_TRAVERSE_STMT(ParenExpr, {})
DEF_TRAVERSE_STMT(ParenListExpr, {})
DEF_TRAVERSE_STMT(PredefinedExpr, {})
DEF_TRAVERSE_STMT(ShuffleVectorExpr, {})
DEF_TRAVERSE_STMT(ConvertVectorExpr, {})
DEF_TRAVERSE_STMT(StmtExpr, {})
DEF_TRAVERSE_STMT(UnresolvedLookupExpr, {
  TRY_TO(TraverseNestedNameSpecifierLoc(S->getQualifierLoc()));
  if (S->hasExplicitTemplateArgs()) {
    TRY_TO(TraverseTemplateArgumentLocsHelper(S->getTemplateArgs(),
                                              S->getNumTemplateArgs()));
  }
})
DEF_TRAVERSE_STMT(UnresolvedMemberExpr, {
  TRY_TO(TraverseNestedNameSpecifierLoc(S->getQualifierLoc()));
  if (S->hasExplicitTemplateArgs()) {
    TRY_TO(TraverseTemplateArgumentLocsHelper(S->getTemplateArgs(),
                                              S->getNumTemplateArgs()));
  }
})
DEF_TRAVERSE_STMT(SEHTryStmt, {})
DEF_TRAVERSE_STMT(SEHExceptStmt, {})
DEF_TRAVERSE_STMT(SEHFinallyStmt, {})
DEF_TRAVERSE_STMT(SEHLeaveStmt, {})
DEF_TRAVERSE_STMT(CapturedStmt, { TRY_TO(TraverseDecl(S->getCapturedDecl())); })
DEF_TRAVERSE_STMT(CXXOperatorCallExpr, {})
DEF_TRAVERSE_STMT(OpaqueValueExpr, {})
DEF_TRAVERSE_STMT(TypoExpr, {})
DEF_TRAVERSE_STMT(CUDAKernelCallExpr, {})
// These operators (all of them) do not need any action except
// iterating over the children.
DEF_TRAVERSE_STMT(BinaryConditionalOperator, {})
DEF_TRAVERSE_STMT(ConditionalOperator, {})
DEF_TRAVERSE_STMT(UnaryOperator, {})
DEF_TRAVERSE_STMT(BinaryOperator, {})
DEF_TRAVERSE_STMT(CompoundAssignOperator, {})
DEF_TRAVERSE_STMT(CXXNoexceptExpr, {})
DEF_TRAVERSE_STMT(PackExpansionExpr, {})
DEF_TRAVERSE_STMT(SizeOfPackExpr, {})
DEF_TRAVERSE_STMT(SubstNonTypeTemplateParmPackExpr, {})
DEF_TRAVERSE_STMT(SubstNonTypeTemplateParmExpr, {})
DEF_TRAVERSE_STMT(FunctionParmPackExpr, {})
DEF_TRAVERSE_STMT(MaterializeTemporaryExpr, {})
DEF_TRAVERSE_STMT(CXXFoldExpr, {})
DEF_TRAVERSE_STMT(AtomicExpr, {})
// For coroutines expressions, traverse either the operand
// as written or the implied calls, depending on what the
// derived class requests.
DEF_TRAVERSE_STMT(CoroutineBodyStmt, {
if (!getDerived().shouldVisitImplicitCode()) {
TRY_TO_TRAVERSE_OR_ENQUEUE_STMT(S->getBody());
ShouldVisitChildren = false;
}
})
DEF_TRAVERSE_STMT(CoreturnStmt, {
if (!getDerived().shouldVisitImplicitCode()) {
TRY_TO_TRAVERSE_OR_ENQUEUE_STMT(S->getOperand());
ShouldVisitChildren = false;
}
})
DEF_TRAVERSE_STMT(CoawaitExpr, {
if (!getDerived().shouldVisitImplicitCode()) {
TRY_TO_TRAVERSE_OR_ENQUEUE_STMT(S->getOperand());
ShouldVisitChildren = false;
}
})
DEF_TRAVERSE_STMT(DependentCoawaitExpr, {
if (!getDerived().shouldVisitImplicitCode()) {
TRY_TO_TRAVERSE_OR_ENQUEUE_STMT(S->getOperand());
ShouldVisitChildren = false;
}
})
DEF_TRAVERSE_STMT(CoyieldExpr, {
if (!getDerived().shouldVisitImplicitCode()) {
TRY_TO_TRAVERSE_OR_ENQUEUE_STMT(S->getOperand());
ShouldVisitChildren = false;
}
})
// These literals (all of them) do not need any action.
DEF_TRAVERSE_STMT(IntegerLiteral, {})
DEF_TRAVERSE_STMT(FixedPointLiteral, {})
DEF_TRAVERSE_STMT(CharacterLiteral, {})
DEF_TRAVERSE_STMT(FloatingLiteral, {})
DEF_TRAVERSE_STMT(ImaginaryLiteral, {})
DEF_TRAVERSE_STMT(StringLiteral, {})
DEF_TRAVERSE_STMT(ObjCStringLiteral, {})
DEF_TRAVERSE_STMT(ObjCBoxedExpr, {})
DEF_TRAVERSE_STMT(ObjCArrayLiteral, {})
DEF_TRAVERSE_STMT(ObjCDictionaryLiteral, {})
// Traverse OpenCL: AsType, Convert.
DEF_TRAVERSE_STMT(AsTypeExpr, {})
// OpenMP directives.
/// Shared helper for every OpenMP directive traversal: walk each clause
/// attached to the directive. The associated statement (if any) is reached
/// through the generic child traversal, so only clauses are handled here.
template <typename Derived>
bool RecursiveASTVisitor<Derived>::TraverseOMPExecutableDirective(
    OMPExecutableDirective *S) {
  for (OMPClause *Clause : S->clauses())
    TRY_TO(TraverseOMPClause(Clause));
  return true;
}
/// Loop directives need no traversal work beyond what every executable
/// directive does; loop-specific expressions are reachable as children.
template <typename Derived>
bool
RecursiveASTVisitor<Derived>::TraverseOMPLoopDirective(OMPLoopDirective *S) {
  return TraverseOMPExecutableDirective(S);
}
// Every OpenMP executable directive shares the same traversal: visit its
// clauses via TraverseOMPExecutableDirective. OMPCriticalDirective is the
// one exception -- it also carries a directive name to traverse.
DEF_TRAVERSE_STMT(OMPParallelDirective,
                  { TRY_TO(TraverseOMPExecutableDirective(S)); })
DEF_TRAVERSE_STMT(OMPSimdDirective,
                  { TRY_TO(TraverseOMPExecutableDirective(S)); })
DEF_TRAVERSE_STMT(OMPForDirective,
                  { TRY_TO(TraverseOMPExecutableDirective(S)); })
DEF_TRAVERSE_STMT(OMPForSimdDirective,
                  { TRY_TO(TraverseOMPExecutableDirective(S)); })
DEF_TRAVERSE_STMT(OMPSectionsDirective,
                  { TRY_TO(TraverseOMPExecutableDirective(S)); })
DEF_TRAVERSE_STMT(OMPSectionDirective,
                  { TRY_TO(TraverseOMPExecutableDirective(S)); })
DEF_TRAVERSE_STMT(OMPSingleDirective,
                  { TRY_TO(TraverseOMPExecutableDirective(S)); })
DEF_TRAVERSE_STMT(OMPMasterDirective,
                  { TRY_TO(TraverseOMPExecutableDirective(S)); })
DEF_TRAVERSE_STMT(OMPCriticalDirective, {
  // critical(name): the name is not a child; traverse it explicitly.
  TRY_TO(TraverseDeclarationNameInfo(S->getDirectiveName()));
  TRY_TO(TraverseOMPExecutableDirective(S));
})
DEF_TRAVERSE_STMT(OMPParallelForDirective,
                  { TRY_TO(TraverseOMPExecutableDirective(S)); })
DEF_TRAVERSE_STMT(OMPParallelForSimdDirective,
                  { TRY_TO(TraverseOMPExecutableDirective(S)); })
DEF_TRAVERSE_STMT(OMPParallelSectionsDirective,
                  { TRY_TO(TraverseOMPExecutableDirective(S)); })
DEF_TRAVERSE_STMT(OMPTaskDirective,
                  { TRY_TO(TraverseOMPExecutableDirective(S)); })
DEF_TRAVERSE_STMT(OMPTaskyieldDirective,
                  { TRY_TO(TraverseOMPExecutableDirective(S)); })
DEF_TRAVERSE_STMT(OMPBarrierDirective,
                  { TRY_TO(TraverseOMPExecutableDirective(S)); })
DEF_TRAVERSE_STMT(OMPTaskwaitDirective,
                  { TRY_TO(TraverseOMPExecutableDirective(S)); })
DEF_TRAVERSE_STMT(OMPTaskgroupDirective,
                  { TRY_TO(TraverseOMPExecutableDirective(S)); })
DEF_TRAVERSE_STMT(OMPCancellationPointDirective,
                  { TRY_TO(TraverseOMPExecutableDirective(S)); })
DEF_TRAVERSE_STMT(OMPCancelDirective,
                  { TRY_TO(TraverseOMPExecutableDirective(S)); })
DEF_TRAVERSE_STMT(OMPFlushDirective,
                  { TRY_TO(TraverseOMPExecutableDirective(S)); })
DEF_TRAVERSE_STMT(OMPOrderedDirective,
                  { TRY_TO(TraverseOMPExecutableDirective(S)); })
DEF_TRAVERSE_STMT(OMPAtomicDirective,
                  { TRY_TO(TraverseOMPExecutableDirective(S)); })
DEF_TRAVERSE_STMT(OMPTargetDirective,
                  { TRY_TO(TraverseOMPExecutableDirective(S)); })
DEF_TRAVERSE_STMT(OMPTargetDataDirective,
                  { TRY_TO(TraverseOMPExecutableDirective(S)); })
DEF_TRAVERSE_STMT(OMPTargetEnterDataDirective,
                  { TRY_TO(TraverseOMPExecutableDirective(S)); })
DEF_TRAVERSE_STMT(OMPTargetExitDataDirective,
                  { TRY_TO(TraverseOMPExecutableDirective(S)); })
DEF_TRAVERSE_STMT(OMPTargetParallelDirective,
                  { TRY_TO(TraverseOMPExecutableDirective(S)); })
DEF_TRAVERSE_STMT(OMPTargetParallelForDirective,
                  { TRY_TO(TraverseOMPExecutableDirective(S)); })
DEF_TRAVERSE_STMT(OMPTeamsDirective,
                  { TRY_TO(TraverseOMPExecutableDirective(S)); })
DEF_TRAVERSE_STMT(OMPTargetUpdateDirective,
                  { TRY_TO(TraverseOMPExecutableDirective(S)); })
DEF_TRAVERSE_STMT(OMPTaskLoopDirective,
                  { TRY_TO(TraverseOMPExecutableDirective(S)); })
DEF_TRAVERSE_STMT(OMPTaskLoopSimdDirective,
                  { TRY_TO(TraverseOMPExecutableDirective(S)); })
DEF_TRAVERSE_STMT(OMPDistributeDirective,
                  { TRY_TO(TraverseOMPExecutableDirective(S)); })
DEF_TRAVERSE_STMT(OMPDistributeParallelForDirective,
                  { TRY_TO(TraverseOMPExecutableDirective(S)); })
DEF_TRAVERSE_STMT(OMPDistributeParallelForSimdDirective,
                  { TRY_TO(TraverseOMPExecutableDirective(S)); })
DEF_TRAVERSE_STMT(OMPDistributeSimdDirective,
                  { TRY_TO(TraverseOMPExecutableDirective(S)); })
DEF_TRAVERSE_STMT(OMPTargetParallelForSimdDirective,
                  { TRY_TO(TraverseOMPExecutableDirective(S)); })
DEF_TRAVERSE_STMT(OMPTargetSimdDirective,
                  { TRY_TO(TraverseOMPExecutableDirective(S)); })
DEF_TRAVERSE_STMT(OMPTeamsDistributeDirective,
                  { TRY_TO(TraverseOMPExecutableDirective(S)); })
DEF_TRAVERSE_STMT(OMPTeamsDistributeSimdDirective,
                  { TRY_TO(TraverseOMPExecutableDirective(S)); })
DEF_TRAVERSE_STMT(OMPTeamsDistributeParallelForSimdDirective,
                  { TRY_TO(TraverseOMPExecutableDirective(S)); })
DEF_TRAVERSE_STMT(OMPTeamsDistributeParallelForDirective,
                  { TRY_TO(TraverseOMPExecutableDirective(S)); })
DEF_TRAVERSE_STMT(OMPTargetTeamsDirective,
                  { TRY_TO(TraverseOMPExecutableDirective(S)); })
DEF_TRAVERSE_STMT(OMPTargetTeamsDistributeDirective,
                  { TRY_TO(TraverseOMPExecutableDirective(S)); })
DEF_TRAVERSE_STMT(OMPTargetTeamsDistributeParallelForDirective,
                  { TRY_TO(TraverseOMPExecutableDirective(S)); })
DEF_TRAVERSE_STMT(OMPTargetTeamsDistributeParallelForSimdDirective,
                  { TRY_TO(TraverseOMPExecutableDirective(S)); })
DEF_TRAVERSE_STMT(OMPTargetTeamsDistributeSimdDirective,
                  { TRY_TO(TraverseOMPExecutableDirective(S)); })
// OpenMP clauses.
// OpenMP clauses.
/// Dispatch a clause to its Visit*Clause handler based on its dynamic clause
/// kind. The OPENMP_CLAUSE X-macro (expanded from OpenMPKinds.def) generates
/// one case per concrete clause class.
template <typename Derived>
bool RecursiveASTVisitor<Derived>::TraverseOMPClause(OMPClause *C) {
  if (!C)
    return true;
  switch (C->getClauseKind()) {
#define OPENMP_CLAUSE(Name, Class)                                             \
  case OMPC_##Name:                                                            \
    TRY_TO(Visit##Class(static_cast<Class *>(C)));                             \
    break;
#include "clang/Basic/OpenMPKinds.def"
  // Sentinel / pseudo-clause kinds never appear on a directive; nothing to
  // visit for them.
  case OMPC_threadprivate:
  case OMPC_uniform:
  case OMPC_unknown:
    break;
  }
  return true;
}
/// Visit the implicit pre-init statement carried by clauses that capture
/// expressions before the directive executes.
template <typename Derived>
bool RecursiveASTVisitor<Derived>::VisitOMPClauseWithPreInit(
    OMPClauseWithPreInit *Node) {
  TRY_TO(TraverseStmt(Node->getPreInitStmt()));
  return true;
}

/// Visit both the pre-init statement and the post-update expression of
/// clauses that also perform an update after the region.
template <typename Derived>
bool RecursiveASTVisitor<Derived>::VisitOMPClauseWithPostUpdate(
    OMPClauseWithPostUpdate *Node) {
  TRY_TO(VisitOMPClauseWithPreInit(Node));
  TRY_TO(TraverseStmt(Node->getPostUpdateExpr()));
  return true;
}
// Clauses that carry a single expression traverse it (plus any captured
// pre-init statement); bare flag clauses (nowait, untied, ...) have nothing
// to visit and exist only so derived visitors can hook them.
template <typename Derived>
bool RecursiveASTVisitor<Derived>::VisitOMPIfClause(OMPIfClause *C) {
  TRY_TO(VisitOMPClauseWithPreInit(C));
  TRY_TO(TraverseStmt(C->getCondition()));
  return true;
}

template <typename Derived>
bool RecursiveASTVisitor<Derived>::VisitOMPFinalClause(OMPFinalClause *C) {
  TRY_TO(TraverseStmt(C->getCondition()));
  return true;
}

template <typename Derived>
bool
RecursiveASTVisitor<Derived>::VisitOMPNumThreadsClause(OMPNumThreadsClause *C) {
  TRY_TO(VisitOMPClauseWithPreInit(C));
  TRY_TO(TraverseStmt(C->getNumThreads()));
  return true;
}

template <typename Derived>
bool RecursiveASTVisitor<Derived>::VisitOMPSafelenClause(OMPSafelenClause *C) {
  TRY_TO(TraverseStmt(C->getSafelen()));
  return true;
}

template <typename Derived>
bool RecursiveASTVisitor<Derived>::VisitOMPSimdlenClause(OMPSimdlenClause *C) {
  TRY_TO(TraverseStmt(C->getSimdlen()));
  return true;
}

template <typename Derived>
bool
RecursiveASTVisitor<Derived>::VisitOMPCollapseClause(OMPCollapseClause *C) {
  TRY_TO(TraverseStmt(C->getNumForLoops()));
  return true;
}

// default(...) and proc_bind(...) only store an enum kind; no expressions.
template <typename Derived>
bool RecursiveASTVisitor<Derived>::VisitOMPDefaultClause(OMPDefaultClause *) {
  return true;
}

template <typename Derived>
bool RecursiveASTVisitor<Derived>::VisitOMPProcBindClause(OMPProcBindClause *) {
  return true;
}

template <typename Derived>
bool
RecursiveASTVisitor<Derived>::VisitOMPScheduleClause(OMPScheduleClause *C) {
  TRY_TO(VisitOMPClauseWithPreInit(C));
  TRY_TO(TraverseStmt(C->getChunkSize()));
  return true;
}

template <typename Derived>
bool RecursiveASTVisitor<Derived>::VisitOMPOrderedClause(OMPOrderedClause *C) {
  TRY_TO(TraverseStmt(C->getNumForLoops()));
  return true;
}

// Pure flag clauses follow; parameters are unnamed because they are unused.
template <typename Derived>
bool RecursiveASTVisitor<Derived>::VisitOMPNowaitClause(OMPNowaitClause *) {
  return true;
}

template <typename Derived>
bool RecursiveASTVisitor<Derived>::VisitOMPUntiedClause(OMPUntiedClause *) {
  return true;
}

template <typename Derived>
bool
RecursiveASTVisitor<Derived>::VisitOMPMergeableClause(OMPMergeableClause *) {
  return true;
}

template <typename Derived>
bool RecursiveASTVisitor<Derived>::VisitOMPReadClause(OMPReadClause *) {
  return true;
}

template <typename Derived>
bool RecursiveASTVisitor<Derived>::VisitOMPWriteClause(OMPWriteClause *) {
  return true;
}

template <typename Derived>
bool RecursiveASTVisitor<Derived>::VisitOMPUpdateClause(OMPUpdateClause *) {
  return true;
}

template <typename Derived>
bool RecursiveASTVisitor<Derived>::VisitOMPCaptureClause(OMPCaptureClause *) {
  return true;
}

template <typename Derived>
bool RecursiveASTVisitor<Derived>::VisitOMPSeqCstClause(OMPSeqCstClause *) {
  return true;
}

template <typename Derived>
bool RecursiveASTVisitor<Derived>::VisitOMPThreadsClause(OMPThreadsClause *) {
  return true;
}

template <typename Derived>
bool RecursiveASTVisitor<Derived>::VisitOMPSIMDClause(OMPSIMDClause *) {
  return true;
}

template <typename Derived>
bool RecursiveASTVisitor<Derived>::VisitOMPNogroupClause(OMPNogroupClause *) {
  return true;
}
/// Walk every variable reference in a variable-list clause (shared, private,
/// map, ...). Shared helper used by all list-based clause visitors below.
template <typename Derived>
template <typename T>
bool RecursiveASTVisitor<Derived>::VisitOMPClauseList(T *Node) {
  for (auto *VarExpr : Node->varlists())
    TRY_TO(TraverseStmt(VarExpr));
  return true;
}
// Variable-list clauses. Besides the written variable list, many of these
// clauses own implicitly generated helper expressions (private copies,
// initializers, assignment ops, ...); those are traversed explicitly so
// derived visitors that look at implicit code can see them.
template <typename Derived>
bool RecursiveASTVisitor<Derived>::VisitOMPPrivateClause(OMPPrivateClause *C) {
  TRY_TO(VisitOMPClauseList(C));
  for (auto *E : C->private_copies()) {
    TRY_TO(TraverseStmt(E));
  }
  return true;
}

template <typename Derived>
bool RecursiveASTVisitor<Derived>::VisitOMPFirstprivateClause(
    OMPFirstprivateClause *C) {
  TRY_TO(VisitOMPClauseList(C));
  TRY_TO(VisitOMPClauseWithPreInit(C));
  for (auto *E : C->private_copies()) {
    TRY_TO(TraverseStmt(E));
  }
  for (auto *E : C->inits()) {
    TRY_TO(TraverseStmt(E));
  }
  return true;
}

template <typename Derived>
bool RecursiveASTVisitor<Derived>::VisitOMPLastprivateClause(
    OMPLastprivateClause *C) {
  TRY_TO(VisitOMPClauseList(C));
  TRY_TO(VisitOMPClauseWithPostUpdate(C));
  for (auto *E : C->private_copies()) {
    TRY_TO(TraverseStmt(E));
  }
  for (auto *E : C->source_exprs()) {
    TRY_TO(TraverseStmt(E));
  }
  for (auto *E : C->destination_exprs()) {
    TRY_TO(TraverseStmt(E));
  }
  for (auto *E : C->assignment_ops()) {
    TRY_TO(TraverseStmt(E));
  }
  return true;
}

template <typename Derived>
bool RecursiveASTVisitor<Derived>::VisitOMPSharedClause(OMPSharedClause *C) {
  TRY_TO(VisitOMPClauseList(C));
  return true;
}

template <typename Derived>
bool RecursiveASTVisitor<Derived>::VisitOMPLinearClause(OMPLinearClause *C) {
  // Step expressions come first; they are shared by all listed variables.
  TRY_TO(TraverseStmt(C->getStep()));
  TRY_TO(TraverseStmt(C->getCalcStep()));
  TRY_TO(VisitOMPClauseList(C));
  TRY_TO(VisitOMPClauseWithPostUpdate(C));
  for (auto *E : C->privates()) {
    TRY_TO(TraverseStmt(E));
  }
  for (auto *E : C->inits()) {
    TRY_TO(TraverseStmt(E));
  }
  for (auto *E : C->updates()) {
    TRY_TO(TraverseStmt(E));
  }
  for (auto *E : C->finals()) {
    TRY_TO(TraverseStmt(E));
  }
  return true;
}

template <typename Derived>
bool RecursiveASTVisitor<Derived>::VisitOMPAlignedClause(OMPAlignedClause *C) {
  TRY_TO(TraverseStmt(C->getAlignment()));
  TRY_TO(VisitOMPClauseList(C));
  return true;
}

template <typename Derived>
bool RecursiveASTVisitor<Derived>::VisitOMPCopyinClause(OMPCopyinClause *C) {
  TRY_TO(VisitOMPClauseList(C));
  for (auto *E : C->source_exprs()) {
    TRY_TO(TraverseStmt(E));
  }
  for (auto *E : C->destination_exprs()) {
    TRY_TO(TraverseStmt(E));
  }
  for (auto *E : C->assignment_ops()) {
    TRY_TO(TraverseStmt(E));
  }
  return true;
}

template <typename Derived>
bool RecursiveASTVisitor<Derived>::VisitOMPCopyprivateClause(
    OMPCopyprivateClause *C) {
  TRY_TO(VisitOMPClauseList(C));
  for (auto *E : C->source_exprs()) {
    TRY_TO(TraverseStmt(E));
  }
  for (auto *E : C->destination_exprs()) {
    TRY_TO(TraverseStmt(E));
  }
  for (auto *E : C->assignment_ops()) {
    TRY_TO(TraverseStmt(E));
  }
  return true;
}

// The three reduction clause flavors share the same structure: a possibly
// qualified reduction-identifier plus per-variable helper expressions.
template <typename Derived>
bool
RecursiveASTVisitor<Derived>::VisitOMPReductionClause(OMPReductionClause *C) {
  TRY_TO(TraverseNestedNameSpecifierLoc(C->getQualifierLoc()));
  TRY_TO(TraverseDeclarationNameInfo(C->getNameInfo()));
  TRY_TO(VisitOMPClauseList(C));
  TRY_TO(VisitOMPClauseWithPostUpdate(C));
  for (auto *E : C->privates()) {
    TRY_TO(TraverseStmt(E));
  }
  for (auto *E : C->lhs_exprs()) {
    TRY_TO(TraverseStmt(E));
  }
  for (auto *E : C->rhs_exprs()) {
    TRY_TO(TraverseStmt(E));
  }
  for (auto *E : C->reduction_ops()) {
    TRY_TO(TraverseStmt(E));
  }
  return true;
}

template <typename Derived>
bool RecursiveASTVisitor<Derived>::VisitOMPTaskReductionClause(
    OMPTaskReductionClause *C) {
  TRY_TO(TraverseNestedNameSpecifierLoc(C->getQualifierLoc()));
  TRY_TO(TraverseDeclarationNameInfo(C->getNameInfo()));
  TRY_TO(VisitOMPClauseList(C));
  TRY_TO(VisitOMPClauseWithPostUpdate(C));
  for (auto *E : C->privates()) {
    TRY_TO(TraverseStmt(E));
  }
  for (auto *E : C->lhs_exprs()) {
    TRY_TO(TraverseStmt(E));
  }
  for (auto *E : C->rhs_exprs()) {
    TRY_TO(TraverseStmt(E));
  }
  for (auto *E : C->reduction_ops()) {
    TRY_TO(TraverseStmt(E));
  }
  return true;
}

template <typename Derived>
bool RecursiveASTVisitor<Derived>::VisitOMPInReductionClause(
    OMPInReductionClause *C) {
  TRY_TO(TraverseNestedNameSpecifierLoc(C->getQualifierLoc()));
  TRY_TO(TraverseDeclarationNameInfo(C->getNameInfo()));
  TRY_TO(VisitOMPClauseList(C));
  TRY_TO(VisitOMPClauseWithPostUpdate(C));
  for (auto *E : C->privates()) {
    TRY_TO(TraverseStmt(E));
  }
  for (auto *E : C->lhs_exprs()) {
    TRY_TO(TraverseStmt(E));
  }
  for (auto *E : C->rhs_exprs()) {
    TRY_TO(TraverseStmt(E));
  }
  for (auto *E : C->reduction_ops()) {
    TRY_TO(TraverseStmt(E));
  }
  // in_reduction additionally tracks the enclosing taskgroup's descriptors.
  for (auto *E : C->taskgroup_descriptors())
    TRY_TO(TraverseStmt(E));
  return true;
}

template <typename Derived>
bool RecursiveASTVisitor<Derived>::VisitOMPFlushClause(OMPFlushClause *C) {
  TRY_TO(VisitOMPClauseList(C));
  return true;
}

template <typename Derived>
bool RecursiveASTVisitor<Derived>::VisitOMPDependClause(OMPDependClause *C) {
  TRY_TO(VisitOMPClauseList(C));
  return true;
}

template <typename Derived>
bool RecursiveASTVisitor<Derived>::VisitOMPDeviceClause(OMPDeviceClause *C) {
  TRY_TO(VisitOMPClauseWithPreInit(C));
  TRY_TO(TraverseStmt(C->getDevice()));
  return true;
}

template <typename Derived>
bool RecursiveASTVisitor<Derived>::VisitOMPMapClause(OMPMapClause *C) {
  TRY_TO(VisitOMPClauseList(C));
  return true;
}

template <typename Derived>
bool RecursiveASTVisitor<Derived>::VisitOMPNumTeamsClause(
    OMPNumTeamsClause *C) {
  TRY_TO(VisitOMPClauseWithPreInit(C));
  TRY_TO(TraverseStmt(C->getNumTeams()));
  return true;
}

template <typename Derived>
bool RecursiveASTVisitor<Derived>::VisitOMPThreadLimitClause(
    OMPThreadLimitClause *C) {
  TRY_TO(VisitOMPClauseWithPreInit(C));
  TRY_TO(TraverseStmt(C->getThreadLimit()));
  return true;
}

template <typename Derived>
bool RecursiveASTVisitor<Derived>::VisitOMPPriorityClause(
    OMPPriorityClause *C) {
  TRY_TO(TraverseStmt(C->getPriority()));
  return true;
}

template <typename Derived>
bool RecursiveASTVisitor<Derived>::VisitOMPGrainsizeClause(
    OMPGrainsizeClause *C) {
  TRY_TO(TraverseStmt(C->getGrainsize()));
  return true;
}

template <typename Derived>
bool RecursiveASTVisitor<Derived>::VisitOMPNumTasksClause(
    OMPNumTasksClause *C) {
  TRY_TO(TraverseStmt(C->getNumTasks()));
  return true;
}

template <typename Derived>
bool RecursiveASTVisitor<Derived>::VisitOMPHintClause(OMPHintClause *C) {
  TRY_TO(TraverseStmt(C->getHint()));
  return true;
}

template <typename Derived>
bool RecursiveASTVisitor<Derived>::VisitOMPDistScheduleClause(
    OMPDistScheduleClause *C) {
  TRY_TO(VisitOMPClauseWithPreInit(C));
  TRY_TO(TraverseStmt(C->getChunkSize()));
  return true;
}
/// defaultmap(...) carries only its modifier/kind enums -- no expressions to
/// traverse. The parameter is left unnamed because it is unused, matching the
/// other no-op clause visitors above and avoiding -Wunused-parameter noise.
template <typename Derived>
bool
RecursiveASTVisitor<Derived>::VisitOMPDefaultmapClause(OMPDefaultmapClause *) {
  return true;
}
// Data-motion and device-pointer clauses only carry a variable list.
template <typename Derived>
bool RecursiveASTVisitor<Derived>::VisitOMPToClause(OMPToClause *C) {
  TRY_TO(VisitOMPClauseList(C));
  return true;
}

template <typename Derived>
bool RecursiveASTVisitor<Derived>::VisitOMPFromClause(OMPFromClause *C) {
  TRY_TO(VisitOMPClauseList(C));
  return true;
}

template <typename Derived>
bool RecursiveASTVisitor<Derived>::VisitOMPUseDevicePtrClause(
    OMPUseDevicePtrClause *C) {
  TRY_TO(VisitOMPClauseList(C));
  return true;
}

template <typename Derived>
bool RecursiveASTVisitor<Derived>::VisitOMPIsDevicePtrClause(
    OMPIsDevicePtrClause *C) {
  TRY_TO(VisitOMPClauseList(C));
  return true;
}
// FIXME: look at the following tricky-seeming exprs to see if we
// need to recurse on anything. These are ones that have methods
// returning decls or qualtypes or nestednamespecifier -- though I'm
// not sure if they own them -- or just seemed very complicated, or
// had lots of sub-types to explore.
//
// VisitOverloadExpr and its children: recurse on template args? etc?
// FIXME: go through all the stmts and exprs again, and see which of them
// create new types, and recurse on the types (TypeLocs?) of those.
// Candidates:
//
// http://clang.llvm.org/doxygen/classclang_1_1CXXTypeidExpr.html
// http://clang.llvm.org/doxygen/classclang_1_1UnaryExprOrTypeTraitExpr.html
// http://clang.llvm.org/doxygen/classclang_1_1TypesCompatibleExpr.html
// Every class that has getQualifier.
#undef DEF_TRAVERSE_STMT
#undef TRAVERSE_STMT
#undef TRAVERSE_STMT_BASE
#undef TRY_TO
} // end namespace clang
#endif // LLVM_CLANG_AST_RECURSIVEASTVISITOR_H
| {
"content_hash": "5721cb2ecbb2a3fe72544d50461bc967",
"timestamp": "",
"source": "github",
"line_count": 3244,
"max_line_length": 82,
"avg_line_length": 36.29438964241677,
"alnum_prop": 0.6539379474940334,
"repo_name": "youtube/cobalt_sandbox",
"id": "0d2b670507c120f4026e1a5bc79dec9ebdf14485",
"size": "117739",
"binary": false,
"copies": "3",
"ref": "refs/heads/main",
"path": "third_party/llvm-project/clang/include/clang/AST/RecursiveASTVisitor.h",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
# Haskell project build driven by stack. Recipe lines must begin with a tab
# (restored here; they were lost in a whitespace-mangled paste).
SRC=$(shell find src -name '*.hs')
# NOTE(review): CABAL and FLAGS are defined but not referenced by any rule
# below -- confirm whether they are still needed.
CABAL=stack
FLAGS=--enable-tests --pedantic

# NOTE(review): 'docs' and 'package' are prerequisites of 'all' but their
# rules are commented out below, so 'make all' will fail -- confirm intent.
all: init test docs package

# Bootstrap: ensure a stack.yaml exists before building.
init: stack.yaml

stack.yaml:
	stack init --prefer-nightly

test: build
	stack test

run: build
	stack exec -- schwa

# docs:
# generate api documentation
#
# package:
# build a release tarball or executable
#
# dev:
# start dev server or process. `vagrant up`, `yesod devel`, etc.
#
# deploy:
# prep and push

install:
	stack install

# Rebuild the codex tags database whenever any Haskell source changes.
tags: ${SRC}
	codex update

hlint:
	hlint *.hs src specs

clean:
	stack clean
	codex cache clean

distclean: clean

build:
	stack build

# Fast edit/typecheck loop: ghcid re-runs ghci on every save.
watch:
	ghcid "--command=stack ghci"

restart: distclean init build

rebuild: clean build

# One-off OpenShift app creation for the schwa project.
create-app:
	rhc app create schwa http://www.accursoft.com/cartridges/network.yml --no-scaling --from-code https://github.com/erochest/schwa.git --repo schwa-openshift --timeout 999999

.PHONY: all init test run clean distclean build rebuild hlint watch tags
| {
"content_hash": "3aa8b566c28de813e2508a9c84778e3a",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 172,
"avg_line_length": 15.327868852459016,
"alnum_prop": 0.7262032085561497,
"repo_name": "erochest/schwa",
"id": "102150c87d0ff0a076abad3b56521c5d17e99198",
"size": "936",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Makefile",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Haskell",
"bytes": "561"
},
{
"name": "Makefile",
"bytes": "936"
}
],
"symlink_target": ""
} |
// Type definitions for Node.js v6.x
// Project: http://nodejs.org/
// Definitions by: Microsoft TypeScript <http://typescriptlang.org>, DefinitelyTyped <https://github.com/DefinitelyTyped/DefinitelyTyped>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
/************************************************
* *
* Node.js v6.x API *
* *
************************************************/
// This needs to be global to avoid TS2403 in case lib.dom.d.ts is present in the same build
/** Node's global console API (a subset shared with the DOM Console). */
interface Console {
    /** The Console constructor, for creating consoles bound to other streams. */
    Console: typeof NodeJS.Console;
    /** Throws an AssertionError-style failure message when `value` is falsy. */
    assert(value: any, message?: string, ...optionalParams: any[]): void;
    /** Inspects `obj` (via util.inspect-style options) and writes it to stdout. */
    dir(obj: any, options?: {showHidden?: boolean, depth?: number, colors?: boolean}): void;
    error(message?: any, ...optionalParams: any[]): void;
    info(message?: any, ...optionalParams: any[]): void;
    log(message?: any, ...optionalParams: any[]): void;
    /** Starts a timer identified by `label`; pair with timeEnd(label). */
    time(label: string): void;
    /** Stops the timer started by time(label) and prints the elapsed time. */
    timeEnd(label: string): void;
    /** Prints "Trace:" followed by the message and a stack trace. */
    trace(message?: any, ...optionalParams: any[]): void;
    warn(message?: any, ...optionalParams: any[]): void;
}
/** Augments the built-in Error with V8's `stack` property. */
interface Error {
    stack?: string;
}

/** V8-specific additions to the Error constructor. */
interface ErrorConstructor {
    /** Creates a `.stack` on `targetObject`, omitting frames above `constructorOpt`. */
    captureStackTrace(targetObject: Object, constructorOpt?: Function): void;
    /** Maximum number of frames captured in stack traces (V8 setting). */
    stackTraceLimit: number;
}
// compat for TypeScript 1.8
// if you use with --target es3 or --target es5 and use below definitions,
// use the lib.es6.d.ts that is bundled with TypeScript 1.8.
// Empty placeholders so references below compile even without lib.es6.d.ts;
// they merge with the real declarations when an ES6 lib is loaded.
interface MapConstructor { }
interface WeakMapConstructor { }
interface SetConstructor { }
interface WeakSetConstructor { }
/************************************************
*                                               *
*                   GLOBAL                      *
*                                               *
************************************************/
declare var process: NodeJS.Process;
declare var global: NodeJS.Global;
declare var console: Console;

// Per-module pseudo-globals injected by the CommonJS wrapper.
declare var __filename: string;
declare var __dirname: string;

// Node's timer functions return Timer objects, not numbers as in the DOM.
declare function setTimeout(callback: (...args: any[]) => void, ms: number, ...args: any[]): NodeJS.Timer;
declare function clearTimeout(timeoutId: NodeJS.Timer): void;
declare function setInterval(callback: (...args: any[]) => void, ms: number, ...args: any[]): NodeJS.Timer;
declare function clearInterval(intervalId: NodeJS.Timer): void;
declare function setImmediate(callback: (...args: any[]) => void, ...args: any[]): any;
declare function clearImmediate(immediateId: any): void;
/** The callable part of `require`: loads a module by id and returns its exports. */
interface NodeRequireFunction {
    (id: string): any;
}

/** The full `require` object, including resolution helpers and the module cache. */
interface NodeRequire extends NodeRequireFunction {
    /** Resolves a module id to its absolute filename without loading it. */
    resolve(id: string): string;
    /** Cache of loaded modules, keyed by resolved filename. */
    cache: any;
    /** Registered file-extension handlers (deprecated in Node). */
    extensions: any;
    /** The entry-point module, or undefined (e.g. in the REPL). */
    main: NodeModule | undefined;
}
declare var require: NodeRequire;

/** Shape of the per-file `module` object in CommonJS modules. */
interface NodeModule {
    /** The value ultimately returned from require() for this module. */
    exports: any;
    require: NodeRequireFunction;
    id: string;
    filename: string;
    /** True once the module has finished executing. */
    loaded: boolean;
    /** The module that first required this one, or null for the entry point. */
    parent: NodeModule | null;
    children: NodeModule[];
}

declare var module: NodeModule;

// Same as module.exports
declare var exports: any;
/**
 * Deprecated non-pooled Buffer variant; mirrors the Buffer constructor
 * overloads and static helpers.
 */
declare var SlowBuffer: {
    new (str: string, encoding?: string): Buffer;
    new (size: number): Buffer;
    // Fixed parameter name: this overload takes the source octets to copy,
    // not a size (matches the equivalent Buffer overload below).
    new (array: Uint8Array): Buffer;
    new (array: any[]): Buffer;
    prototype: Buffer;
    isBuffer(obj: any): boolean;
    byteLength(string: string, encoding?: string): number;
    concat(list: Buffer[], totalLength?: number): Buffer;
};
// Buffer class
// String encodings accepted by Buffer APIs. "base64" and "latin1" are valid
// at runtime in Node 6 (see the encoding list documented on Buffer below)
// but were missing from this union; "binary" is a deprecated alias of
// "latin1". Widening the alias is backward-compatible for callers.
type BufferEncoding = "ascii" | "utf8" | "utf16le" | "ucs2" | "base64" | "latin1" | "binary" | "hex";
interface Buffer extends NodeBuffer { }
/**
 * Raw data is stored in instances of the Buffer class.
 * A Buffer is similar to an array of integers but corresponds to a raw memory allocation outside the V8 heap. A Buffer cannot be resized.
 * Valid string encodings: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex'
 */
declare var Buffer: {
    /**
     * Allocates a new buffer containing the given {str}.
     *
     * @param str String to store in buffer.
     * @param encoding encoding to use, optional.  Default is 'utf8'
     */
    new (str: string, encoding?: string): Buffer;
    /**
     * Allocates a new buffer of {size} octets.
     *
     * @param size count of octets to allocate.
     */
    new (size: number): Buffer;
    /**
     * Allocates a new buffer containing the given {array} of octets.
     *
     * @param array The octets to store.
     */
    new (array: Uint8Array): Buffer;
    /**
     * Produces a Buffer backed by the same allocated memory as
     * the given {ArrayBuffer} (no copy is made).
     *
     * @param arrayBuffer The ArrayBuffer with which to share memory.
     */
    new (arrayBuffer: ArrayBuffer): Buffer;
    /**
     * Allocates a new buffer containing the given {array} of octets.
     *
     * @param array The octets to store.
     */
    new (array: any[]): Buffer;
    /**
     * Copies the passed {buffer} data onto a new {Buffer} instance.
     *
     * @param buffer The buffer to copy.
     */
    new (buffer: Buffer): Buffer;
    prototype: Buffer;
    /**
     * Allocates a new Buffer using an {array} of octets.
     *
     * @param array
     */
    from(array: any[]): Buffer;
    /**
     * When passed a reference to the .buffer property of a TypedArray instance,
     * the newly created Buffer will share the same allocated memory as the TypedArray.
     * The optional {byteOffset} and {length} arguments specify a memory range
     * within the {arrayBuffer} that will be shared by the Buffer.
     *
     * @param arrayBuffer The .buffer property of a TypedArray or a new ArrayBuffer()
     * @param byteOffset
     * @param length
     */
    from(arrayBuffer: ArrayBuffer, byteOffset?: number, length?: number): Buffer;
    /**
     * Copies the passed {buffer} data onto a new Buffer instance.
     *
     * @param buffer
     */
    from(buffer: Buffer): Buffer;
    /**
     * Creates a new Buffer containing the given JavaScript string {str}.
     * If provided, the {encoding} parameter identifies the character encoding.
     * If not provided, {encoding} defaults to 'utf8'.
     *
     * @param str
     */
    from(str: string, encoding?: string): Buffer;
    /**
     * Returns true if {obj} is a Buffer (acts as a type guard).
     *
     * @param obj object to test.
     */
    isBuffer(obj: any): obj is Buffer;
    /**
     * Returns true if {encoding} is a valid encoding argument.
     * Valid string encodings in Node 0.12: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex'
     *
     * @param encoding string to test.
     */
    isEncoding(encoding: string): boolean;
    /**
     * Gives the actual byte length of a string. encoding defaults to 'utf8'.
     * This is not the same as String.prototype.length since that returns the number of characters in a string.
     *
     * @param string string to test.
     * @param encoding encoding used to evaluate (defaults to 'utf8')
     */
    byteLength(string: string, encoding?: string): number;
    /**
     * Returns a buffer which is the result of concatenating all the buffers in the list together.
     *
     * If the list has no items, or if the totalLength is 0, then it returns a zero-length buffer.
     * If the list has exactly one item, then the first item of the list is returned.
     * If the list has more than one item, then a new Buffer is created.
     *
     * @param list An array of Buffer objects to concatenate
     * @param totalLength Total length of the buffers when concatenated.
     *   If totalLength is not provided, it is read from the buffers in the list. However, this adds an additional loop to the function, so it is faster to provide the length explicitly.
     */
    concat(list: Buffer[], totalLength?: number): Buffer;
    /**
     * The same as buf1.compare(buf2).
     */
    compare(buf1: Buffer, buf2: Buffer): number;
    /**
     * Allocates a new buffer of {size} octets.
     *
     * @param size count of octets to allocate.
     * @param fill if specified, buffer will be initialized by calling buf.fill(fill).
     *    If parameter is omitted, buffer will be filled with zeros.
     * @param encoding encoding used for call to buf.fill while initializing
     */
    alloc(size: number, fill?: string | Buffer | number, encoding?: string): Buffer;
    /**
     * Allocates a new buffer of {size} octets, leaving memory not initialized, so the contents
     * of the newly created Buffer are unknown and may contain sensitive data.
     *
     * @param size count of octets to allocate
     */
    allocUnsafe(size: number): Buffer;
    /**
     * Allocates a new non-pooled buffer of {size} octets, leaving memory not initialized, so the contents
     * of the newly created Buffer are unknown and may contain sensitive data.
     *
     * @param size count of octets to allocate
     */
    allocUnsafeSlow(size: number): Buffer;
};
/************************************************
* *
* GLOBAL INTERFACES *
* *
************************************************/
declare namespace NodeJS {
export var Console: {
prototype: Console;
new(stdout: WritableStream, stderr?: WritableStream): Console;
}
export interface ErrnoException extends Error {
errno?: number;
code?: string;
path?: string;
syscall?: string;
stack?: string;
}
export class EventEmitter {
addListener(event: string | symbol, listener: Function): this;
on(event: string | symbol, listener: Function): this;
once(event: string | symbol, listener: Function): this;
removeListener(event: string | symbol, listener: Function): this;
removeAllListeners(event?: string | symbol): this;
setMaxListeners(n: number): this;
getMaxListeners(): number;
listeners(event: string | symbol): Function[];
emit(event: string | symbol, ...args: any[]): boolean;
listenerCount(type: string | symbol): number;
// Added in Node 6...
prependListener(event: string | symbol, listener: Function): this;
prependOnceListener(event: string | symbol, listener: Function): this;
eventNames(): (string | symbol)[];
}
export interface ReadableStream extends EventEmitter {
readable: boolean;
isTTY?: boolean;
read(size?: number): string | Buffer;
setEncoding(encoding: string | null): void;
pause(): ReadableStream;
resume(): ReadableStream;
pipe<T extends WritableStream>(destination: T, options?: { end?: boolean; }): T;
unpipe<T extends WritableStream>(destination?: T): void;
unshift(chunk: string): void;
unshift(chunk: Buffer): void;
wrap(oldStream: ReadableStream): ReadableStream;
}
export interface WritableStream extends EventEmitter {
writable: boolean;
isTTY?: boolean;
write(buffer: Buffer | string, cb?: Function): boolean;
write(str: string, encoding?: string, cb?: Function): boolean;
end(): void;
end(buffer: Buffer, cb?: Function): void;
end(str: string, cb?: Function): void;
end(str: string, encoding?: string, cb?: Function): void;
}
export interface ReadWriteStream extends ReadableStream, WritableStream {
pause(): ReadWriteStream;
resume(): ReadWriteStream;
}
export interface Events extends EventEmitter { }
export interface Domain extends Events {
run(fn: Function): void;
add(emitter: Events): void;
remove(emitter: Events): void;
bind(cb: (err: Error, data: any) => any): any;
intercept(cb: (data: any) => any): any;
dispose(): void;
addListener(event: string, listener: Function): this;
on(event: string, listener: Function): this;
once(event: string, listener: Function): this;
removeListener(event: string, listener: Function): this;
removeAllListeners(event?: string): this;
}
export interface MemoryUsage {
rss: number;
heapTotal: number;
heapUsed: number;
}
export interface ProcessVersions {
http_parser: string;
node: string;
v8: string;
ares: string;
uv: string;
zlib: string;
modules: string;
openssl: string;
}
    /**
     * Type of the global `process` object: standard streams, process
     * identity/control, environment, and build configuration.
     */
    export interface Process extends EventEmitter {
        stdout: WritableStream;
        stderr: WritableStream;
        stdin: ReadableStream;
        argv: string[];
        argv0: string;
        execArgv: string[];
        execPath: string;
        abort(): void;
        chdir(directory: string): void;
        cwd(): string;
        // Environment variables; typed `any` so arbitrary keys are assignable.
        env: any;
        exit(code?: number): void;
        exitCode: number;
        // POSIX-only identity getters/setters; ids accepted by number or name.
        getgid(): number;
        setgid(id: number): void;
        setgid(id: string): void;
        getuid(): number;
        setuid(id: number): void;
        setuid(id: string): void;
        version: string;
        versions: ProcessVersions;
        // Snapshot of the configure-time build options (gyp variables).
        config: {
            target_defaults: {
                cflags: any[];
                default_configuration: string;
                defines: string[];
                include_dirs: string[];
                libraries: string[];
            };
            variables: {
                clang: number;
                host_arch: string;
                node_install_npm: boolean;
                node_install_waf: boolean;
                node_prefix: string;
                node_shared_openssl: boolean;
                node_shared_v8: boolean;
                node_shared_zlib: boolean;
                node_use_dtrace: boolean;
                node_use_etw: boolean;
                node_use_openssl: boolean;
                target_arch: string;
                v8_no_strict_aliasing: number;
                v8_use_snapshot: boolean;
                visibility: string;
            };
        };
        kill(pid: number, signal?: string | number): void;
        pid: number;
        title: string;
        arch: string;
        platform: string;
        mainModule?: NodeModule;
        memoryUsage(): MemoryUsage;
        nextTick(callback: Function, ...args: any[]): void;
        umask(mask?: number): number;
        uptime(): number;
        // High-resolution time as [seconds, nanoseconds]; pass a previous
        // result to get a diff.
        hrtime(time?: [number, number]): [number, number];
        domain: Domain;
        // Worker
        // `send` is optional because it only exists when the process was
        // spawned with an IPC channel (e.g. as a child/cluster worker).
        send?(message: any, sendHandle?: any): void;
        disconnect(): void;
        connected: boolean;
    }
    /**
     * Shape of the Node global object. Several entries (`Promise`, `Symbol`,
     * `Uint8ClampedArray`) are typed loosely as `Function`, presumably so
     * these typings compile without the es2015 standard library — confirm
     * against the targeted TypeScript lib configuration.
     */
    export interface Global {
        Array: typeof Array;
        ArrayBuffer: typeof ArrayBuffer;
        Boolean: typeof Boolean;
        Buffer: typeof Buffer;
        DataView: typeof DataView;
        Date: typeof Date;
        Error: typeof Error;
        EvalError: typeof EvalError;
        Float32Array: typeof Float32Array;
        Float64Array: typeof Float64Array;
        Function: typeof Function;
        GLOBAL: Global;
        Infinity: typeof Infinity;
        Int16Array: typeof Int16Array;
        Int32Array: typeof Int32Array;
        Int8Array: typeof Int8Array;
        Intl: typeof Intl;
        JSON: typeof JSON;
        Map: MapConstructor;
        Math: typeof Math;
        NaN: typeof NaN;
        Number: typeof Number;
        Object: typeof Object;
        Promise: Function;
        RangeError: typeof RangeError;
        ReferenceError: typeof ReferenceError;
        RegExp: typeof RegExp;
        Set: SetConstructor;
        String: typeof String;
        Symbol: Function;
        SyntaxError: typeof SyntaxError;
        TypeError: typeof TypeError;
        URIError: typeof URIError;
        Uint16Array: typeof Uint16Array;
        Uint32Array: typeof Uint32Array;
        Uint8Array: typeof Uint8Array;
        Uint8ClampedArray: Function;
        WeakMap: WeakMapConstructor;
        WeakSet: WeakSetConstructor;
        clearImmediate: (immediateId: any) => void;
        clearInterval: (intervalId: NodeJS.Timer) => void;
        clearTimeout: (timeoutId: NodeJS.Timer) => void;
        console: typeof console;
        decodeURI: typeof decodeURI;
        decodeURIComponent: typeof decodeURIComponent;
        encodeURI: typeof encodeURI;
        encodeURIComponent: typeof encodeURIComponent;
        escape: (str: string) => string;
        eval: typeof eval;
        global: Global;
        isFinite: typeof isFinite;
        isNaN: typeof isNaN;
        parseFloat: typeof parseFloat;
        parseInt: typeof parseInt;
        process: Process;
        root: Global;
        setImmediate: (callback: (...args: any[]) => void, ...args: any[]) => any;
        setInterval: (callback: (...args: any[]) => void, ms: number, ...args: any[]) => NodeJS.Timer;
        setTimeout: (callback: (...args: any[]) => void, ms: number, ...args: any[]) => NodeJS.Timer;
        undefined: typeof undefined;
        unescape: (str: string) => string;
        // Only available when Node is started with --expose-gc — TODO confirm;
        // typed as always-present here.
        gc: () => void;
        v8debug?: any;
    }
    /**
     * Handle returned by setTimeout/setInterval. `ref`/`unref` control
     * whether the timer keeps the event loop alive.
     */
    export interface Timer {
        ref(): void;
        unref(): void;
    }
}
interface IterableIterator<T> { }
/**
 * Instance surface of Node's Buffer: typed reads/writes at byte offsets,
 * slicing, searching, and iteration over a Uint8Array-backed byte buffer.
 * The optional `noAssert` flags skip offset validation on read/write.
 * @deprecated Kept only for backwards compatibility of the `NodeBuffer` name.
 */
interface NodeBuffer extends Uint8Array {
    write(string: string, offset?: number, length?: number, encoding?: string): number;
    toString(encoding?: string, start?: number, end?: number): string;
    toJSON(): { type: 'Buffer', data: any[] };
    equals(otherBuffer: Buffer): boolean;
    compare(otherBuffer: Buffer, targetStart?: number, targetEnd?: number, sourceStart?: number, sourceEnd?: number): number;
    copy(targetBuffer: Buffer, targetStart?: number, sourceStart?: number, sourceEnd?: number): number;
    // Returns a view over the same memory, not a copy.
    slice(start?: number, end?: number): Buffer;
    // Variable-width integer access: `byteLength` selects 1-6 bytes.
    writeUIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number;
    writeUIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number;
    writeIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number;
    writeIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number;
    readUIntLE(offset: number, byteLength: number, noAssert?: boolean): number;
    readUIntBE(offset: number, byteLength: number, noAssert?: boolean): number;
    readIntLE(offset: number, byteLength: number, noAssert?: boolean): number;
    readIntBE(offset: number, byteLength: number, noAssert?: boolean): number;
    // Fixed-width reads, little-endian (LE) and big-endian (BE).
    readUInt8(offset: number, noAssert?: boolean): number;
    readUInt16LE(offset: number, noAssert?: boolean): number;
    readUInt16BE(offset: number, noAssert?: boolean): number;
    readUInt32LE(offset: number, noAssert?: boolean): number;
    readUInt32BE(offset: number, noAssert?: boolean): number;
    readInt8(offset: number, noAssert?: boolean): number;
    readInt16LE(offset: number, noAssert?: boolean): number;
    readInt16BE(offset: number, noAssert?: boolean): number;
    readInt32LE(offset: number, noAssert?: boolean): number;
    readInt32BE(offset: number, noAssert?: boolean): number;
    readFloatLE(offset: number, noAssert?: boolean): number;
    readFloatBE(offset: number, noAssert?: boolean): number;
    readDoubleLE(offset: number, noAssert?: boolean): number;
    readDoubleBE(offset: number, noAssert?: boolean): number;
    // In-place byte-order swaps over 16/32/64-bit groups.
    swap16(): Buffer;
    swap32(): Buffer;
    swap64(): Buffer;
    writeUInt8(value: number, offset: number, noAssert?: boolean): number;
    writeUInt16LE(value: number, offset: number, noAssert?: boolean): number;
    writeUInt16BE(value: number, offset: number, noAssert?: boolean): number;
    writeUInt32LE(value: number, offset: number, noAssert?: boolean): number;
    writeUInt32BE(value: number, offset: number, noAssert?: boolean): number;
    writeInt8(value: number, offset: number, noAssert?: boolean): number;
    writeInt16LE(value: number, offset: number, noAssert?: boolean): number;
    writeInt16BE(value: number, offset: number, noAssert?: boolean): number;
    writeInt32LE(value: number, offset: number, noAssert?: boolean): number;
    writeInt32BE(value: number, offset: number, noAssert?: boolean): number;
    writeFloatLE(value: number, offset: number, noAssert?: boolean): number;
    writeFloatBE(value: number, offset: number, noAssert?: boolean): number;
    writeDoubleLE(value: number, offset: number, noAssert?: boolean): number;
    writeDoubleBE(value: number, offset: number, noAssert?: boolean): number;
    fill(value: any, offset?: number, end?: number): this;
    indexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number;
    lastIndexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number;
    entries(): IterableIterator<[number, number]>;
    includes(value: string | number | Buffer, byteOffset?: number, encoding?: string): boolean;
    keys(): IterableIterator<number>;
    values(): IterableIterator<number>;
}
/************************************************
* *
* MODULES *
* *
************************************************/
/**
 * The "buffer" module: re-exports the global Buffer and SlowBuffer
 * constructors. Local aliases are declared so the export clause can
 * rename them back to their public names.
 */
declare module "buffer" {
    export var INSPECT_MAX_BYTES: number;
    var BuffType: typeof Buffer;
    var SlowBuffType: typeof SlowBuffer;
    export { BuffType as Buffer, SlowBuffType as SlowBuffer };
}
/**
 * The "querystring" module: serialize/parse URL query strings with
 * configurable separator (`sep`), assignment (`eq`) characters, and
 * custom encode/decode hooks.
 */
declare module "querystring" {
    export interface StringifyOptions {
        // Override the per-component encoder (defaults to encodeURIComponent).
        encodeURIComponent?: Function;
    }
    export interface ParseOptions {
        // Cap on the number of keys parsed.
        maxKeys?: number;
        // Override the per-component decoder (defaults to decodeURIComponent).
        decodeURIComponent?: Function;
    }
    export function stringify<T>(obj: T, sep?: string, eq?: string, options?: StringifyOptions): string;
    // Untyped overload first; the generic overload lets callers assert a shape.
    export function parse(str: string, sep?: string, eq?: string, options?: ParseOptions): any;
    export function parse<T extends {}>(str: string, sep?: string, eq?: string, options?: ParseOptions): T;
    export function escape(str: string): string;
    export function unescape(str: string): string;
}
/**
 * The "events" module. The class/namespace pair named `internal` exists so
 * that `export = internal` can expose EventEmitter both as the module's
 * value and as a nested `events.EventEmitter` class (the merged-declaration
 * trick commonly used for `module.exports = EventEmitter`-style modules).
 */
declare module "events" {
    class internal extends NodeJS.EventEmitter { }
    namespace internal {
        export class EventEmitter extends internal {
            // deprecated: superseded by the instance method listenerCount(type).
            static listenerCount(emitter: EventEmitter, event: string | symbol): number; // deprecated
            static defaultMaxListeners: number;
            addListener(event: string | symbol, listener: Function): this;
            on(event: string | symbol, listener: Function): this;
            once(event: string | symbol, listener: Function): this;
            prependListener(event: string | symbol, listener: Function): this;
            prependOnceListener(event: string | symbol, listener: Function): this;
            removeListener(event: string | symbol, listener: Function): this;
            removeAllListeners(event?: string | symbol): this;
            setMaxListeners(n: number): this;
            getMaxListeners(): number;
            listeners(event: string | symbol): Function[];
            emit(event: string | symbol, ...args: any[]): boolean;
            eventNames(): (string | symbol)[];
            listenerCount(type: string | symbol): number;
        }
    }
    export = internal;
}
/**
 * The "http" module: server/client request and response shapes, agents,
 * and the module-level factory functions.
 */
declare module "http" {
    import * as events from "events";
    import * as net from "net";
    import * as stream from "stream";
    /** Options accepted by http.request / http.get. */
    export interface RequestOptions {
        protocol?: string;
        host?: string;
        hostname?: string;
        family?: number;
        port?: number;
        localAddress?: string;
        socketPath?: string;
        method?: string;
        path?: string;
        headers?: { [key: string]: any };
        auth?: string;
        // `false` opts out of connection pooling entirely.
        agent?: Agent | boolean;
    }
    export interface Server extends net.Server {
        setTimeout(msecs: number, callback: Function): void;
        maxHeadersCount: number;
        timeout: number;
        listening: boolean;
    }
    /**
     * @deprecated Use IncomingMessage
     */
    export interface ServerRequest extends IncomingMessage {
        connection: net.Socket;
    }
    /** Outgoing server response; extends stream.Writable with HTTP specifics. */
    export interface ServerResponse extends stream.Writable {
        // Extended base methods
        write(buffer: Buffer): boolean;
        write(buffer: Buffer, cb?: Function): boolean;
        write(str: string, cb?: Function): boolean;
        write(str: string, encoding?: string, cb?: Function): boolean;
        write(str: string, encoding?: string, fd?: string): boolean;
        writeContinue(): void;
        writeHead(statusCode: number, reasonPhrase?: string, headers?: any): void;
        writeHead(statusCode: number, headers?: any): void;
        statusCode: number;
        statusMessage: string;
        headersSent: boolean;
        setHeader(name: string, value: string | string[]): void;
        setTimeout(msecs: number, callback: Function): ServerResponse;
        sendDate: boolean;
        getHeader(name: string): string;
        removeHeader(name: string): void;
        write(chunk: any, encoding?: string): any;
        addTrailers(headers: any): void;
        finished: boolean;
        // Extended base methods
        end(): void;
        end(buffer: Buffer, cb?: Function): void;
        end(str: string, cb?: Function): void;
        end(str: string, encoding?: string, cb?: Function): void;
        end(data?: any, encoding?: string): void;
    }
    /** Outgoing client request returned by http.request / http.get. */
    export interface ClientRequest extends stream.Writable {
        // Extended base methods
        write(buffer: Buffer): boolean;
        write(buffer: Buffer, cb?: Function): boolean;
        write(str: string, cb?: Function): boolean;
        write(str: string, encoding?: string, cb?: Function): boolean;
        write(str: string, encoding?: string, fd?: string): boolean;
        write(chunk: any, encoding?: string): void;
        abort(): void;
        setTimeout(timeout: number, callback?: Function): void;
        setNoDelay(noDelay?: boolean): void;
        setSocketKeepAlive(enable?: boolean, initialDelay?: number): void;
        setHeader(name: string, value: string | string[]): void;
        getHeader(name: string): string;
        removeHeader(name: string): void;
        addTrailers(headers: any): void;
        // Extended base methods
        end(): void;
        end(buffer: Buffer, cb?: Function): void;
        end(str: string, cb?: Function): void;
        end(str: string, encoding?: string, cb?: Function): void;
        end(data?: any, encoding?: string): void;
    }
    /**
     * Message received by a server (request) or a client (response);
     * the optional members apply to one direction only, as noted below.
     */
    export interface IncomingMessage extends stream.Readable {
        httpVersion: string;
        httpVersionMajor: number;
        httpVersionMinor: number;
        connection: net.Socket;
        headers: any;
        rawHeaders: string[];
        trailers: any;
        rawTrailers: any;
        setTimeout(msecs: number, callback: Function): NodeJS.Timer;
        /**
         * Only valid for request obtained from http.Server.
         */
        method?: string;
        /**
         * Only valid for request obtained from http.Server.
         */
        url?: string;
        /**
         * Only valid for response obtained from http.ClientRequest.
         */
        statusCode?: number;
        /**
         * Only valid for response obtained from http.ClientRequest.
         */
        statusMessage?: string;
        socket: net.Socket;
        destroy(error?: Error): void;
    }
    /**
     * @deprecated Use IncomingMessage
     */
    export interface ClientResponse extends IncomingMessage { }
    export interface AgentOptions {
        /**
         * Keep sockets around in a pool to be used by other requests in the future. Default = false
         */
        keepAlive?: boolean;
        /**
         * When using HTTP KeepAlive, how often to send TCP KeepAlive packets over sockets being kept alive. Default = 1000.
         * Only relevant if keepAlive is set to true.
         */
        keepAliveMsecs?: number;
        /**
         * Maximum number of sockets to allow per host. Default for Node 0.10 is 5, default for Node 0.12 is Infinity
         */
        maxSockets?: number;
        /**
         * Maximum number of sockets to leave open in a free state. Only relevant if keepAlive is set to true. Default = 256.
         */
        maxFreeSockets?: number;
    }
    export class Agent {
        maxSockets: number;
        sockets: any;
        requests: any;
        constructor(opts?: AgentOptions);
        /**
         * Destroy any sockets that are currently in use by the agent.
         * It is usually not necessary to do this. However, if you are using an agent with KeepAlive enabled,
         * then it is best to explicitly shut down the agent when you know that it will no longer be used. Otherwise,
         * sockets may hang open for quite a long time before the server terminates them.
         */
        destroy(): void;
    }
    export var METHODS: string[];
    // Status code → reason-phrase map; indexable by number or string key.
    export var STATUS_CODES: {
        [errorCode: number]: string;
        [errorCode: string]: string;
    };
    export function createServer(requestListener?: (request: IncomingMessage, response: ServerResponse) => void): Server;
    // NOTE(review): legacy API — deprecated in the Node docs; prefer http.request.
    export function createClient(port?: number, host?: string): any;
    export function request(options: RequestOptions, callback?: (res: IncomingMessage) => void): ClientRequest;
    export function get(options: any, callback?: (res: IncomingMessage) => void): ClientRequest;
    export var globalAgent: Agent;
}
/**
 * The "cluster" module: fork worker processes that share server ports.
 * The Cluster interface mirrors the module's own exports (the module object
 * is itself an EventEmitter), so the event overloads appear three times:
 * on Worker, on Cluster, and as top-level module functions.
 *
 * NOTE(review): the `emit` overloads below take a `listener` parameter,
 * mirroring the `on`/`once` shapes rather than `emit`'s real
 * (event, ...args) signature; several also lack trailing semicolons.
 * Left as-is since changing them would change the declared API.
 */
declare module "cluster" {
    import * as child from "child_process";
    import * as events from "events";
    import * as net from "net";
    // interfaces
    export interface ClusterSettings {
        execArgv?: string[]; // default: process.execArgv
        exec?: string;
        args?: string[];
        silent?: boolean;
        stdio?: any[];
        uid?: number;
        gid?: number;
    }
    export interface ClusterSetupMasterSettings {
        exec?: string; // default: process.argv[1]
        args?: string[]; // default: process.argv.slice(2)
        silent?: boolean; // default: false
        stdio?: any[];
    }
    export interface Address {
        address: string;
        port: number;
        addressType: number | "udp4" | "udp6"; // 4, 6, -1, "udp4", "udp6"
    }
    /** Handle to a single forked worker process. */
    export class Worker extends events.EventEmitter {
        id: string;
        process: child.ChildProcess;
        // deprecated predecessor of exitedAfterDisconnect.
        suicide: boolean;
        send(message: any, sendHandle?: any): boolean;
        kill(signal?: string): void;
        destroy(signal?: string): void;
        disconnect(): void;
        isConnected(): boolean;
        isDead(): boolean;
        exitedAfterDisconnect: boolean;
        /**
         * events.EventEmitter
         * 1. disconnect
         * 2. error
         * 3. exit
         * 4. listening
         * 5. message
         * 6. online
         */
        addListener(event: string, listener: Function): this;
        addListener(event: "disconnect", listener: () => void): this;
        addListener(event: "error", listener: (code: number, signal: string) => void): this;
        addListener(event: "exit", listener: (code: number, signal: string) => void): this;
        addListener(event: "listening", listener: (address: Address) => void): this;
        addListener(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
        addListener(event: "online", listener: () => void): this;
        emit(event: string, listener: Function): boolean
        emit(event: "disconnect", listener: () => void): boolean
        emit(event: "error", listener: (code: number, signal: string) => void): boolean
        emit(event: "exit", listener: (code: number, signal: string) => void): boolean
        emit(event: "listening", listener: (address: Address) => void): boolean
        emit(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): boolean
        emit(event: "online", listener: () => void): boolean
        on(event: string, listener: Function): this;
        on(event: "disconnect", listener: () => void): this;
        on(event: "error", listener: (code: number, signal: string) => void): this;
        on(event: "exit", listener: (code: number, signal: string) => void): this;
        on(event: "listening", listener: (address: Address) => void): this;
        on(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
        on(event: "online", listener: () => void): this;
        once(event: string, listener: Function): this;
        once(event: "disconnect", listener: () => void): this;
        once(event: "error", listener: (code: number, signal: string) => void): this;
        once(event: "exit", listener: (code: number, signal: string) => void): this;
        once(event: "listening", listener: (address: Address) => void): this;
        once(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
        once(event: "online", listener: () => void): this;
        prependListener(event: string, listener: Function): this;
        prependListener(event: "disconnect", listener: () => void): this;
        prependListener(event: "error", listener: (code: number, signal: string) => void): this;
        prependListener(event: "exit", listener: (code: number, signal: string) => void): this;
        prependListener(event: "listening", listener: (address: Address) => void): this;
        prependListener(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
        prependListener(event: "online", listener: () => void): this;
        prependOnceListener(event: string, listener: Function): this;
        prependOnceListener(event: "disconnect", listener: () => void): this;
        prependOnceListener(event: "error", listener: (code: number, signal: string) => void): this;
        prependOnceListener(event: "exit", listener: (code: number, signal: string) => void): this;
        prependOnceListener(event: "listening", listener: (address: Address) => void): this;
        prependOnceListener(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
        prependOnceListener(event: "online", listener: () => void): this;
    }
    /** Shape of the cluster module object itself. */
    export interface Cluster extends events.EventEmitter {
        Worker: Worker;
        disconnect(callback?: Function): void;
        fork(env?: any): Worker;
        isMaster: boolean;
        isWorker: boolean;
        // TODO: cluster.schedulingPolicy
        settings: ClusterSettings;
        setupMaster(settings?: ClusterSetupMasterSettings): void;
        worker: Worker;
        workers: {
            [index: string]: Worker
        };
        /**
         * events.EventEmitter
         * 1. disconnect
         * 2. exit
         * 3. fork
         * 4. listening
         * 5. message
         * 6. online
         * 7. setup
         */
        addListener(event: string, listener: Function): this;
        addListener(event: "disconnect", listener: (worker: Worker) => void): this;
        addListener(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this;
        addListener(event: "fork", listener: (worker: Worker) => void): this;
        addListener(event: "listening", listener: (worker: Worker, address: Address) => void): this;
        addListener(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
        addListener(event: "online", listener: (worker: Worker) => void): this;
        addListener(event: "setup", listener: (settings: any) => void): this;
        emit(event: string, listener: Function): boolean;
        emit(event: "disconnect", listener: (worker: Worker) => void): boolean;
        emit(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): boolean;
        emit(event: "fork", listener: (worker: Worker) => void): boolean;
        emit(event: "listening", listener: (worker: Worker, address: Address) => void): boolean;
        emit(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): boolean;
        emit(event: "online", listener: (worker: Worker) => void): boolean;
        emit(event: "setup", listener: (settings: any) => void): boolean;
        on(event: string, listener: Function): this;
        on(event: "disconnect", listener: (worker: Worker) => void): this;
        on(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this;
        on(event: "fork", listener: (worker: Worker) => void): this;
        on(event: "listening", listener: (worker: Worker, address: Address) => void): this;
        on(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
        on(event: "online", listener: (worker: Worker) => void): this;
        on(event: "setup", listener: (settings: any) => void): this;
        once(event: string, listener: Function): this;
        once(event: "disconnect", listener: (worker: Worker) => void): this;
        once(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this;
        once(event: "fork", listener: (worker: Worker) => void): this;
        once(event: "listening", listener: (worker: Worker, address: Address) => void): this;
        once(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
        once(event: "online", listener: (worker: Worker) => void): this;
        once(event: "setup", listener: (settings: any) => void): this;
        prependListener(event: string, listener: Function): this;
        prependListener(event: "disconnect", listener: (worker: Worker) => void): this;
        prependListener(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this;
        prependListener(event: "fork", listener: (worker: Worker) => void): this;
        prependListener(event: "listening", listener: (worker: Worker, address: Address) => void): this;
        prependListener(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
        prependListener(event: "online", listener: (worker: Worker) => void): this;
        prependListener(event: "setup", listener: (settings: any) => void): this;
        prependOnceListener(event: string, listener: Function): this;
        prependOnceListener(event: "disconnect", listener: (worker: Worker) => void): this;
        prependOnceListener(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this;
        prependOnceListener(event: "fork", listener: (worker: Worker) => void): this;
        prependOnceListener(event: "listening", listener: (worker: Worker, address: Address) => void): this;
        prependOnceListener(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
        prependOnceListener(event: "online", listener: (worker: Worker) => void): this;
        prependOnceListener(event: "setup", listener: (settings: any) => void): this;
    }
    // Top-level mirrors of the Cluster interface members above.
    export function disconnect(callback?: Function): void;
    export function fork(env?: any): Worker;
    export var isMaster: boolean;
    export var isWorker: boolean;
    // TODO: cluster.schedulingPolicy
    export var settings: ClusterSettings;
    export function setupMaster(settings?: ClusterSetupMasterSettings): void;
    export var worker: Worker;
    export var workers: {
        [index: string]: Worker
    };
    /**
     * events.EventEmitter
     * 1. disconnect
     * 2. exit
     * 3. fork
     * 4. listening
     * 5. message
     * 6. online
     * 7. setup
     */
    export function addListener(event: string, listener: Function): Cluster;
    export function addListener(event: "disconnect", listener: (worker: Worker) => void): Cluster;
    export function addListener(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): Cluster;
    export function addListener(event: "fork", listener: (worker: Worker) => void): Cluster;
    export function addListener(event: "listening", listener: (worker: Worker, address: Address) => void): Cluster;
    export function addListener(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): Cluster; // the handle is a net.Socket or net.Server object, or undefined.
    export function addListener(event: "online", listener: (worker: Worker) => void): Cluster;
    export function addListener(event: "setup", listener: (settings: any) => void): Cluster;
    export function emit(event: string, listener: Function): boolean;
    export function emit(event: "disconnect", listener: (worker: Worker) => void): boolean;
    export function emit(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): boolean;
    export function emit(event: "fork", listener: (worker: Worker) => void): boolean;
    export function emit(event: "listening", listener: (worker: Worker, address: Address) => void): boolean;
    export function emit(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): boolean;
    export function emit(event: "online", listener: (worker: Worker) => void): boolean;
    export function emit(event: "setup", listener: (settings: any) => void): boolean;
    export function on(event: string, listener: Function): Cluster;
    export function on(event: "disconnect", listener: (worker: Worker) => void): Cluster;
    export function on(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): Cluster;
    export function on(event: "fork", listener: (worker: Worker) => void): Cluster;
    export function on(event: "listening", listener: (worker: Worker, address: Address) => void): Cluster;
    export function on(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): Cluster; // the handle is a net.Socket or net.Server object, or undefined.
    export function on(event: "online", listener: (worker: Worker) => void): Cluster;
    export function on(event: "setup", listener: (settings: any) => void): Cluster;
    export function once(event: string, listener: Function): Cluster;
    export function once(event: "disconnect", listener: (worker: Worker) => void): Cluster;
    export function once(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): Cluster;
    export function once(event: "fork", listener: (worker: Worker) => void): Cluster;
    export function once(event: "listening", listener: (worker: Worker, address: Address) => void): Cluster;
    export function once(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): Cluster; // the handle is a net.Socket or net.Server object, or undefined.
    export function once(event: "online", listener: (worker: Worker) => void): Cluster;
    export function once(event: "setup", listener: (settings: any) => void): Cluster;
    export function removeListener(event: string, listener: Function): Cluster;
    export function removeAllListeners(event?: string): Cluster;
    export function setMaxListeners(n: number): Cluster;
    export function getMaxListeners(): number;
    export function listeners(event: string): Function[];
    export function listenerCount(type: string): number;
    export function prependListener(event: string, listener: Function): Cluster;
    export function prependListener(event: "disconnect", listener: (worker: Worker) => void): Cluster;
    export function prependListener(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): Cluster;
    export function prependListener(event: "fork", listener: (worker: Worker) => void): Cluster;
    export function prependListener(event: "listening", listener: (worker: Worker, address: Address) => void): Cluster;
    export function prependListener(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): Cluster; // the handle is a net.Socket or net.Server object, or undefined.
    export function prependListener(event: "online", listener: (worker: Worker) => void): Cluster;
    export function prependListener(event: "setup", listener: (settings: any) => void): Cluster;
    export function prependOnceListener(event: string, listener: Function): Cluster;
    export function prependOnceListener(event: "disconnect", listener: (worker: Worker) => void): Cluster;
    export function prependOnceListener(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): Cluster;
    export function prependOnceListener(event: "fork", listener: (worker: Worker) => void): Cluster;
    export function prependOnceListener(event: "listening", listener: (worker: Worker, address: Address) => void): Cluster;
    export function prependOnceListener(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): Cluster; // the handle is a net.Socket or net.Server object, or undefined.
    export function prependOnceListener(event: "online", listener: (worker: Worker) => void): Cluster;
    export function prependOnceListener(event: "setup", listener: (settings: any) => void): Cluster;
    export function eventNames(): string[];
}
/**
 * The "zlib" module: streaming compressor/decompressor classes plus
 * one-shot async (callback) and sync convenience functions.
 *
 * NOTE(review): `deflate`/`deflateRaw` accept `Buffer | string` but the
 * remaining one-shot functions accept only `Buffer` — possibly an
 * inconsistency; confirm against the targeted Node version before widening.
 */
declare module "zlib" {
    import * as stream from "stream";
    export interface ZlibOptions { chunkSize?: number; windowBits?: number; level?: number; memLevel?: number; strategy?: number; dictionary?: any; finishFlush?: number }
    // Each class is a Transform stream: write compressed/raw data in,
    // read the converted data out.
    export interface Gzip extends stream.Transform { }
    export interface Gunzip extends stream.Transform { }
    export interface Deflate extends stream.Transform { }
    export interface Inflate extends stream.Transform { }
    export interface DeflateRaw extends stream.Transform { }
    export interface InflateRaw extends stream.Transform { }
    export interface Unzip extends stream.Transform { }
    export function createGzip(options?: ZlibOptions): Gzip;
    export function createGunzip(options?: ZlibOptions): Gunzip;
    export function createDeflate(options?: ZlibOptions): Deflate;
    export function createInflate(options?: ZlibOptions): Inflate;
    export function createDeflateRaw(options?: ZlibOptions): DeflateRaw;
    export function createInflateRaw(options?: ZlibOptions): InflateRaw;
    export function createUnzip(options?: ZlibOptions): Unzip;
    export function deflate(buf: Buffer | string, callback: (error: Error, result: Buffer) => void): void;
    export function deflateSync(buf: Buffer | string, options?: ZlibOptions): Buffer;
    export function deflateRaw(buf: Buffer | string, callback: (error: Error, result: Buffer) => void): void;
    export function deflateRawSync(buf: Buffer | string, options?: ZlibOptions): Buffer;
    export function gzip(buf: Buffer, callback: (error: Error, result: Buffer) => void): void;
    export function gzipSync(buf: Buffer, options?: ZlibOptions): Buffer;
    export function gunzip(buf: Buffer, callback: (error: Error, result: Buffer) => void): void;
    export function gunzipSync(buf: Buffer, options?: ZlibOptions): Buffer;
    export function inflate(buf: Buffer, callback: (error: Error, result: Buffer) => void): void;
    export function inflateSync(buf: Buffer, options?: ZlibOptions): Buffer;
    export function inflateRaw(buf: Buffer, callback: (error: Error, result: Buffer) => void): void;
    export function inflateRawSync(buf: Buffer, options?: ZlibOptions): Buffer;
    export function unzip(buf: Buffer, callback: (error: Error, result: Buffer) => void): void;
    export function unzipSync(buf: Buffer, options?: ZlibOptions): Buffer;
    // Constants
    // Flush values
    export var Z_NO_FLUSH: number;
    export var Z_PARTIAL_FLUSH: number;
    export var Z_SYNC_FLUSH: number;
    export var Z_FULL_FLUSH: number;
    export var Z_FINISH: number;
    export var Z_BLOCK: number;
    export var Z_TREES: number;
    // Return codes
    export var Z_OK: number;
    export var Z_STREAM_END: number;
    export var Z_NEED_DICT: number;
    export var Z_ERRNO: number;
    export var Z_STREAM_ERROR: number;
    export var Z_DATA_ERROR: number;
    export var Z_MEM_ERROR: number;
    export var Z_BUF_ERROR: number;
    export var Z_VERSION_ERROR: number;
    // Compression levels
    export var Z_NO_COMPRESSION: number;
    export var Z_BEST_SPEED: number;
    export var Z_BEST_COMPRESSION: number;
    export var Z_DEFAULT_COMPRESSION: number;
    // Compression strategies
    export var Z_FILTERED: number;
    export var Z_HUFFMAN_ONLY: number;
    export var Z_RLE: number;
    export var Z_FIXED: number;
    export var Z_DEFAULT_STRATEGY: number;
    // Data types
    export var Z_BINARY: number;
    export var Z_TEXT: number;
    export var Z_ASCII: number;
    export var Z_UNKNOWN: number;
    export var Z_DEFLATED: number;
    export var Z_NULL: number;
}
declare module "os" {
    // Description of one logical CPU core as returned by cpus().
    export interface CpuInfo {
        model: string;
        speed: number;
        times: {
            user: number;
            nice: number;
            sys: number;
            idle: number;
            irq: number;
        };
    }
    // One address record of a network interface (networkInterfaces()).
    export interface NetworkInterfaceInfo {
        address: string;
        netmask: string;
        family: string;
        mac: string;
        internal: boolean;
    }
    export function hostname(): string;
    export function loadavg(): number[];
    export function uptime(): number;
    export function freemem(): number;
    export function totalmem(): number;
    export function cpus(): CpuInfo[];
    export function type(): string;
    export function release(): string;
    export function networkInterfaces(): { [index: string]: NetworkInterfaceInfo[] };
    export function homedir(): string;
    export function userInfo(options?: { encoding: string }): { username: string, uid: number, gid: number, shell: any, homedir: string }
    // Operating-system constants. FIX: the Node.js runtime exposes the
    // SIG* signal numbers under os.constants.signals and the E* error
    // codes under os.constants.errno; the previous declaration had the
    // contents of the two member groups swapped.
    export var constants: {
        UV_UDP_REUSEADDR: number,
        signals: {
            SIGHUP: number;
            SIGINT: number;
            SIGQUIT: number;
            SIGILL: number;
            SIGTRAP: number;
            SIGABRT: number;
            SIGIOT: number;
            SIGBUS: number;
            SIGFPE: number;
            SIGKILL: number;
            SIGUSR1: number;
            SIGSEGV: number;
            SIGUSR2: number;
            SIGPIPE: number;
            SIGALRM: number;
            SIGTERM: number;
            SIGCHLD: number;
            SIGSTKFLT: number;
            SIGCONT: number;
            SIGSTOP: number;
            SIGTSTP: number;
            SIGTTIN: number;
            SIGTTOU: number;
            SIGURG: number;
            SIGXCPU: number;
            SIGXFSZ: number;
            SIGVTALRM: number;
            SIGPROF: number;
            SIGWINCH: number;
            SIGIO: number;
            SIGPOLL: number;
            SIGPWR: number;
            SIGSYS: number;
            SIGUNUSED: number;
        },
        errno: {
            E2BIG: number;
            EACCES: number;
            EADDRINUSE: number;
            EADDRNOTAVAIL: number;
            EAFNOSUPPORT: number;
            EAGAIN: number;
            EALREADY: number;
            EBADF: number;
            EBADMSG: number;
            EBUSY: number;
            ECANCELED: number;
            ECHILD: number;
            ECONNABORTED: number;
            ECONNREFUSED: number;
            ECONNRESET: number;
            EDEADLK: number;
            EDESTADDRREQ: number;
            EDOM: number;
            EDQUOT: number;
            EEXIST: number;
            EFAULT: number;
            EFBIG: number;
            EHOSTUNREACH: number;
            EIDRM: number;
            EILSEQ: number;
            EINPROGRESS: number;
            EINTR: number;
            EINVAL: number;
            EIO: number;
            EISCONN: number;
            EISDIR: number;
            ELOOP: number;
            EMFILE: number;
            EMLINK: number;
            EMSGSIZE: number;
            EMULTIHOP: number;
            ENAMETOOLONG: number;
            ENETDOWN: number;
            ENETRESET: number;
            ENETUNREACH: number;
            ENFILE: number;
            ENOBUFS: number;
            ENODATA: number;
            ENODEV: number;
            ENOENT: number;
            ENOEXEC: number;
            ENOLCK: number;
            ENOLINK: number;
            ENOMEM: number;
            ENOMSG: number;
            ENOPROTOOPT: number;
            ENOSPC: number;
            ENOSR: number;
            ENOSTR: number;
            ENOSYS: number;
            ENOTCONN: number;
            ENOTDIR: number;
            ENOTEMPTY: number;
            ENOTSOCK: number;
            ENOTSUP: number;
            ENOTTY: number;
            ENXIO: number;
            EOPNOTSUPP: number;
            EOVERFLOW: number;
            EPERM: number;
            EPIPE: number;
            EPROTO: number;
            EPROTONOSUPPORT: number;
            EPROTOTYPE: number;
            ERANGE: number;
            EROFS: number;
            ESPIPE: number;
            ESRCH: number;
            ESTALE: number;
            ETIME: number;
            ETIMEDOUT: number;
            ETXTBSY: number;
            EWOULDBLOCK: number;
            EXDEV: number;
        },
    };
    export function arch(): string;
    export function platform(): string;
    export function tmpdir(): string;
    export var EOL: string;
    export function endianness(): "BE" | "LE";
}
declare module "https" {
    import * as tls from "tls";
    import * as events from "events";
    import * as http from "http";
    // TLS-specific server configuration: key material, trust settings,
    // cipher selection and SNI handling.
    export interface ServerOptions {
        pfx?: any;
        key?: any;
        passphrase?: string;
        cert?: any;
        ca?: any;
        crl?: any;
        ciphers?: string;
        honorCipherOrder?: boolean;
        requestCert?: boolean;
        rejectUnauthorized?: boolean;
        NPNProtocols?: any;
        SNICallback?: (servername: string, cb: (err: Error, ctx: tls.SecureContext) => any) => any;
    }
    // Plain http.RequestOptions extended with client-side TLS options.
    export interface RequestOptions extends http.RequestOptions {
        pfx?: any;
        key?: any;
        passphrase?: string;
        cert?: any;
        ca?: any;
        ciphers?: string;
        rejectUnauthorized?: boolean;
        secureProtocol?: string;
    }
    export interface Agent extends http.Agent { }
    // http.AgentOptions extended with TLS options plus session caching.
    export interface AgentOptions extends http.AgentOptions {
        pfx?: any;
        key?: any;
        passphrase?: string;
        cert?: any;
        ca?: any;
        ciphers?: string;
        rejectUnauthorized?: boolean;
        secureProtocol?: string;
        maxCachedSessions?: number;
    }
    export var Agent: {
        new (options?: AgentOptions): Agent;
    };
    export interface Server extends tls.Server { }
    export function createServer(options: ServerOptions, requestListener?: Function): Server;
    export function request(options: RequestOptions, callback?: (res: http.IncomingMessage) => void): http.ClientRequest;
    export function get(options: RequestOptions, callback?: (res: http.IncomingMessage) => void): http.ClientRequest;
    export var globalAgent: Agent;
}
declare module "punycode" {
    // Punycode (RFC 3492) encode/decode helpers for IDN domain names.
    export function decode(string: string): string;
    export function encode(string: string): string;
    export function toUnicode(domain: string): string;
    export function toASCII(domain: string): string;
    // UCS-2 code-unit <-> code-point conversion helpers.
    export var ucs2: ucs2;
    interface ucs2 {
        decode(string: string): number[];
        encode(codePoints: number[]): string;
    }
    export var version: any;
}
declare module "repl" {
    import * as stream from "stream";
    import * as readline from "readline";
    // Configuration accepted by repl.start().
    export interface ReplOptions {
        prompt?: string;
        input?: NodeJS.ReadableStream;
        output?: NodeJS.WritableStream;
        terminal?: boolean;
        eval?: Function;
        useColors?: boolean;
        useGlobal?: boolean;
        ignoreUndefined?: boolean;
        writer?: Function;
        completer?: Function;
        replMode?: any;
        breakEvalOnSigint?: any;
    }
    // A running REPL; inherits line-editing behavior from readline.ReadLine
    // and adds REPL-specific commands and the "exit"/"reset" events.
    export interface REPLServer extends readline.ReadLine {
        defineCommand(keyword: string, cmd: Function | { help: string, action: Function }): void;
        displayPrompt(preserveCursor?: boolean): void;
        /**
         * events.EventEmitter
         * 1. exit
         * 2. reset
         **/
        addListener(event: string, listener: Function): this;
        addListener(event: "exit", listener: () => void): this;
        addListener(event: "reset", listener: Function): this;
        emit(event: string, ...args: any[]): boolean;
        emit(event: "exit"): boolean;
        emit(event: "reset", context: any): boolean;
        on(event: string, listener: Function): this;
        on(event: "exit", listener: () => void): this;
        on(event: "reset", listener: Function): this;
        once(event: string, listener: Function): this;
        once(event: "exit", listener: () => void): this;
        once(event: "reset", listener: Function): this;
        prependListener(event: string, listener: Function): this;
        prependListener(event: "exit", listener: () => void): this;
        prependListener(event: "reset", listener: Function): this;
        prependOnceListener(event: string, listener: Function): this;
        prependOnceListener(event: "exit", listener: () => void): this;
        prependOnceListener(event: "reset", listener: Function): this;
    }
    export function start(options: ReplOptions): REPLServer;
}
declare module "readline" {
    import * as events from "events";
    import * as stream from "stream";
    // Decoded keypress descriptor passed to ReadLine.write().
    export interface Key {
        sequence?: string;
        name?: string;
        ctrl?: boolean;
        meta?: boolean;
        shift?: boolean;
    }
    // An interactive line-reading interface created by createInterface().
    export interface ReadLine extends events.EventEmitter {
        setPrompt(prompt: string): void;
        prompt(preserveCursor?: boolean): void;
        question(query: string, callback: (answer: string) => void): void;
        pause(): ReadLine;
        resume(): ReadLine;
        close(): void;
        write(data: string | Buffer, key?: Key): void;
        /**
         * events.EventEmitter
         * 1. close
         * 2. line
         * 3. pause
         * 4. resume
         * 5. SIGCONT
         * 6. SIGINT
         * 7. SIGTSTP
         **/
        addListener(event: string, listener: Function): this;
        addListener(event: "close", listener: () => void): this;
        addListener(event: "line", listener: (input: any) => void): this;
        addListener(event: "pause", listener: () => void): this;
        addListener(event: "resume", listener: () => void): this;
        addListener(event: "SIGCONT", listener: () => void): this;
        addListener(event: "SIGINT", listener: () => void): this;
        addListener(event: "SIGTSTP", listener: () => void): this;
        emit(event: string, ...args: any[]): boolean;
        emit(event: "close"): boolean;
        emit(event: "line", input: any): boolean;
        emit(event: "pause"): boolean;
        emit(event: "resume"): boolean;
        emit(event: "SIGCONT"): boolean;
        emit(event: "SIGINT"): boolean;
        emit(event: "SIGTSTP"): boolean;
        on(event: string, listener: Function): this;
        on(event: "close", listener: () => void): this;
        on(event: "line", listener: (input: any) => void): this;
        on(event: "pause", listener: () => void): this;
        on(event: "resume", listener: () => void): this;
        on(event: "SIGCONT", listener: () => void): this;
        on(event: "SIGINT", listener: () => void): this;
        on(event: "SIGTSTP", listener: () => void): this;
        once(event: string, listener: Function): this;
        once(event: "close", listener: () => void): this;
        once(event: "line", listener: (input: any) => void): this;
        once(event: "pause", listener: () => void): this;
        once(event: "resume", listener: () => void): this;
        once(event: "SIGCONT", listener: () => void): this;
        once(event: "SIGINT", listener: () => void): this;
        once(event: "SIGTSTP", listener: () => void): this;
        prependListener(event: string, listener: Function): this;
        prependListener(event: "close", listener: () => void): this;
        prependListener(event: "line", listener: (input: any) => void): this;
        prependListener(event: "pause", listener: () => void): this;
        prependListener(event: "resume", listener: () => void): this;
        prependListener(event: "SIGCONT", listener: () => void): this;
        prependListener(event: "SIGINT", listener: () => void): this;
        prependListener(event: "SIGTSTP", listener: () => void): this;
        prependOnceListener(event: string, listener: Function): this;
        prependOnceListener(event: "close", listener: () => void): this;
        prependOnceListener(event: "line", listener: (input: any) => void): this;
        prependOnceListener(event: "pause", listener: () => void): this;
        prependOnceListener(event: "resume", listener: () => void): this;
        prependOnceListener(event: "SIGCONT", listener: () => void): this;
        prependOnceListener(event: "SIGINT", listener: () => void): this;
        prependOnceListener(event: "SIGTSTP", listener: () => void): this;
    }
    // Tab-completion callback: synchronous or callback form.
    export interface Completer {
        (line: string): CompleterResult;
        (line: string, callback: (err: any, result: CompleterResult) => void): any;
    }
    // [candidate completions, the substring that was matched]
    export type CompleterResult = [string[], string];
    export interface ReadLineOptions {
        input: NodeJS.ReadableStream;
        output?: NodeJS.WritableStream;
        completer?: Completer;
        terminal?: boolean;
        historySize?: number;
    }
    export function createInterface(input: NodeJS.ReadableStream, output?: NodeJS.WritableStream, completer?: Completer, terminal?: boolean): ReadLine;
    export function createInterface(options: ReadLineOptions): ReadLine;
    // Low-level cursor/screen manipulation helpers for TTY streams.
    export function cursorTo(stream: NodeJS.WritableStream, x: number, y: number): void;
    export function moveCursor(stream: NodeJS.WritableStream, dx: number | string, dy: number | string): void;
    export function clearLine(stream: NodeJS.WritableStream, dir: number): void;
    export function clearScreenDown(stream: NodeJS.WritableStream): void;
}
declare module "vm" {
    // Opaque marker for a contextified sandbox object.
    export interface Context { }
    // Options accepted when compiling a Script.
    export interface ScriptOptions {
        filename?: string;
        lineOffset?: number;
        columnOffset?: number;
        displayErrors?: boolean;
        timeout?: number;
        cachedData?: Buffer;
        produceCachedData?: boolean;
    }
    // Options accepted when running an already-compiled Script.
    export interface RunningScriptOptions {
        filename?: string;
        lineOffset?: number;
        columnOffset?: number;
        displayErrors?: boolean;
        timeout?: number;
    }
    // Precompiled JavaScript that can be run repeatedly in different contexts.
    export class Script {
        constructor(code: string, options?: ScriptOptions);
        runInContext(contextifiedSandbox: Context, options?: RunningScriptOptions): any;
        runInNewContext(sandbox?: Context, options?: RunningScriptOptions): any;
        runInThisContext(options?: RunningScriptOptions): any;
    }
    export function createContext(sandbox?: Context): Context;
    export function isContext(sandbox: Context): boolean;
    export function runInContext(code: string, contextifiedSandbox: Context, options?: RunningScriptOptions): any;
    export function runInDebugContext(code: string): any;
    export function runInNewContext(code: string, sandbox?: Context, options?: RunningScriptOptions): any;
    export function runInThisContext(code: string, options?: RunningScriptOptions): any;
}
declare module "child_process" {
    import * as events from "events";
    import * as stream from "stream";
    import * as net from "net";
    // Handle to a spawned child process with typed stdio streams and
    // typed event overloads.
    export interface ChildProcess extends events.EventEmitter {
        stdin: stream.Writable;
        stdout: stream.Readable;
        stderr: stream.Readable;
        stdio: [stream.Writable, stream.Readable, stream.Readable];
        pid: number;
        kill(signal?: string): void;
        send(message: any, sendHandle?: any): boolean;
        connected: boolean;
        disconnect(): void;
        unref(): void;
        ref(): void;
        /**
         * events.EventEmitter
         * 1. close
         * 2. disconnect
         * 3. error
         * 4. exit
         * 5. message
         **/
        // FIX: the event name was misspelled "disconnet" in every typed
        // overload below; the Node.js runtime emits "disconnect", so the
        // typed overloads could never match.
        addListener(event: string, listener: Function): this;
        addListener(event: "close", listener: (code: number, signal: string) => void): this;
        addListener(event: "disconnect", listener: () => void): this;
        addListener(event: "error", listener: (err: Error) => void): this;
        addListener(event: "exit", listener: (code: number, signal: string) => void): this;
        addListener(event: "message", listener: (message: any, sendHandle: net.Socket | net.Server) => void): this;
        emit(event: string, ...args: any[]): boolean;
        emit(event: "close", code: number, signal: string): boolean;
        emit(event: "disconnect"): boolean;
        emit(event: "error", err: Error): boolean;
        emit(event: "exit", code: number, signal: string): boolean;
        emit(event: "message", message: any, sendHandle: net.Socket | net.Server): boolean;
        on(event: string, listener: Function): this;
        on(event: "close", listener: (code: number, signal: string) => void): this;
        on(event: "disconnect", listener: () => void): this;
        on(event: "error", listener: (err: Error) => void): this;
        on(event: "exit", listener: (code: number, signal: string) => void): this;
        on(event: "message", listener: (message: any, sendHandle: net.Socket | net.Server) => void): this;
        once(event: string, listener: Function): this;
        once(event: "close", listener: (code: number, signal: string) => void): this;
        once(event: "disconnect", listener: () => void): this;
        once(event: "error", listener: (err: Error) => void): this;
        once(event: "exit", listener: (code: number, signal: string) => void): this;
        once(event: "message", listener: (message: any, sendHandle: net.Socket | net.Server) => void): this;
        prependListener(event: string, listener: Function): this;
        prependListener(event: "close", listener: (code: number, signal: string) => void): this;
        prependListener(event: "disconnect", listener: () => void): this;
        prependListener(event: "error", listener: (err: Error) => void): this;
        prependListener(event: "exit", listener: (code: number, signal: string) => void): this;
        prependListener(event: "message", listener: (message: any, sendHandle: net.Socket | net.Server) => void): this;
        prependOnceListener(event: string, listener: Function): this;
        prependOnceListener(event: "close", listener: (code: number, signal: string) => void): this;
        prependOnceListener(event: "disconnect", listener: () => void): this;
        prependOnceListener(event: "error", listener: (err: Error) => void): this;
        prependOnceListener(event: "exit", listener: (code: number, signal: string) => void): this;
        prependOnceListener(event: "message", listener: (message: any, sendHandle: net.Socket | net.Server) => void): this;
    }
    export interface SpawnOptions {
        cwd?: string;
        env?: any;
        stdio?: any;
        detached?: boolean;
        uid?: number;
        gid?: number;
        shell?: boolean | string;
    }
    export function spawn(command: string, args?: string[], options?: SpawnOptions): ChildProcess;
    export interface ExecOptions {
        cwd?: string;
        env?: any;
        shell?: string;
        timeout?: number;
        maxBuffer?: number;
        killSignal?: string;
        uid?: number;
        gid?: number;
    }
    export interface ExecOptionsWithStringEncoding extends ExecOptions {
        encoding: BufferEncoding;
    }
    export interface ExecOptionsWithBufferEncoding extends ExecOptions {
        encoding: string; // specify `null`.
    }
    export function exec(command: string, callback?: (error: Error, stdout: string, stderr: string) => void): ChildProcess;
    export function exec(command: string, options: ExecOptionsWithStringEncoding, callback?: (error: Error, stdout: string, stderr: string) => void): ChildProcess;
    // usage. child_process.exec("tsc", {encoding: null as string}, (err, stdout, stderr) => {});
    export function exec(command: string, options: ExecOptionsWithBufferEncoding, callback?: (error: Error, stdout: Buffer, stderr: Buffer) => void): ChildProcess;
    export function exec(command: string, options: ExecOptions, callback?: (error: Error, stdout: string, stderr: string) => void): ChildProcess;
    export interface ExecFileOptions {
        cwd?: string;
        env?: any;
        timeout?: number;
        maxBuffer?: number;
        killSignal?: string;
        uid?: number;
        gid?: number;
    }
    export interface ExecFileOptionsWithStringEncoding extends ExecFileOptions {
        encoding: BufferEncoding;
    }
    export interface ExecFileOptionsWithBufferEncoding extends ExecFileOptions {
        encoding: string; // specify `null`.
    }
    export function execFile(file: string, callback?: (error: Error, stdout: string, stderr: string) => void): ChildProcess;
    export function execFile(file: string, options?: ExecFileOptionsWithStringEncoding, callback?: (error: Error, stdout: string, stderr: string) => void): ChildProcess;
    // usage. child_process.execFile("file.sh", {encoding: null as string}, (err, stdout, stderr) => {});
    export function execFile(file: string, options?: ExecFileOptionsWithBufferEncoding, callback?: (error: Error, stdout: Buffer, stderr: Buffer) => void): ChildProcess;
    export function execFile(file: string, options?: ExecFileOptions, callback?: (error: Error, stdout: string, stderr: string) => void): ChildProcess;
    export function execFile(file: string, args?: string[], callback?: (error: Error, stdout: string, stderr: string) => void): ChildProcess;
    export function execFile(file: string, args?: string[], options?: ExecFileOptionsWithStringEncoding, callback?: (error: Error, stdout: string, stderr: string) => void): ChildProcess;
    // usage. child_process.execFile("file.sh", ["foo"], {encoding: null as string}, (err, stdout, stderr) => {});
    export function execFile(file: string, args?: string[], options?: ExecFileOptionsWithBufferEncoding, callback?: (error: Error, stdout: Buffer, stderr: Buffer) => void): ChildProcess;
    export function execFile(file: string, args?: string[], options?: ExecFileOptions, callback?: (error: Error, stdout: string, stderr: string) => void): ChildProcess;
    export interface ForkOptions {
        cwd?: string;
        env?: any;
        execPath?: string;
        execArgv?: string[];
        silent?: boolean;
        uid?: number;
        gid?: number;
    }
    export function fork(modulePath: string, args?: string[], options?: ForkOptions): ChildProcess;
    export interface SpawnSyncOptions {
        cwd?: string;
        input?: string | Buffer;
        stdio?: any;
        env?: any;
        uid?: number;
        gid?: number;
        timeout?: number;
        killSignal?: string;
        maxBuffer?: number;
        encoding?: string;
        shell?: boolean | string;
    }
    export interface SpawnSyncOptionsWithStringEncoding extends SpawnSyncOptions {
        encoding: BufferEncoding;
    }
    export interface SpawnSyncOptionsWithBufferEncoding extends SpawnSyncOptions {
        encoding: string; // specify `null`.
    }
    // Result of a synchronous spawn; T is string or Buffer depending on
    // the requested encoding.
    export interface SpawnSyncReturns<T> {
        pid: number;
        output: string[];
        stdout: T;
        stderr: T;
        status: number;
        signal: string;
        error: Error;
    }
    export function spawnSync(command: string): SpawnSyncReturns<Buffer>;
    export function spawnSync(command: string, options?: SpawnSyncOptionsWithStringEncoding): SpawnSyncReturns<string>;
    export function spawnSync(command: string, options?: SpawnSyncOptionsWithBufferEncoding): SpawnSyncReturns<Buffer>;
    export function spawnSync(command: string, options?: SpawnSyncOptions): SpawnSyncReturns<Buffer>;
    export function spawnSync(command: string, args?: string[], options?: SpawnSyncOptionsWithStringEncoding): SpawnSyncReturns<string>;
    export function spawnSync(command: string, args?: string[], options?: SpawnSyncOptionsWithBufferEncoding): SpawnSyncReturns<Buffer>;
    export function spawnSync(command: string, args?: string[], options?: SpawnSyncOptions): SpawnSyncReturns<Buffer>;
    export interface ExecSyncOptions {
        cwd?: string;
        input?: string | Buffer;
        stdio?: any;
        env?: any;
        shell?: string;
        uid?: number;
        gid?: number;
        timeout?: number;
        killSignal?: string;
        maxBuffer?: number;
        encoding?: string;
    }
    export interface ExecSyncOptionsWithStringEncoding extends ExecSyncOptions {
        encoding: BufferEncoding;
    }
    export interface ExecSyncOptionsWithBufferEncoding extends ExecSyncOptions {
        encoding: string; // specify `null`.
    }
    export function execSync(command: string): Buffer;
    export function execSync(command: string, options?: ExecSyncOptionsWithStringEncoding): string;
    export function execSync(command: string, options?: ExecSyncOptionsWithBufferEncoding): Buffer;
    export function execSync(command: string, options?: ExecSyncOptions): Buffer;
    export interface ExecFileSyncOptions {
        cwd?: string;
        input?: string | Buffer;
        stdio?: any;
        env?: any;
        uid?: number;
        gid?: number;
        timeout?: number;
        killSignal?: string;
        maxBuffer?: number;
        encoding?: string;
    }
    export interface ExecFileSyncOptionsWithStringEncoding extends ExecFileSyncOptions {
        encoding: BufferEncoding;
    }
    export interface ExecFileSyncOptionsWithBufferEncoding extends ExecFileSyncOptions {
        encoding: string; // specify `null`.
    }
    export function execFileSync(command: string): Buffer;
    export function execFileSync(command: string, options?: ExecFileSyncOptionsWithStringEncoding): string;
    export function execFileSync(command: string, options?: ExecFileSyncOptionsWithBufferEncoding): Buffer;
    export function execFileSync(command: string, options?: ExecFileSyncOptions): Buffer;
    export function execFileSync(command: string, args?: string[], options?: ExecFileSyncOptionsWithStringEncoding): string;
    export function execFileSync(command: string, args?: string[], options?: ExecFileSyncOptionsWithBufferEncoding): Buffer;
    export function execFileSync(command: string, args?: string[], options?: ExecFileSyncOptions): Buffer;
}
declare module "url" {
    // Parsed URL components as produced by url.parse().
    export interface Url {
        href?: string;
        protocol?: string;
        auth?: string;
        hostname?: string;
        port?: string;
        host?: string;
        pathname?: string;
        search?: string;
        query?: string | any;
        slashes?: boolean;
        hash?: string;
        path?: string;
    }
    export function parse(urlStr: string, parseQueryString?: boolean, slashesDenoteHost?: boolean): Url;
    export function format(url: Url): string;
    export function resolve(from: string, to: string): string;
}
declare module "dns" {
    // One MX record as delivered by resolveMx().
    export interface MxRecord {
        exchange: string,
        priority: number
    }
    // NOTE(review): results are delivered via the callbacks; the declared
    // string/string[] return types below look doubtful against the Node.js
    // runtime (which does not return the addresses synchronously) — verify
    // before relying on a return value.
    export function lookup(domain: string, family: number, callback: (err: Error, address: string, family: number) => void): string;
    export function lookup(domain: string, callback: (err: Error, address: string, family: number) => void): string;
    export function resolve(domain: string, rrtype: string, callback: (err: Error, addresses: string[]) => void): string[];
    export function resolve(domain: string, callback: (err: Error, addresses: string[]) => void): string[];
    export function resolve4(domain: string, callback: (err: Error, addresses: string[]) => void): string[];
    export function resolve6(domain: string, callback: (err: Error, addresses: string[]) => void): string[];
    export function resolveMx(domain: string, callback: (err: Error, addresses: MxRecord[]) => void): string[];
    export function resolveTxt(domain: string, callback: (err: Error, addresses: string[]) => void): string[];
    export function resolveSrv(domain: string, callback: (err: Error, addresses: string[]) => void): string[];
    export function resolveNs(domain: string, callback: (err: Error, addresses: string[]) => void): string[];
    export function resolveCname(domain: string, callback: (err: Error, addresses: string[]) => void): string[];
    export function reverse(ip: string, callback: (err: Error, domains: string[]) => void): string[];
    export function setServers(servers: string[]): void;
    //Error codes
    export var NODATA: string;
    export var FORMERR: string;
    export var SERVFAIL: string;
    export var NOTFOUND: string;
    export var NOTIMP: string;
    export var REFUSED: string;
    export var BADQUERY: string;
    export var BADNAME: string;
    export var BADFAMILY: string;
    export var BADRESP: string;
    export var CONNREFUSED: string;
    export var TIMEOUT: string;
    export var EOF: string;
    export var FILE: string;
    export var NOMEM: string;
    export var DESTRUCTION: string;
    export var BADSTR: string;
    export var BADFLAGS: string;
    export var NONAME: string;
    export var BADHINTS: string;
    export var NOTINITIALIZED: string;
    export var LOADIPHLPAPI: string;
    export var ADDRGETNETWORKPARAMS: string;
    export var CANCELLED: string;
}
declare module "net" {
import * as stream from "stream";
import * as events from "events";
export interface Socket extends stream.Duplex {
// Extended base methods
write(buffer: Buffer): boolean;
write(buffer: Buffer, cb?: Function): boolean;
write(str: string, cb?: Function): boolean;
write(str: string, encoding?: string, cb?: Function): boolean;
write(str: string, encoding?: string, fd?: string): boolean;
connect(port: number, host?: string, connectionListener?: Function): void;
connect(path: string, connectionListener?: Function): void;
bufferSize: number;
setEncoding(encoding?: string): void;
write(data: any, encoding?: string, callback?: Function): void;
destroy(): void;
pause(): Socket;
resume(): Socket;
setTimeout(timeout: number, callback?: Function): void;
setNoDelay(noDelay?: boolean): void;
setKeepAlive(enable?: boolean, initialDelay?: number): void;
address(): { port: number; family: string; address: string; };
unref(): void;
ref(): void;
remoteAddress: string;
remoteFamily: string;
remotePort: number;
localAddress: string;
localPort: number;
bytesRead: number;
bytesWritten: number;
destroyed: boolean;
// Extended base methods
end(): void;
end(buffer: Buffer, cb?: Function): void;
end(str: string, cb?: Function): void;
end(str: string, encoding?: string, cb?: Function): void;
end(data?: any, encoding?: string): void;
/**
* events.EventEmitter
* 1. close
* 2. connect
* 3. data
* 4. drain
* 5. end
* 6. error
* 7. lookup
* 8. timeout
*/
addListener(event: string, listener: Function): this;
addListener(event: "close", listener: (had_error: boolean) => void): this;
addListener(event: "connect", listener: () => void): this;
addListener(event: "data", listener: (data: Buffer) => void): this;
addListener(event: "drain", listener: () => void): this;
addListener(event: "end", listener: () => void): this;
addListener(event: "error", listener: (err: Error) => void): this;
addListener(event: "lookup", listener: (err: Error, address: string, family: string | number, host: string) => void): this;
addListener(event: "timeout", listener: () => void): this;
emit(event: string, ...args: any[]): boolean;
emit(event: "close", had_error: boolean): boolean;
emit(event: "connect"): boolean;
emit(event: "data", data: Buffer): boolean;
emit(event: "drain"): boolean;
emit(event: "end"): boolean;
emit(event: "error", err: Error): boolean;
emit(event: "lookup", err: Error, address: string, family: string | number, host: string): boolean;
emit(event: "timeout"): boolean;
on(event: string, listener: Function): this;
on(event: "close", listener: (had_error: boolean) => void): this;
on(event: "connect", listener: () => void): this;
on(event: "data", listener: (data: Buffer) => void): this;
on(event: "drain", listener: () => void): this;
on(event: "end", listener: () => void): this;
on(event: "error", listener: (err: Error) => void): this;
on(event: "lookup", listener: (err: Error, address: string, family: string | number, host: string) => void): this;
on(event: "timeout", listener: () => void): this;
once(event: string, listener: Function): this;
once(event: "close", listener: (had_error: boolean) => void): this;
once(event: "connect", listener: () => void): this;
once(event: "data", listener: (data: Buffer) => void): this;
once(event: "drain", listener: () => void): this;
once(event: "end", listener: () => void): this;
once(event: "error", listener: (err: Error) => void): this;
once(event: "lookup", listener: (err: Error, address: string, family: string | number, host: string) => void): this;
once(event: "timeout", listener: () => void): this;
prependListener(event: string, listener: Function): this;
prependListener(event: "close", listener: (had_error: boolean) => void): this;
prependListener(event: "connect", listener: () => void): this;
prependListener(event: "data", listener: (data: Buffer) => void): this;
prependListener(event: "drain", listener: () => void): this;
prependListener(event: "end", listener: () => void): this;
prependListener(event: "error", listener: (err: Error) => void): this;
prependListener(event: "lookup", listener: (err: Error, address: string, family: string | number, host: string) => void): this;
prependListener(event: "timeout", listener: () => void): this;
prependOnceListener(event: string, listener: Function): this;
prependOnceListener(event: "close", listener: (had_error: boolean) => void): this;
prependOnceListener(event: "connect", listener: () => void): this;
prependOnceListener(event: "data", listener: (data: Buffer) => void): this;
prependOnceListener(event: "drain", listener: () => void): this;
prependOnceListener(event: "end", listener: () => void): this;
prependOnceListener(event: "error", listener: (err: Error) => void): this;
prependOnceListener(event: "lookup", listener: (err: Error, address: string, family: string | number, host: string) => void): this;
prependOnceListener(event: "timeout", listener: () => void): this;
}
export var Socket: {
new (options?: { fd?: string; type?: string; allowHalfOpen?: boolean; }): Socket;
};
export interface ListenOptions {
port?: number;
host?: string;
backlog?: number;
path?: string;
exclusive?: boolean;
}
export interface Server extends events.EventEmitter {
listen(port: number, hostname?: string, backlog?: number, listeningListener?: Function): Server;
listen(port: number, hostname?: string, listeningListener?: Function): Server;
listen(port: number, backlog?: number, listeningListener?: Function): Server;
listen(port: number, listeningListener?: Function): Server;
listen(path: string, backlog?: number, listeningListener?: Function): Server;
listen(path: string, listeningListener?: Function): Server;
listen(options: ListenOptions, listeningListener?: Function): Server;
listen(handle: any, backlog?: number, listeningListener?: Function): Server;
listen(handle: any, listeningListener?: Function): Server;
close(callback?: Function): Server;
address(): { port: number; family: string; address: string; };
getConnections(cb: (error: Error, count: number) => void): void;
ref(): Server;
unref(): Server;
maxConnections: number;
connections: number;
/**
* events.EventEmitter
* 1. close
* 2. connection
* 3. error
* 4. listening
*/
addListener(event: string, listener: Function): this;
addListener(event: "close", listener: () => void): this;
addListener(event: "connection", listener: (socket: Socket) => void): this;
addListener(event: "error", listener: (err: Error) => void): this;
addListener(event: "listening", listener: () => void): this;
emit(event: string, ...args: any[]): boolean;
emit(event: "close"): boolean;
emit(event: "connection", socket: Socket): boolean;
emit(event: "error", err: Error): boolean;
emit(event: "listening"): boolean;
on(event: string, listener: Function): this;
on(event: "close", listener: () => void): this;
on(event: "connection", listener: (socket: Socket) => void): this;
on(event: "error", listener: (err: Error) => void): this;
on(event: "listening", listener: () => void): this;
once(event: string, listener: Function): this;
once(event: "close", listener: () => void): this;
once(event: "connection", listener: (socket: Socket) => void): this;
once(event: "error", listener: (err: Error) => void): this;
once(event: "listening", listener: () => void): this;
prependListener(event: string, listener: Function): this;
prependListener(event: "close", listener: () => void): this;
prependListener(event: "connection", listener: (socket: Socket) => void): this;
prependListener(event: "error", listener: (err: Error) => void): this;
prependListener(event: "listening", listener: () => void): this;
prependOnceListener(event: string, listener: Function): this;
prependOnceListener(event: "close", listener: () => void): this;
prependOnceListener(event: "connection", listener: (socket: Socket) => void): this;
prependOnceListener(event: "error", listener: (err: Error) => void): this;
prependOnceListener(event: "listening", listener: () => void): this;
}
    /**
     * Creates a new TCP server. If given, connectionListener is installed as a
     * listener for the 'connection' event.
     */
    export function createServer(connectionListener?: (socket: Socket) => void): Server;
    export function createServer(options?: { allowHalfOpen?: boolean; }, connectionListener?: (socket: Socket) => void): Server;
export function connect(options: { port: number, host?: string, localAddress?: string, localPort?: string, family?: number, allowHalfOpen?: boolean; }, connectionListener?: Function): Socket;
export function connect(port: number, host?: string, connectionListener?: Function): Socket;
export function connect(path: string, connectionListener?: Function): Socket;
export function createConnection(options: { port: number, host?: string, localAddress?: string, localPort?: string, family?: number, allowHalfOpen?: boolean; }, connectionListener?: Function): Socket;
export function createConnection(port: number, host?: string, connectionListener?: Function): Socket;
export function createConnection(path: string, connectionListener?: Function): Socket;
    /** Tests if input is an IP address: returns 0 for an invalid string, 4 for IPv4, 6 for IPv6. */
    export function isIP(input: string): number;
    /** Returns true if input is a valid IPv4 address. */
    export function isIPv4(input: string): boolean;
    /** Returns true if input is a valid IPv6 address. */
    export function isIPv6(input: string): boolean;
}
// Type declarations for Node's UDP/datagram sockets module.
declare module "dgram" {
    import * as events from "events";
    // Sender information passed alongside each received datagram.
    interface RemoteInfo {
        address: string;
        family: string;
        port: number;
    }
    // Result of Socket.address(); structurally identical to RemoteInfo.
    interface AddressInfo {
        address: string;
        family: string;
        port: number;
    }
    // Options accepted by Socket.bind().
    interface BindOptions {
        port: number;
        address?: string;
        // When true, the handle is not shared with cluster workers.
        exclusive?: boolean;
    }
    interface SocketOptions {
        type: "udp4" | "udp6";
        reuseAddr?: boolean;
    }
    // Creates a dgram.Socket; the callback, if given, is added as a 'message' listener.
    export function createSocket(type: string, callback?: (msg: Buffer, rinfo: RemoteInfo) => void): Socket;
    export function createSocket(options: SocketOptions, callback?: (msg: Buffer, rinfo: RemoteInfo) => void): Socket;
    export interface Socket extends events.EventEmitter {
        // NOTE(review): `String` (the wrapper object type) looks like a typo for the
        // primitive `string` — confirm against Node docs before tightening.
        send(msg: Buffer | String | any[], port: number, address: string, callback?: (error: Error, bytes: number) => void): void;
        send(msg: Buffer | String | any[], offset: number, length: number, port: number, address: string, callback?: (error: Error, bytes: number) => void): void;
        bind(port?: number, address?: string, callback?: () => void): void;
        bind(options: BindOptions, callback?: Function): void;
        close(callback?: any): void;
        address(): AddressInfo;
        setBroadcast(flag: boolean): void;
        setTTL(ttl: number): void;
        setMulticastTTL(ttl: number): void;
        setMulticastLoopback(flag: boolean): void;
        addMembership(multicastAddress: string, multicastInterface?: string): void;
        dropMembership(multicastAddress: string, multicastInterface?: string): void;
        ref(): void;
        unref(): void;
        /**
         * events.EventEmitter
         * 1. close
         * 2. error
         * 3. listening
         * 4. message
         **/
        // NOTE(review): Node delivers 'message' payloads as a Buffer; the `msg: string`
        // below (and the createSocket callbacks' Buffer) disagree — verify which is intended.
        addListener(event: string, listener: Function): this;
        addListener(event: "close", listener: () => void): this;
        addListener(event: "error", listener: (err: Error) => void): this;
        addListener(event: "listening", listener: () => void): this;
        addListener(event: "message", listener: (msg: string, rinfo: AddressInfo) => void): this;
        emit(event: string, ...args: any[]): boolean;
        emit(event: "close"): boolean;
        emit(event: "error", err: Error): boolean;
        emit(event: "listening"): boolean;
        emit(event: "message", msg: string, rinfo: AddressInfo): boolean;
        on(event: string, listener: Function): this;
        on(event: "close", listener: () => void): this;
        on(event: "error", listener: (err: Error) => void): this;
        on(event: "listening", listener: () => void): this;
        on(event: "message", listener: (msg: string, rinfo: AddressInfo) => void): this;
        once(event: string, listener: Function): this;
        once(event: "close", listener: () => void): this;
        once(event: "error", listener: (err: Error) => void): this;
        once(event: "listening", listener: () => void): this;
        once(event: "message", listener: (msg: string, rinfo: AddressInfo) => void): this;
        prependListener(event: string, listener: Function): this;
        prependListener(event: "close", listener: () => void): this;
        prependListener(event: "error", listener: (err: Error) => void): this;
        prependListener(event: "listening", listener: () => void): this;
        prependListener(event: "message", listener: (msg: string, rinfo: AddressInfo) => void): this;
        prependOnceListener(event: string, listener: Function): this;
        prependOnceListener(event: "close", listener: () => void): this;
        prependOnceListener(event: "error", listener: (err: Error) => void): this;
        prependOnceListener(event: "listening", listener: () => void): this;
        prependOnceListener(event: "message", listener: (msg: string, rinfo: AddressInfo) => void): this;
    }
}
// Type declarations for Node's file-system module.
declare module "fs" {
    import * as stream from "stream";
    import * as events from "events";
    // Result of the stat()/lstat()/fstat() family; mirrors the POSIX stat structure.
    interface Stats {
        isFile(): boolean;
        isDirectory(): boolean;
        isBlockDevice(): boolean;
        isCharacterDevice(): boolean;
        isSymbolicLink(): boolean;
        isFIFO(): boolean;
        isSocket(): boolean;
        dev: number;
        ino: number;
        mode: number;
        nlink: number;
        uid: number;
        gid: number;
        rdev: number;
        size: number;
        blksize: number;
        blocks: number;
        atime: Date;
        mtime: Date;
        ctime: Date;
        birthtime: Date;
    }
    // Returned by fs.watch(); emits 'change' and 'error' events until close() is called.
    interface FSWatcher extends events.EventEmitter {
        close(): void;
        /**
         * events.EventEmitter
         * 1. change
         * 2. error
         */
        addListener(event: string, listener: Function): this;
        addListener(event: "change", listener: (eventType: string, filename: string | Buffer) => void): this;
        addListener(event: "error", listener: (code: number, signal: string) => void): this;
        on(event: string, listener: Function): this;
        on(event: "change", listener: (eventType: string, filename: string | Buffer) => void): this;
        on(event: "error", listener: (code: number, signal: string) => void): this;
        once(event: string, listener: Function): this;
        once(event: "change", listener: (eventType: string, filename: string | Buffer) => void): this;
        once(event: "error", listener: (code: number, signal: string) => void): this;
        prependListener(event: string, listener: Function): this;
        prependListener(event: "change", listener: (eventType: string, filename: string | Buffer) => void): this;
        prependListener(event: "error", listener: (code: number, signal: string) => void): this;
        prependOnceListener(event: string, listener: Function): this;
        prependOnceListener(event: "change", listener: (eventType: string, filename: string | Buffer) => void): this;
        prependOnceListener(event: "error", listener: (code: number, signal: string) => void): this;
    }
    // Returned by fs.createReadStream(); a Readable stream over a file.
    export interface ReadStream extends stream.Readable {
        close(): void;
        destroy(): void;
        // Number of bytes read so far.
        bytesRead: number;
        // Path the stream is reading from (as given to createReadStream).
        path: string | Buffer;
        /**
         * events.EventEmitter
         * 1. open
         * 2. close
         */
        addListener(event: string, listener: Function): this;
        addListener(event: "open", listener: (fd: number) => void): this;
        addListener(event: "close", listener: () => void): this;
        on(event: string, listener: Function): this;
        on(event: "open", listener: (fd: number) => void): this;
        on(event: "close", listener: () => void): this;
        once(event: string, listener: Function): this;
        once(event: "open", listener: (fd: number) => void): this;
        once(event: "close", listener: () => void): this;
        prependListener(event: string, listener: Function): this;
        prependListener(event: "open", listener: (fd: number) => void): this;
        prependListener(event: "close", listener: () => void): this;
        prependOnceListener(event: string, listener: Function): this;
        prependOnceListener(event: "open", listener: (fd: number) => void): this;
        prependOnceListener(event: "close", listener: () => void): this;
    }
    // Returned by fs.createWriteStream(); a Writable stream over a file.
    export interface WriteStream extends stream.Writable {
        close(): void;
        // Number of bytes written so far.
        bytesWritten: number;
        // Path the stream is writing to (as given to createWriteStream).
        path: string | Buffer;
        /**
         * events.EventEmitter
         * 1. open
         * 2. close
         */
        addListener(event: string, listener: Function): this;
        addListener(event: "open", listener: (fd: number) => void): this;
        addListener(event: "close", listener: () => void): this;
        on(event: string, listener: Function): this;
        on(event: "open", listener: (fd: number) => void): this;
        on(event: "close", listener: () => void): this;
        once(event: string, listener: Function): this;
        once(event: "open", listener: (fd: number) => void): this;
        once(event: "close", listener: () => void): this;
        prependListener(event: string, listener: Function): this;
        prependListener(event: "open", listener: (fd: number) => void): this;
        prependListener(event: "close", listener: () => void): this;
        prependOnceListener(event: string, listener: Function): this;
        prependOnceListener(event: "open", listener: (fd: number) => void): this;
        prependOnceListener(event: "close", listener: () => void): this;
    }
    /**
     * Asynchronous rename.
     * @param oldPath
     * @param newPath
     * @param callback No arguments other than a possible exception are given to the completion callback.
     */
    export function rename(oldPath: string, newPath: string, callback?: (err?: NodeJS.ErrnoException) => void): void;
    /**
     * Synchronous rename
     * @param oldPath
     * @param newPath
     */
    export function renameSync(oldPath: string, newPath: string): void;
    // Truncate the file at `path` to `len` bytes (0 when omitted).
    export function truncate(path: string | Buffer, callback?: (err?: NodeJS.ErrnoException) => void): void;
    export function truncate(path: string | Buffer, len: number, callback?: (err?: NodeJS.ErrnoException) => void): void;
    export function truncateSync(path: string | Buffer, len?: number): void;
    // Same as truncate but operates on an open file descriptor.
    export function ftruncate(fd: number, callback?: (err?: NodeJS.ErrnoException) => void): void;
    export function ftruncate(fd: number, len: number, callback?: (err?: NodeJS.ErrnoException) => void): void;
    export function ftruncateSync(fd: number, len?: number): void;
    // Ownership: chown by path, fchown by descriptor, lchown changes the symlink itself.
    export function chown(path: string | Buffer, uid: number, gid: number, callback?: (err?: NodeJS.ErrnoException) => void): void;
    export function chownSync(path: string | Buffer, uid: number, gid: number): void;
    export function fchown(fd: number, uid: number, gid: number, callback?: (err?: NodeJS.ErrnoException) => void): void;
    export function fchownSync(fd: number, uid: number, gid: number): void;
    export function lchown(path: string | Buffer, uid: number, gid: number, callback?: (err?: NodeJS.ErrnoException) => void): void;
    export function lchownSync(path: string | Buffer, uid: number, gid: number): void;
    // Permissions: mode may be numeric (e.g. 0o755) or an octal string (e.g. "755").
    export function chmod(path: string | Buffer, mode: number, callback?: (err?: NodeJS.ErrnoException) => void): void;
    export function chmod(path: string | Buffer, mode: string, callback?: (err?: NodeJS.ErrnoException) => void): void;
    export function chmodSync(path: string | Buffer, mode: number): void;
    export function chmodSync(path: string | Buffer, mode: string): void;
    export function fchmod(fd: number, mode: number, callback?: (err?: NodeJS.ErrnoException) => void): void;
    export function fchmod(fd: number, mode: string, callback?: (err?: NodeJS.ErrnoException) => void): void;
    export function fchmodSync(fd: number, mode: number): void;
    export function fchmodSync(fd: number, mode: string): void;
    export function lchmod(path: string | Buffer, mode: number, callback?: (err?: NodeJS.ErrnoException) => void): void;
    export function lchmod(path: string | Buffer, mode: string, callback?: (err?: NodeJS.ErrnoException) => void): void;
    export function lchmodSync(path: string | Buffer, mode: number): void;
    export function lchmodSync(path: string | Buffer, mode: string): void;
    // Stat family: stat follows symlinks, lstat does not, fstat takes a descriptor.
    export function stat(path: string | Buffer, callback?: (err: NodeJS.ErrnoException, stats: Stats) => any): void;
    export function lstat(path: string | Buffer, callback?: (err: NodeJS.ErrnoException, stats: Stats) => any): void;
    export function fstat(fd: number, callback?: (err: NodeJS.ErrnoException, stats: Stats) => any): void;
    export function statSync(path: string | Buffer): Stats;
    export function lstatSync(path: string | Buffer): Stats;
    export function fstatSync(fd: number): Stats;
    // Hard links, symbolic links, and link/path resolution.
    export function link(srcpath: string | Buffer, dstpath: string | Buffer, callback?: (err?: NodeJS.ErrnoException) => void): void;
    export function linkSync(srcpath: string | Buffer, dstpath: string | Buffer): void;
    export function symlink(srcpath: string | Buffer, dstpath: string | Buffer, type?: string, callback?: (err?: NodeJS.ErrnoException) => void): void;
    export function symlinkSync(srcpath: string | Buffer, dstpath: string | Buffer, type?: string): void;
    export function readlink(path: string | Buffer, callback?: (err: NodeJS.ErrnoException, linkString: string) => any): void;
    export function readlinkSync(path: string | Buffer): string;
    export function realpath(path: string | Buffer, callback?: (err: NodeJS.ErrnoException, resolvedPath: string) => any): void;
    export function realpath(path: string | Buffer, cache: { [path: string]: string }, callback: (err: NodeJS.ErrnoException, resolvedPath: string) => any): void;
    export function realpathSync(path: string | Buffer, cache?: { [path: string]: string }): string;
    /*
     * Asynchronous unlink - deletes the file specified in {path}
     *
     * @param path
     * @param callback No arguments other than a possible exception are given to the completion callback.
     */
    export function unlink(path: string | Buffer, callback?: (err?: NodeJS.ErrnoException) => void): void;
    /*
     * Synchronous unlink - deletes the file specified in {path}
     *
     * @param path
     */
    export function unlinkSync(path: string | Buffer): void;
    /*
     * Asynchronous rmdir - removes the directory specified in {path}
     *
     * @param path
     * @param callback No arguments other than a possible exception are given to the completion callback.
     */
    export function rmdir(path: string | Buffer, callback?: (err?: NodeJS.ErrnoException) => void): void;
    /*
     * Synchronous rmdir - removes the directory specified in {path}
     *
     * @param path
     */
    export function rmdirSync(path: string | Buffer): void;
    /*
     * Asynchronous mkdir - creates the directory specified in {path}. Parameter {mode} defaults to 0777.
     *
     * @param path
     * @param callback No arguments other than a possible exception are given to the completion callback.
     */
    export function mkdir(path: string | Buffer, callback?: (err?: NodeJS.ErrnoException) => void): void;
    /*
     * Asynchronous mkdir - creates the directory specified in {path}. Parameter {mode} defaults to 0777.
     *
     * @param path
     * @param mode
     * @param callback No arguments other than a possible exception are given to the completion callback.
     */
    export function mkdir(path: string | Buffer, mode: number, callback?: (err?: NodeJS.ErrnoException) => void): void;
    /*
     * Asynchronous mkdir - creates the directory specified in {path}. Parameter {mode} defaults to 0777.
     *
     * @param path
     * @param mode
     * @param callback No arguments other than a possible exception are given to the completion callback.
     */
    export function mkdir(path: string | Buffer, mode: string, callback?: (err?: NodeJS.ErrnoException) => void): void;
    /*
     * Synchronous mkdir - creates the directory specified in {path}. Parameter {mode} defaults to 0777.
     *
     * @param path
     * @param mode
     */
    export function mkdirSync(path: string | Buffer, mode?: number): void;
    /*
     * Synchronous mkdir - creates the directory specified in {path}. Parameter {mode} defaults to 0777.
     *
     * @param path
     * @param mode
     */
    export function mkdirSync(path: string | Buffer, mode?: string): void;
    /*
     * Asynchronous mkdtemp - Creates a unique temporary directory. Generates six random characters to be appended behind a required prefix to create a unique temporary directory.
     *
     * @param prefix
     * @param callback The created folder path is passed as a string to the callback's second parameter.
     */
    export function mkdtemp(prefix: string, callback?: (err: NodeJS.ErrnoException, folder: string) => void): void;
    /*
     * Synchronous mkdtemp - Creates a unique temporary directory. Generates six random characters to be appended behind a required prefix to create a unique temporary directory.
     *
     * @param prefix
     * @returns Returns the created folder path.
     */
    export function mkdtempSync(prefix: string): string;
    // List directory entries (names only, without the leading path).
    export function readdir(path: string | Buffer, callback?: (err: NodeJS.ErrnoException, files: string[]) => void): void;
    export function readdirSync(path: string | Buffer): string[];
    // Close a file descriptor obtained from open()/openSync().
    export function close(fd: number, callback?: (err?: NodeJS.ErrnoException) => void): void;
    export function closeSync(fd: number): void;
    // Open a file; flags is e.g. 'r', 'w', 'a' or a numeric O_* combination.
    export function open(path: string | Buffer, flags: string | number, callback: (err: NodeJS.ErrnoException, fd: number) => void): void;
    export function open(path: string | Buffer, flags: string | number, mode: number, callback: (err: NodeJS.ErrnoException, fd: number) => void): void;
    export function openSync(path: string | Buffer, flags: string | number, mode?: number): number;
    // Change file timestamps; numeric values are Unix timestamps in seconds.
    export function utimes(path: string | Buffer, atime: number, mtime: number, callback?: (err?: NodeJS.ErrnoException) => void): void;
    export function utimes(path: string | Buffer, atime: Date, mtime: Date, callback?: (err?: NodeJS.ErrnoException) => void): void;
    export function utimesSync(path: string | Buffer, atime: number, mtime: number): void;
    export function utimesSync(path: string | Buffer, atime: Date, mtime: Date): void;
    export function futimes(fd: number, atime: number, mtime: number, callback?: (err?: NodeJS.ErrnoException) => void): void;
    export function futimes(fd: number, atime: Date, mtime: Date, callback?: (err?: NodeJS.ErrnoException) => void): void;
    export function futimesSync(fd: number, atime: number, mtime: number): void;
    export function futimesSync(fd: number, atime: Date, mtime: Date): void;
    // Flush all data and metadata for fd to disk.
    export function fsync(fd: number, callback?: (err?: NodeJS.ErrnoException) => void): void;
    export function fsyncSync(fd: number): void;
export function write(fd: number, buffer: Buffer, offset: number, length: number, position: number, callback?: (err: NodeJS.ErrnoException, written: number, buffer: Buffer) => void): void;
export function write(fd: number, buffer: Buffer, offset: number, length: number, callback?: (err: NodeJS.ErrnoException, written: number, buffer: Buffer) => void): void;
export function write(fd: number, data: any, callback?: (err: NodeJS.ErrnoException, written: number, str: string) => void): void;
export function write(fd: number, data: any, offset: number, callback?: (err: NodeJS.ErrnoException, written: number, str: string) => void): void;
export function write(fd: number, data: any, offset: number, encoding: string, callback?: (err: NodeJS.ErrnoException, written: number, str: string) => void): void;
export function writeSync(fd: number, buffer: Buffer, offset: number, length: number, position?: number): number;
export function writeSync(fd: number, data: any, position?: number, enconding?: string): number;
export function read(fd: number, buffer: Buffer, offset: number, length: number, position: number, callback?: (err: NodeJS.ErrnoException, bytesRead: number, buffer: Buffer) => void): void;
export function readSync(fd: number, buffer: Buffer, offset: number, length: number, position: number): number;
    /*
     * Asynchronous readFile - Asynchronously reads the entire contents of a file.
     *
     * @param fileName
     * @param encoding
     * @param callback - The callback is passed two arguments (err, data), where data is the contents of the file.
     */
    export function readFile(filename: string, encoding: string, callback: (err: NodeJS.ErrnoException, data: string) => void): void;
    /*
     * Asynchronous readFile - Asynchronously reads the entire contents of a file.
     *
     * @param fileName
     * @param options An object with optional {encoding} and {flag} properties. If {encoding} is specified, readFile returns a string; otherwise it returns a Buffer.
     * @param callback - The callback is passed two arguments (err, data), where data is the contents of the file.
     */
    export function readFile(filename: string, options: { encoding: string; flag?: string; }, callback: (err: NodeJS.ErrnoException, data: string) => void): void;
    /*
     * Asynchronous readFile - Asynchronously reads the entire contents of a file.
     *
     * @param fileName
     * @param options An object with optional {encoding} and {flag} properties. If {encoding} is specified, readFile returns a string; otherwise it returns a Buffer.
     * @param callback - The callback is passed two arguments (err, data), where data is the contents of the file.
     */
    export function readFile(filename: string, options: { flag?: string; }, callback: (err: NodeJS.ErrnoException, data: Buffer) => void): void;
    /*
     * Asynchronous readFile - Asynchronously reads the entire contents of a file.
     *
     * @param fileName
     * @param callback - The callback is passed two arguments (err, data), where data is the contents of the file.
     */
    export function readFile(filename: string, callback: (err: NodeJS.ErrnoException, data: Buffer) => void): void;
    /*
     * Synchronous readFile - Synchronously reads the entire contents of a file.
     *
     * @param fileName
     * @param encoding
     */
    export function readFileSync(filename: string, encoding: string): string;
    /*
     * Synchronous readFile - Synchronously reads the entire contents of a file.
     *
     * @param fileName
     * @param options An object with optional {encoding} and {flag} properties. If {encoding} is specified, readFileSync returns a string; otherwise it returns a Buffer.
     */
    export function readFileSync(filename: string, options: { encoding: string; flag?: string; }): string;
    /*
     * Synchronous readFile - Synchronously reads the entire contents of a file.
     *
     * @param fileName
     * @param options An object with optional {encoding} and {flag} properties. If {encoding} is specified, readFileSync returns a string; otherwise it returns a Buffer.
     */
    export function readFileSync(filename: string, options?: { flag?: string; }): Buffer;
    // Replace the file's contents with `data` (string or Buffer).
    export function writeFile(filename: string, data: any, callback?: (err: NodeJS.ErrnoException) => void): void;
    export function writeFile(filename: string, data: any, options: { encoding?: string; mode?: number; flag?: string; }, callback?: (err: NodeJS.ErrnoException) => void): void;
    export function writeFile(filename: string, data: any, options: { encoding?: string; mode?: string; flag?: string; }, callback?: (err: NodeJS.ErrnoException) => void): void;
    export function writeFileSync(filename: string, data: any, options?: { encoding?: string; mode?: number; flag?: string; }): void;
    export function writeFileSync(filename: string, data: any, options?: { encoding?: string; mode?: string; flag?: string; }): void;
    // Append `data` to the file, creating it if it does not exist.
    export function appendFile(filename: string, data: any, options: { encoding?: string; mode?: number; flag?: string; }, callback?: (err: NodeJS.ErrnoException) => void): void;
    export function appendFile(filename: string, data: any, options: { encoding?: string; mode?: string; flag?: string; }, callback?: (err: NodeJS.ErrnoException) => void): void;
    export function appendFile(filename: string, data: any, callback?: (err: NodeJS.ErrnoException) => void): void;
    export function appendFileSync(filename: string, data: any, options?: { encoding?: string; mode?: number; flag?: string; }): void;
    export function appendFileSync(filename: string, data: any, options?: { encoding?: string; mode?: string; flag?: string; }): void;
    // Poll a file for changes; the listener receives current and previous Stats.
    export function watchFile(filename: string, listener: (curr: Stats, prev: Stats) => void): void;
    export function watchFile(filename: string, options: { persistent?: boolean; interval?: number; }, listener: (curr: Stats, prev: Stats) => void): void;
    export function unwatchFile(filename: string, listener?: (curr: Stats, prev: Stats) => void): void;
    // Event-based watching (more efficient than watchFile); returns an FSWatcher.
    export function watch(filename: string, listener?: (event: string, filename: string) => any): FSWatcher;
    export function watch(filename: string, encoding: string, listener?: (event: string, filename: string | Buffer) => any): FSWatcher;
    export function watch(filename: string, options: { persistent?: boolean; recursive?: boolean; encoding?: string }, listener?: (event: string, filename: string | Buffer) => any): FSWatcher;
    // NOTE(review): fs.exists is deprecated in Node docs (race-prone) — prefer access()/stat(); confirm the target Node version.
    export function exists(path: string | Buffer, callback?: (exists: boolean) => void): void;
    export function existsSync(path: string | Buffer): boolean;
    // POSIX-style numeric constants exposed as fs.constants, grouped by purpose below.
    export namespace constants {
        // File Access Constants
        /** Constant for fs.access(). File is visible to the calling process. */
        export const F_OK: number;
        /** Constant for fs.access(). File can be read by the calling process. */
        export const R_OK: number;
        /** Constant for fs.access(). File can be written by the calling process. */
        export const W_OK: number;
        /** Constant for fs.access(). File can be executed by the calling process. */
        export const X_OK: number;
        // File Open Constants
        /** Constant for fs.open(). Flag indicating to open a file for read-only access. */
        export const O_RDONLY: number;
        /** Constant for fs.open(). Flag indicating to open a file for write-only access. */
        export const O_WRONLY: number;
        /** Constant for fs.open(). Flag indicating to open a file for read-write access. */
        export const O_RDWR: number;
        /** Constant for fs.open(). Flag indicating to create the file if it does not already exist. */
        export const O_CREAT: number;
        /** Constant for fs.open(). Flag indicating that opening a file should fail if the O_CREAT flag is set and the file already exists. */
        export const O_EXCL: number;
        /** Constant for fs.open(). Flag indicating that if path identifies a terminal device, opening the path shall not cause that terminal to become the controlling terminal for the process (if the process does not already have one). */
        export const O_NOCTTY: number;
        /** Constant for fs.open(). Flag indicating that if the file exists and is a regular file, and the file is opened successfully for write access, its length shall be truncated to zero. */
        export const O_TRUNC: number;
        /** Constant for fs.open(). Flag indicating that data will be appended to the end of the file. */
        export const O_APPEND: number;
        /** Constant for fs.open(). Flag indicating that the open should fail if the path is not a directory. */
        export const O_DIRECTORY: number;
        /** Constant for fs.open(). Flag indicating reading accesses to the file system will no longer result in an update to the atime information associated with the file. This flag is available on Linux operating systems only. */
        export const O_NOATIME: number;
        /** Constant for fs.open(). Flag indicating that the open should fail if the path is a symbolic link. */
        export const O_NOFOLLOW: number;
        /** Constant for fs.open(). Flag indicating that the file is opened for synchronous I/O. */
        export const O_SYNC: number;
        /** Constant for fs.open(). Flag indicating to open the symbolic link itself rather than the resource it is pointing to. */
        export const O_SYMLINK: number;
        /** Constant for fs.open(). When set, an attempt will be made to minimize caching effects of file I/O. */
        export const O_DIRECT: number;
        /** Constant for fs.open(). Flag indicating to open the file in nonblocking mode when possible. */
        export const O_NONBLOCK: number;
        // File Type Constants
        /** Constant for fs.Stats mode property for determining a file's type. Bit mask used to extract the file type code. */
        export const S_IFMT: number;
        /** Constant for fs.Stats mode property for determining a file's type. File type constant for a regular file. */
        export const S_IFREG: number;
        /** Constant for fs.Stats mode property for determining a file's type. File type constant for a directory. */
        export const S_IFDIR: number;
        /** Constant for fs.Stats mode property for determining a file's type. File type constant for a character-oriented device file. */
        export const S_IFCHR: number;
        /** Constant for fs.Stats mode property for determining a file's type. File type constant for a block-oriented device file. */
        export const S_IFBLK: number;
        /** Constant for fs.Stats mode property for determining a file's type. File type constant for a FIFO/pipe. */
        export const S_IFIFO: number;
        /** Constant for fs.Stats mode property for determining a file's type. File type constant for a symbolic link. */
        export const S_IFLNK: number;
        /** Constant for fs.Stats mode property for determining a file's type. File type constant for a socket. */
        export const S_IFSOCK: number;
        // File Mode Constants
        /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable, writable and executable by owner. */
        export const S_IRWXU: number;
        /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable by owner. */
        export const S_IRUSR: number;
        /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating writable by owner. */
        export const S_IWUSR: number;
        /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating executable by owner. */
        export const S_IXUSR: number;
        /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable, writable and executable by group. */
        export const S_IRWXG: number;
        /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable by group. */
        export const S_IRGRP: number;
        /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating writable by group. */
        export const S_IWGRP: number;
        /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating executable by group. */
        export const S_IXGRP: number;
        /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable, writable and executable by others. */
        export const S_IRWXO: number;
        /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable by others. */
        export const S_IROTH: number;
        /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating writable by others. */
        export const S_IWOTH: number;
        /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating executable by others. */
        export const S_IXOTH: number;
    }
    /** Tests a user's permissions for the file specified by path. */
    export function access(path: string | Buffer, callback: (err: NodeJS.ErrnoException) => void): void;
    export function access(path: string | Buffer, mode: number, callback: (err: NodeJS.ErrnoException) => void): void;
    /** Synchronous version of fs.access. This throws if any accessibility checks fail, and does nothing otherwise. */
    export function accessSync(path: string | Buffer, mode?: number): void;
    // Streaming read of a file; see the ReadStream interface for emitted events.
    export function createReadStream(path: string | Buffer, options?: {
        flags?: string;
        encoding?: string;
        fd?: number;
        mode?: number;
        autoClose?: boolean;
        start?: number;
        end?: number;
    }): ReadStream;
    // Streaming write to a file; see the WriteStream interface for emitted events.
    export function createWriteStream(path: string | Buffer, options?: {
        flags?: string;
        encoding?: string;
        fd?: number;
        mode?: number;
        autoClose?: boolean;
        start?: number;
    }): WriteStream;
    // Flush file data (but not necessarily metadata) to disk for fd.
    export function fdatasync(fd: number, callback: Function): void;
    export function fdatasyncSync(fd: number): void;
}
declare module "path" {
    /**
     * A parsed path object generated by path.parse() or consumed by path.format().
     * For example, '/home/user/file.txt' parses to
     * { root: '/', dir: '/home/user', base: 'file.txt', ext: '.txt', name: 'file' }.
     */
    export interface ParsedPath {
        /**
         * The root of the path such as '/' or 'c:\'
         */
        root: string;
        /**
         * The full directory path such as '/home/user/dir' or 'c:\path\dir'
         */
        dir: string;
        /**
         * The file name including extension (if any) such as 'index.html'
         */
        base: string;
        /**
         * The file extension (if any) such as '.html'
         */
        ext: string;
        /**
         * The file name without extension (if any) such as 'index'
         */
        name: string;
    }
    /**
     * Normalize a string path, reducing '..' and '.' parts.
     * When multiple slashes are found, they're replaced by a single one; when the path contains a trailing slash, it is preserved. On Windows backslashes are used.
     *
     * @param p string path to normalize.
     */
    export function normalize(p: string): string;
    /**
     * Join all arguments together and normalize the resulting path.
     * Arguments must be strings. In v0.8, non-string arguments were silently ignored. In v0.10 and up, an exception is thrown.
     *
     * NOTE(review): this `any[]` overload subsumes the `string[]` overload below —
     * the duplicate looks redundant; confirm before removing either.
     *
     * @param paths string paths to join.
     */
    export function join(...paths: any[]): string;
    /**
     * Join all arguments together and normalize the resulting path.
     * Arguments must be strings. In v0.8, non-string arguments were silently ignored. In v0.10 and up, an exception is thrown.
     *
     * @param paths string paths to join.
     */
    export function join(...paths: string[]): string;
    /**
     * The right-most parameter is considered {to}. Other parameters are considered an array of {from}.
     *
     * Starting from leftmost {from} parameter, resolves {to} to an absolute path.
     *
     * If {to} isn't already absolute, {from} arguments are prepended in right to left order, until an absolute path is found. If after using all {from} paths still no absolute path is found, the current working directory is used as well. The resulting path is normalized, and trailing slashes are removed unless the path gets resolved to the root directory.
     *
     * @param pathSegments string paths to join. Non-string arguments are ignored.
     */
    export function resolve(...pathSegments: any[]): string;
/**
* Determines whether {path} is an absolute path. An absolute path will always resolve to the same location, regardless of the working directory.
*
* @param path path to test.
*/
export function isAbsolute(path: string): boolean;
/**
* Solve the relative path from {from} to {to}.
* At times we have two absolute paths, and we need to derive the relative path from one to the other. This is actually the reverse transform of path.resolve.
*
* @param from
* @param to
*/
export function relative(from: string, to: string): string;
/**
* Return the directory name of a path. Similar to the Unix dirname command.
*
* @param p the path to evaluate.
*/
export function dirname(p: string): string;
/**
* Return the last portion of a path. Similar to the Unix basename command.
* Often used to extract the file name from a fully qualified path.
*
* @param p the path to evaluate.
* @param ext optionally, an extension to remove from the result.
*/
export function basename(p: string, ext?: string): string;
/**
* Return the extension of the path, from the last '.' to end of string in the last portion of the path.
* If there is no '.' in the last portion of the path or the first character of it is '.', then it returns an empty string
*
* @param p the path to evaluate.
*/
export function extname(p: string): string;
/**
* The platform-specific file separator. '\\' or '/'.
*/
export var sep: string;
/**
* The platform-specific file delimiter. ';' or ':'.
*/
export var delimiter: string;
/**
* Returns an object from a path string - the opposite of format().
*
* @param pathString path to evaluate.
*/
export function parse(pathString: string): ParsedPath;
/**
* Returns a path string from an object - the opposite of parse().
*
* @param pathString path to evaluate.
*/
export function format(pathObject: ParsedPath): string;
export module posix {
export function normalize(p: string): string;
export function join(...paths: any[]): string;
export function resolve(...pathSegments: any[]): string;
export function isAbsolute(p: string): boolean;
export function relative(from: string, to: string): string;
export function dirname(p: string): string;
export function basename(p: string, ext?: string): string;
export function extname(p: string): string;
export var sep: string;
export var delimiter: string;
export function parse(p: string): ParsedPath;
export function format(pP: ParsedPath): string;
}
export module win32 {
export function normalize(p: string): string;
export function join(...paths: any[]): string;
export function resolve(...pathSegments: any[]): string;
export function isAbsolute(p: string): boolean;
export function relative(from: string, to: string): string;
export function dirname(p: string): string;
export function basename(p: string, ext?: string): string;
export function extname(p: string): string;
export var sep: string;
export var delimiter: string;
export function parse(p: string): ParsedPath;
export function format(pP: ParsedPath): string;
}
}
declare module "string_decoder" {
    /**
     * A decoder that converts Buffer chunks into strings, keeping any
     * partial multi-byte character buffered between calls.
     */
    export interface NodeStringDecoder {
        /** Decodes the buffer, returning the complete characters available so far. */
        write(buffer: Buffer): string;
        /** Returns any remaining buffered input as a string, optionally decoding a final buffer first. */
        end(buffer?: Buffer): string;
    }
    /**
     * Constructor for NodeStringDecoder.
     * @param encoding the character encoding to decode (presumably defaults to 'utf8' — confirm against Node docs).
     */
    export var StringDecoder: {
        new (encoding?: string): NodeStringDecoder;
    };
}
declare module "tls" {
    import * as crypto from "crypto";
    import * as net from "net";
    import * as stream from "stream";

    var CLIENT_RENEG_LIMIT: number;
    var CLIENT_RENEG_WINDOW: number;

    /**
     * Subject / issuer fields of an X.509 certificate.
     */
    export interface Certificate {
        /**
         * Country code.
         */
        C: string;
        /**
         * Street.
         */
        ST: string;
        /**
         * Locality.
         */
        L: string;
        /**
         * Organization.
         */
        O: string;
        /**
         * Organizational unit.
         */
        OU: string;
        /**
         * Common name.
         */
        CN: string;
    }
    export interface CipherNameAndProtocol {
        /**
         * The cipher name.
         */
        name: string;
        /**
         * SSL/TLS protocol version.
         */
        version: string;
    }
    export class TLSSocket extends stream.Duplex {
        /**
         * Construct a new tls.TLSSocket object from an existing TCP socket.
         */
        constructor(socket: net.Socket, options?: {
            /**
             * An optional TLS context object from tls.createSecureContext()
             */
            secureContext?: SecureContext,
            /**
             * If true the TLS socket will be instantiated in server-mode.
             * Defaults to false.
             */
            isServer?: boolean,
            /**
             * An optional net.Server instance.
             */
            server?: net.Server,
            /**
             * If true the server will request a certificate from clients that
             * connect and attempt to verify that certificate. Defaults to
             * false.
             */
            requestCert?: boolean,
            /**
             * If true the server will reject any connection which is not
             * authorized with the list of supplied CAs. This option only has an
             * effect if requestCert is true. Defaults to false.
             */
            rejectUnauthorized?: boolean,
            /**
             * An array of strings or a Buffer naming possible NPN protocols.
             * (Protocols should be ordered by their priority.)
             */
            NPNProtocols?: string[] | Buffer,
            /**
             * An array of strings or a Buffer naming possible ALPN protocols.
             * (Protocols should be ordered by their priority.) When the server
             * receives both NPN and ALPN extensions from the client, ALPN takes
             * precedence over NPN and the server does not send an NPN extension
             * to the client.
             */
            ALPNProtocols?: string[] | Buffer,
            /**
             * SNICallback(servername, cb) <Function> A function that will be
             * called if the client supports SNI TLS extension. Two arguments
             * will be passed when called: servername and cb. SNICallback should
             * invoke cb(null, ctx), where ctx is a SecureContext instance.
             * (tls.createSecureContext(...) can be used to get a proper
             * SecureContext.) If SNICallback wasn't provided the default callback
             * with high-level API will be used (see below).
             */
            SNICallback?: Function,
            /**
             * An optional Buffer instance containing a TLS session.
             */
            session?: Buffer,
            /**
             * If true, specifies that the OCSP status request extension will be
             * added to the client hello and an 'OCSPResponse' event will be
             * emitted on the socket before establishing a secure communication
             */
            requestOCSP?: boolean
        });
        /**
         * Returns the bound address, the address family name and port of the underlying socket as reported by
         * the operating system.
         * @returns {any} - An object with three properties, e.g. { port: 12346, family: 'IPv4', address: '127.0.0.1' }.
         */
        address(): { port: number; family: string; address: string };
        /**
         * A boolean that is true if the peer certificate was signed by one of the specified CAs, otherwise false.
         */
        authorized: boolean;
        /**
         * The reason why the peer's certificate has not been verified.
         * This property becomes available only when tlsSocket.authorized === false.
         */
        authorizationError: Error;
        /**
         * Static boolean value, always true.
         * May be used to distinguish TLS sockets from regular ones.
         */
        encrypted: boolean;
        /**
         * Returns an object representing the cipher name and the SSL/TLS protocol version of the current connection.
         * @returns {CipherNameAndProtocol} - Returns an object representing the cipher name
         * and the SSL/TLS protocol version of the current connection.
         */
        getCipher(): CipherNameAndProtocol;
        /**
         * Returns an object representing the peer's certificate.
         * The returned object has some properties corresponding to the field of the certificate.
         * If detailed argument is true the full chain with issuer property will be returned,
         * if false only the top certificate without issuer property.
         * If the peer does not provide a certificate, it returns null or an empty object.
         * @param {boolean} detailed - If true; the full chain with issuer property will be returned.
         * @returns {any} - An object representing the peer's certificate.
         */
        getPeerCertificate(detailed?: boolean): {
            subject: Certificate;
            issuerInfo: Certificate;
            issuer: Certificate;
            raw: any;
            valid_from: string;
            valid_to: string;
            fingerprint: string;
            serialNumber: string;
        };
        /**
         * Could be used to speed up handshake establishment when reconnecting to the server.
         * @returns {any} - ASN.1 encoded TLS session or undefined if none was negotiated.
         */
        getSession(): any;
        /**
         * NOTE: Works only with client TLS sockets.
         * Useful only for debugging, for session reuse provide session option to tls.connect().
         * @returns {any} - TLS session ticket or undefined if none was negotiated.
         */
        getTLSTicket(): any;
        /**
         * The string representation of the local IP address.
         */
        localAddress: string;
        /**
         * The numeric representation of the local port.
         */
        localPort: string;
        /**
         * The string representation of the remote IP address.
         * For example, '74.125.127.100' or '2001:4860:a005::68'.
         */
        remoteAddress: string;
        /**
         * The string representation of the remote IP family. 'IPv4' or 'IPv6'.
         */
        remoteFamily: string;
        /**
         * The numeric representation of the remote port. For example, 443.
         */
        remotePort: number;
        /**
         * Initiate TLS renegotiation process.
         *
         * NOTE: Can be used to request peer's certificate after the secure connection has been established.
         * ANOTHER NOTE: When running as the server, socket will be destroyed with an error after handshakeTimeout timeout.
         * @param {TlsOptions} options - The options may contain the following fields: rejectUnauthorized,
         * requestCert (See tls.createServer() for details).
         * @param {Function} callback - callback(err) will be executed with null as err, once the renegotiation
         * is successfully completed.
         */
        renegotiate(options: TlsOptions, callback: (err: Error) => any): any;
        /**
         * Set maximum TLS fragment size (default and maximum value is: 16384, minimum is: 512).
         * Smaller fragment size decreases buffering latency on the client: large fragments are buffered by
         * the TLS layer until the entire fragment is received and its integrity is verified;
         * large fragments can span multiple roundtrips, and their processing can be delayed due to packet
         * loss or reordering. However, smaller fragments add extra TLS framing bytes and CPU overhead,
         * which may decrease overall server throughput.
         * @param {number} size - TLS fragment size (default and maximum value is: 16384, minimum is: 512).
         * @returns {boolean} - Returns true on success, false otherwise.
         */
        setMaxSendFragment(size: number): boolean;
        /**
         * events.EventEmitter
         * 1. OCSPResponse
         * 2. secureConnect
         **/
        addListener(event: string, listener: Function): this;
        addListener(event: "OCSPResponse", listener: (response: Buffer) => void): this;
        addListener(event: "secureConnect", listener: () => void): this;
        emit(event: string, ...args: any[]): boolean;
        emit(event: "OCSPResponse", response: Buffer): boolean;
        emit(event: "secureConnect"): boolean;
        on(event: string, listener: Function): this;
        on(event: "OCSPResponse", listener: (response: Buffer) => void): this;
        on(event: "secureConnect", listener: () => void): this;
        once(event: string, listener: Function): this;
        once(event: "OCSPResponse", listener: (response: Buffer) => void): this;
        once(event: "secureConnect", listener: () => void): this;
        prependListener(event: string, listener: Function): this;
        prependListener(event: "OCSPResponse", listener: (response: Buffer) => void): this;
        prependListener(event: "secureConnect", listener: () => void): this;
        prependOnceListener(event: string, listener: Function): this;
        prependOnceListener(event: "OCSPResponse", listener: (response: Buffer) => void): this;
        prependOnceListener(event: "secureConnect", listener: () => void): this;
    }
    /**
     * Options accepted by tls.createServer() and TLSSocket#renegotiate().
     */
    export interface TlsOptions {
        host?: string;
        port?: number;
        pfx?: string | Buffer[];
        key?: string | string[] | Buffer | any[];
        passphrase?: string;
        cert?: string | string[] | Buffer | Buffer[];
        ca?: string | string[] | Buffer | Buffer[];
        crl?: string | string[];
        ciphers?: string;
        honorCipherOrder?: boolean;
        requestCert?: boolean;
        rejectUnauthorized?: boolean;
        NPNProtocols?: string[] | Buffer;
        SNICallback?: (servername: string, cb: (err: Error, ctx: SecureContext) => any) => any;
        ecdhCurve?: string;
        dhparam?: string | Buffer;
        handshakeTimeout?: number;
        ALPNProtocols?: string[] | Buffer;
        sessionTimeout?: number;
        ticketKeys?: any;
        sessionIdContext?: string;
        secureProtocol?: string;
    }
    /**
     * Options accepted by tls.connect().
     */
    export interface ConnectionOptions {
        host?: string;
        port?: number;
        socket?: net.Socket;
        pfx?: string | Buffer;
        key?: string | string[] | Buffer | Buffer[];
        passphrase?: string;
        cert?: string | string[] | Buffer | Buffer[];
        ca?: string | Buffer | (string | Buffer)[];
        rejectUnauthorized?: boolean;
        NPNProtocols?: (string | Buffer)[];
        servername?: string;
        path?: string;
        ALPNProtocols?: (string | Buffer)[];
        checkServerIdentity?: (servername: string, cert: string | Buffer | (string | Buffer)[]) => any;
        secureProtocol?: string;
        secureContext?: Object;
        session?: Buffer;
        minDHSize?: number;
    }
    /**
     * A TLS server, extending net.Server with certificate handling and
     * TLS-specific events.
     */
    export interface Server extends net.Server {
        close(callback?: Function): Server;
        address(): { port: number; family: string; address: string; };
        addContext(hostName: string, credentials: {
            key: string;
            cert: string;
            ca: string;
        }): void;
        maxConnections: number;
        connections: number;
        /**
         * events.EventEmitter
         * 1. tlsClientError
         * 2. newSession
         * 3. OCSPRequest
         * 4. resumeSession
         * 5. secureConnection
         **/
        addListener(event: string, listener: Function): this;
        addListener(event: "tlsClientError", listener: (err: Error, tlsSocket: TLSSocket) => void): this;
        addListener(event: "newSession", listener: (sessionId: any, sessionData: any, callback: (err: Error, resp: Buffer) => void) => void): this;
        addListener(event: "OCSPRequest", listener: (certificate: Buffer, issuer: Buffer, callback: Function) => void): this;
        addListener(event: "resumeSession", listener: (sessionId: any, callback: (err: Error, sessionData: any) => void) => void): this;
        addListener(event: "secureConnection", listener: (tlsSocket: TLSSocket) => void): this;
        emit(event: string, ...args: any[]): boolean;
        emit(event: "tlsClientError", err: Error, tlsSocket: TLSSocket): boolean;
        emit(event: "newSession", sessionId: any, sessionData: any, callback: (err: Error, resp: Buffer) => void): boolean;
        emit(event: "OCSPRequest", certificate: Buffer, issuer: Buffer, callback: Function): boolean;
        emit(event: "resumeSession", sessionId: any, callback: (err: Error, sessionData: any) => void): boolean;
        emit(event: "secureConnection", tlsSocket: TLSSocket): boolean;
        on(event: string, listener: Function): this;
        on(event: "tlsClientError", listener: (err: Error, tlsSocket: TLSSocket) => void): this;
        on(event: "newSession", listener: (sessionId: any, sessionData: any, callback: (err: Error, resp: Buffer) => void) => void): this;
        on(event: "OCSPRequest", listener: (certificate: Buffer, issuer: Buffer, callback: Function) => void): this;
        on(event: "resumeSession", listener: (sessionId: any, callback: (err: Error, sessionData: any) => void) => void): this;
        on(event: "secureConnection", listener: (tlsSocket: TLSSocket) => void): this;
        once(event: string, listener: Function): this;
        once(event: "tlsClientError", listener: (err: Error, tlsSocket: TLSSocket) => void): this;
        once(event: "newSession", listener: (sessionId: any, sessionData: any, callback: (err: Error, resp: Buffer) => void) => void): this;
        once(event: "OCSPRequest", listener: (certificate: Buffer, issuer: Buffer, callback: Function) => void): this;
        once(event: "resumeSession", listener: (sessionId: any, callback: (err: Error, sessionData: any) => void) => void): this;
        once(event: "secureConnection", listener: (tlsSocket: TLSSocket) => void): this;
        prependListener(event: string, listener: Function): this;
        prependListener(event: "tlsClientError", listener: (err: Error, tlsSocket: TLSSocket) => void): this;
        prependListener(event: "newSession", listener: (sessionId: any, sessionData: any, callback: (err: Error, resp: Buffer) => void) => void): this;
        prependListener(event: "OCSPRequest", listener: (certificate: Buffer, issuer: Buffer, callback: Function) => void): this;
        prependListener(event: "resumeSession", listener: (sessionId: any, callback: (err: Error, sessionData: any) => void) => void): this;
        prependListener(event: "secureConnection", listener: (tlsSocket: TLSSocket) => void): this;
        prependOnceListener(event: string, listener: Function): this;
        prependOnceListener(event: "tlsClientError", listener: (err: Error, tlsSocket: TLSSocket) => void): this;
        prependOnceListener(event: "newSession", listener: (sessionId: any, sessionData: any, callback: (err: Error, resp: Buffer) => void) => void): this;
        prependOnceListener(event: "OCSPRequest", listener: (certificate: Buffer, issuer: Buffer, callback: Function) => void): this;
        prependOnceListener(event: "resumeSession", listener: (sessionId: any, callback: (err: Error, sessionData: any) => void) => void): this;
        prependOnceListener(event: "secureConnection", listener: (tlsSocket: TLSSocket) => void): this;
    }
    /**
     * Cleartext side of a secure connection, as returned by tls.connect()
     * and passed to createServer()'s connection listener.
     */
    export interface ClearTextStream extends stream.Duplex {
        authorized: boolean;
        authorizationError: Error;
        getPeerCertificate(): any;
        getCipher: {
            name: string;
            version: string;
        };
        address: {
            port: number;
            family: string;
            address: string;
        };
        remoteAddress: string;
        remotePort: number;
    }
    /**
     * Pair of streams returned by createSecurePair(): the encrypted (wire)
     * side and the cleartext (application) side.
     */
    export interface SecurePair {
        encrypted: any;
        cleartext: any;
    }
    /**
     * Options accepted by tls.createSecureContext().
     */
    export interface SecureContextOptions {
        pfx?: string | Buffer;
        key?: string | Buffer;
        passphrase?: string;
        cert?: string | Buffer;
        ca?: string | Buffer;
        crl?: string | string[];
        ciphers?: string;
        honorCipherOrder?: boolean;
    }
    /**
     * Opaque credentials context produced by tls.createSecureContext().
     */
    export interface SecureContext {
        context: any;
    }
    export function createServer(options: TlsOptions, secureConnectionListener?: (cleartextStream: ClearTextStream) => void): Server;
    export function connect(options: ConnectionOptions, secureConnectionListener?: () => void): ClearTextStream;
    export function connect(port: number, host?: string, options?: ConnectionOptions, secureConnectListener?: () => void): ClearTextStream;
    export function connect(port: number, options?: ConnectionOptions, secureConnectListener?: () => void): ClearTextStream;
    export function createSecurePair(credentials?: crypto.Credentials, isServer?: boolean, requestCert?: boolean, rejectUnauthorized?: boolean): SecurePair;
    export function createSecureContext(details: SecureContextOptions): SecureContext;
}
declare module "crypto" {
    export interface Certificate {
        exportChallenge(spkac: string | Buffer): Buffer;
        exportPublicKey(spkac: string | Buffer): Buffer;
        verifySpkac(spkac: Buffer): boolean;
    }
    export var Certificate: {
        new (): Certificate;
        (): Certificate;
    };
    export var fips: boolean;
    /**
     * Credential material consumed by the legacy createCredentials() API.
     */
    export interface CredentialDetails {
        pfx: string;
        key: string;
        passphrase: string;
        cert: string;
        ca: string | string[];
        crl: string | string[];
        ciphers: string;
    }
    export interface Credentials { context?: any; }
    export function createCredentials(details: CredentialDetails): Credentials;
    export function createHash(algorithm: string): Hash;
    export function createHmac(algorithm: string, key: string | Buffer): Hmac;
    // String-literal unions constraining the encodings the native layer accepts.
    type Utf8AsciiLatin1Encoding = "utf8" | "ascii" | "latin1";
    type HexBase64Latin1Encoding = "latin1" | "hex" | "base64";
    type Utf8AsciiBinaryEncoding = "utf8" | "ascii" | "binary";
    type HexBase64BinaryEncoding = "binary" | "base64" | "hex";
    type ECDHKeyFormat = "compressed" | "uncompressed" | "hybrid";
    export interface Hash extends NodeJS.ReadWriteStream {
        update(data: string | Buffer): Hash;
        update(data: string | Buffer, input_encoding: Utf8AsciiLatin1Encoding): Hash;
        digest(): Buffer;
        digest(encoding: HexBase64Latin1Encoding): string;
    }
    export interface Hmac extends NodeJS.ReadWriteStream {
        update(data: string | Buffer): Hmac;
        update(data: string | Buffer, input_encoding: Utf8AsciiLatin1Encoding): Hmac;
        digest(): Buffer;
        digest(encoding: HexBase64Latin1Encoding): string;
    }
    export function createCipher(algorithm: string, password: any): Cipher;
    export function createCipheriv(algorithm: string, key: any, iv: any): Cipher;
    export interface Cipher extends NodeJS.ReadWriteStream {
        update(data: Buffer): Buffer;
        update(data: string, input_encoding: Utf8AsciiBinaryEncoding): Buffer;
        update(data: Buffer, input_encoding: any, output_encoding: HexBase64BinaryEncoding): string;
        update(data: string, input_encoding: Utf8AsciiBinaryEncoding, output_encoding: HexBase64BinaryEncoding): string;
        final(): Buffer;
        final(output_encoding: string): string;
        setAutoPadding(auto_padding?: boolean): void;
        getAuthTag(): Buffer;
        setAAD(buffer: Buffer): void;
    }
    export function createDecipher(algorithm: string, password: any): Decipher;
    export function createDecipheriv(algorithm: string, key: any, iv: any): Decipher;
    export interface Decipher extends NodeJS.ReadWriteStream {
        update(data: Buffer): Buffer;
        update(data: string, input_encoding: HexBase64BinaryEncoding): Buffer;
        update(data: Buffer, input_encoding: any, output_encoding: Utf8AsciiBinaryEncoding): string;
        update(data: string, input_encoding: HexBase64BinaryEncoding, output_encoding: Utf8AsciiBinaryEncoding): string;
        final(): Buffer;
        final(output_encoding: string): string;
        setAutoPadding(auto_padding?: boolean): void;
        setAuthTag(tag: Buffer): void;
        setAAD(buffer: Buffer): void;
    }
    export function createSign(algorithm: string): Signer;
    export interface Signer extends NodeJS.WritableStream {
        update(data: string | Buffer): Signer;
        update(data: string | Buffer, input_encoding: Utf8AsciiLatin1Encoding): Signer;
        sign(private_key: string | { key: string; passphrase: string }): Buffer;
        sign(private_key: string | { key: string; passphrase: string }, output_format: HexBase64Latin1Encoding): string;
    }
    export function createVerify(algorithm: string): Verify;
    export interface Verify extends NodeJS.WritableStream {
        update(data: string | Buffer): Verify;
        update(data: string | Buffer, input_encoding: Utf8AsciiLatin1Encoding): Verify;
        verify(object: string, signature: Buffer): boolean;
        verify(object: string, signature: string, signature_format: HexBase64Latin1Encoding): boolean;
    }
    export function createDiffieHellman(prime_length: number, generator?: number): DiffieHellman;
    export function createDiffieHellman(prime: Buffer): DiffieHellman;
    export function createDiffieHellman(prime: string, prime_encoding: HexBase64Latin1Encoding): DiffieHellman;
    export function createDiffieHellman(prime: string, prime_encoding: HexBase64Latin1Encoding, generator: number | Buffer): DiffieHellman;
    export function createDiffieHellman(prime: string, prime_encoding: HexBase64Latin1Encoding, generator: string, generator_encoding: HexBase64Latin1Encoding): DiffieHellman;
    export interface DiffieHellman {
        generateKeys(): Buffer;
        generateKeys(encoding: HexBase64Latin1Encoding): string;
        computeSecret(other_public_key: Buffer): Buffer;
        computeSecret(other_public_key: string, input_encoding: HexBase64Latin1Encoding): Buffer;
        computeSecret(other_public_key: string, input_encoding: HexBase64Latin1Encoding, output_encoding: HexBase64Latin1Encoding): string;
        getPrime(): Buffer;
        getPrime(encoding: HexBase64Latin1Encoding): string;
        getGenerator(): Buffer;
        getGenerator(encoding: HexBase64Latin1Encoding): string;
        getPublicKey(): Buffer;
        getPublicKey(encoding: HexBase64Latin1Encoding): string;
        getPrivateKey(): Buffer;
        getPrivateKey(encoding: HexBase64Latin1Encoding): string;
        setPublicKey(public_key: Buffer): void;
        setPublicKey(public_key: string, encoding: string): void;
        setPrivateKey(private_key: Buffer): void;
        setPrivateKey(private_key: string, encoding: string): void;
        verifyError: number;
    }
    export function getDiffieHellman(group_name: string): DiffieHellman;
    export function pbkdf2(password: string | Buffer, salt: string | Buffer, iterations: number, keylen: number, digest: string, callback: (err: Error, derivedKey: Buffer) => any): void;
    export function pbkdf2Sync(password: string | Buffer, salt: string | Buffer, iterations: number, keylen: number, digest: string): Buffer;
    export function randomBytes(size: number): Buffer;
    export function randomBytes(size: number, callback: (err: Error, buf: Buffer) => void): void;
    export function pseudoRandomBytes(size: number): Buffer;
    export function pseudoRandomBytes(size: number, callback: (err: Error, buf: Buffer) => void): void;
    export interface RsaPublicKey {
        key: string;
        padding?: number;
    }
    export interface RsaPrivateKey {
        key: string;
        passphrase?: string;
        padding?: number;
    }
    export function publicEncrypt(public_key: string | RsaPublicKey, buffer: Buffer): Buffer;
    export function privateDecrypt(private_key: string | RsaPrivateKey, buffer: Buffer): Buffer;
    export function privateEncrypt(private_key: string | RsaPrivateKey, buffer: Buffer): Buffer;
    export function publicDecrypt(public_key: string | RsaPublicKey, buffer: Buffer): Buffer;
    export function getCiphers(): string[];
    export function getCurves(): string[];
    export function getHashes(): string[];
    export interface ECDH {
        generateKeys(): Buffer;
        generateKeys(encoding: HexBase64Latin1Encoding): string;
        generateKeys(encoding: HexBase64Latin1Encoding, format: ECDHKeyFormat): string;
        computeSecret(other_public_key: Buffer): Buffer;
        computeSecret(other_public_key: string, input_encoding: HexBase64Latin1Encoding): Buffer;
        computeSecret(other_public_key: string, input_encoding: HexBase64Latin1Encoding, output_encoding: HexBase64Latin1Encoding): string;
        getPrivateKey(): Buffer;
        getPrivateKey(encoding: HexBase64Latin1Encoding): string;
        getPublicKey(): Buffer;
        getPublicKey(encoding: HexBase64Latin1Encoding): string;
        getPublicKey(encoding: HexBase64Latin1Encoding, format: ECDHKeyFormat): string;
        setPrivateKey(private_key: Buffer): void;
        setPrivateKey(private_key: string, encoding: HexBase64Latin1Encoding): void;
    }
    export function createECDH(curve_name: string): ECDH;
    export function timingSafeEqual(a: Buffer, b: Buffer): boolean;
    export var DEFAULT_ENCODING: string;
}
declare module "stream" {
import * as events from "events";
class internal extends events.EventEmitter {
pipe<T extends NodeJS.WritableStream>(destination: T, options?: { end?: boolean; }): T;
}
namespace internal {
export class Stream extends internal { }
export interface ReadableOptions {
highWaterMark?: number;
encoding?: string;
objectMode?: boolean;
read?: (size?: number) => any;
}
export class Readable extends events.EventEmitter implements NodeJS.ReadableStream {
readable: boolean;
constructor(opts?: ReadableOptions);
protected _read(size: number): void;
read(size?: number): any;
setEncoding(encoding: string): void;
pause(): Readable;
resume(): Readable;
pipe<T extends NodeJS.WritableStream>(destination: T, options?: { end?: boolean; }): T;
unpipe<T extends NodeJS.WritableStream>(destination?: T): void;
unshift(chunk: any): void;
wrap(oldStream: NodeJS.ReadableStream): NodeJS.ReadableStream;
push(chunk: any, encoding?: string): boolean;
/**
* Event emitter
* The defined events on documents including:
* 1. close
* 2. data
* 3. end
* 4. readable
* 5. error
**/
addListener(event: string, listener: Function): this;
addListener(event: string, listener: Function): this;
addListener(event: "close", listener: () => void): this;
addListener(event: "data", listener: (chunk: Buffer | string) => void): this;
addListener(event: "end", listener: () => void): this;
addListener(event: "readable", listener: () => void): this;
addListener(event: "error", listener: (err: Error) => void): this;
emit(event: string, ...args: any[]): boolean;
emit(event: "close"): boolean;
emit(event: "data", chunk: Buffer | string): boolean;
emit(event: "end"): boolean;
emit(event: "readable"): boolean;
emit(event: "error", err: Error): boolean;
on(event: string, listener: Function): this;
on(event: "close", listener: () => void): this;
on(event: "data", listener: (chunk: Buffer | string) => void): this;
on(event: "end", listener: () => void): this;
on(event: "readable", listener: () => void): this;
on(event: "error", listener: (err: Error) => void): this;
once(event: string, listener: Function): this;
once(event: "close", listener: () => void): this;
once(event: "data", listener: (chunk: Buffer | string) => void): this;
once(event: "end", listener: () => void): this;
once(event: "readable", listener: () => void): this;
once(event: "error", listener: (err: Error) => void): this;
prependListener(event: string, listener: Function): this;
prependListener(event: "close", listener: () => void): this;
prependListener(event: "data", listener: (chunk: Buffer | string) => void): this;
prependListener(event: "end", listener: () => void): this;
prependListener(event: "readable", listener: () => void): this;
prependListener(event: "error", listener: (err: Error) => void): this;
prependOnceListener(event: string, listener: Function): this;
prependOnceListener(event: "close", listener: () => void): this;
prependOnceListener(event: "data", listener: (chunk: Buffer | string) => void): this;
prependOnceListener(event: "end", listener: () => void): this;
prependOnceListener(event: "readable", listener: () => void): this;
prependOnceListener(event: "error", listener: (err: Error) => void): this;
removeListener(event: string, listener: Function): this;
removeListener(event: "close", listener: () => void): this;
removeListener(event: "data", listener: (chunk: Buffer | string) => void): this;
removeListener(event: "end", listener: () => void): this;
removeListener(event: "readable", listener: () => void): this;
removeListener(event: "error", listener: (err: Error) => void): this;
}
export interface WritableOptions {
highWaterMark?: number;
decodeStrings?: boolean;
objectMode?: boolean;
write?: (chunk: string | Buffer, encoding: string, callback: Function) => any;
writev?: (chunks: { chunk: string | Buffer, encoding: string }[], callback: Function) => any;
}
export class Writable extends events.EventEmitter implements NodeJS.WritableStream {
writable: boolean;
constructor(opts?: WritableOptions);
protected _write(chunk: any, encoding: string, callback: Function): void;
write(chunk: any, cb?: Function): boolean;
write(chunk: any, encoding?: string, cb?: Function): boolean;
end(): void;
end(chunk: any, cb?: Function): void;
end(chunk: any, encoding?: string, cb?: Function): void;
/**
* Event emitter
* The defined events on documents including:
* 1. close
* 2. drain
* 3. error
* 4. finish
* 5. pipe
* 6. unpipe
**/
addListener(event: string, listener: Function): this;
addListener(event: "close", listener: () => void): this;
addListener(event: "drain", listener: () => void): this;
addListener(event: "error", listener: (err: Error) => void): this;
addListener(event: "finish", listener: () => void): this;
addListener(event: "pipe", listener: (src: Readable) => void): this;
addListener(event: "unpipe", listener: (src: Readable) => void): this;
emit(event: string, ...args: any[]): boolean;
emit(event: "close"): boolean;
emit(event: "drain", chunk: Buffer | string): boolean;
emit(event: "error", err: Error): boolean;
emit(event: "finish"): boolean;
emit(event: "pipe", src: Readable): boolean;
emit(event: "unpipe", src: Readable): boolean;
on(event: string, listener: Function): this;
on(event: "close", listener: () => void): this;
on(event: "drain", listener: () => void): this;
on(event: "error", listener: (err: Error) => void): this;
on(event: "finish", listener: () => void): this;
on(event: "pipe", listener: (src: Readable) => void): this;
on(event: "unpipe", listener: (src: Readable) => void): this;
once(event: string, listener: Function): this;
once(event: "close", listener: () => void): this;
once(event: "drain", listener: () => void): this;
once(event: "error", listener: (err: Error) => void): this;
once(event: "finish", listener: () => void): this;
once(event: "pipe", listener: (src: Readable) => void): this;
once(event: "unpipe", listener: (src: Readable) => void): this;
prependListener(event: string, listener: Function): this;
prependListener(event: "close", listener: () => void): this;
prependListener(event: "drain", listener: () => void): this;
prependListener(event: "error", listener: (err: Error) => void): this;
prependListener(event: "finish", listener: () => void): this;
prependListener(event: "pipe", listener: (src: Readable) => void): this;
prependListener(event: "unpipe", listener: (src: Readable) => void): this;
prependOnceListener(event: string, listener: Function): this;
prependOnceListener(event: "close", listener: () => void): this;
prependOnceListener(event: "drain", listener: () => void): this;
prependOnceListener(event: "error", listener: (err: Error) => void): this;
prependOnceListener(event: "finish", listener: () => void): this;
prependOnceListener(event: "pipe", listener: (src: Readable) => void): this;
prependOnceListener(event: "unpipe", listener: (src: Readable) => void): this;
removeListener(event: string, listener: Function): this;
removeListener(event: "close", listener: () => void): this;
removeListener(event: "drain", listener: () => void): this;
removeListener(event: "error", listener: (err: Error) => void): this;
removeListener(event: "finish", listener: () => void): this;
removeListener(event: "pipe", listener: (src: Readable) => void): this;
removeListener(event: "unpipe", listener: (src: Readable) => void): this;
}
        /** Options accepted by the Duplex constructor; combines the readable and writable option sets. */
        export interface DuplexOptions extends ReadableOptions, WritableOptions {
            // When false the writable side is auto-ended when the readable side
            // ends — NOTE(review): default presumably true; confirm against Node docs.
            allowHalfOpen?: boolean;
            readableObjectMode?: boolean;
            writableObjectMode?: boolean;
        }
// Note: Duplex extends both Readable and Writable.
        /**
         * Duplex stream: readable and writable at the same time (e.g. a socket).
         * Declared as extending Readable while re-declaring the Writable surface,
         * since TypeScript has no multiple inheritance.
         */
        export class Duplex extends Readable implements NodeJS.ReadWriteStream {
            // Readable side (return type narrowed to Duplex for chaining)
            pause(): Duplex;
            resume(): Duplex;
            // Writable side
            writable: boolean;
            constructor(opts?: DuplexOptions);
            /** Internal write hook; subclasses implement this. */
            protected _write(chunk: any, encoding: string, callback: Function): void;
            write(chunk: any, cb?: Function): boolean;
            write(chunk: any, encoding?: string, cb?: Function): boolean;
            end(): void;
            end(chunk: any, cb?: Function): void;
            end(chunk: any, encoding?: string, cb?: Function): void;
        }
        /** Options for Transform; the transform/flush hooks may be supplied here instead of subclassing. */
        export interface TransformOptions extends DuplexOptions {
            /** Called per chunk; push results and invoke callback when done. */
            transform?: (chunk: string | Buffer, encoding: string, callback: Function) => any;
            /** Called once before the stream ends, to emit any buffered tail data. */
            flush?: (callback: Function) => any;
        }
// Note: Transform lacks the _read and _write methods of Readable/Writable.
        /**
         * Transform stream: a duplex stream whose output is computed from its
         * input via _transform/_flush.
         * Note: declared on EventEmitter directly — it intentionally lacks the
         * _read/_write members of Readable/Writable (see comment above).
         */
        export class Transform extends events.EventEmitter implements NodeJS.ReadWriteStream {
            readable: boolean;
            writable: boolean;
            constructor(opts?: TransformOptions);
            /** Per-chunk transformation hook; subclasses implement this. */
            protected _transform(chunk: any, encoding: string, callback: Function): void;
            /** End-of-input hook for emitting buffered data. */
            protected _flush(callback: Function): void;
            read(size?: number): any;
            setEncoding(encoding: string): void;
            pause(): Transform;
            resume(): Transform;
            pipe<T extends NodeJS.WritableStream>(destination: T, options?: { end?: boolean; }): T;
            unpipe<T extends NodeJS.WritableStream>(destination?: T): void;
            unshift(chunk: any): void;
            wrap(oldStream: NodeJS.ReadableStream): NodeJS.ReadableStream;
            push(chunk: any, encoding?: string): boolean;
            write(chunk: any, cb?: Function): boolean;
            write(chunk: any, encoding?: string, cb?: Function): boolean;
            end(): void;
            end(chunk: any, cb?: Function): void;
            end(chunk: any, encoding?: string, cb?: Function): void;
        }
export class PassThrough extends Transform { }
}
export = internal;
}
/**
 * Type declarations for Node's "util" module.
 * The is*() type checks and the debug/error/puts/print helpers are legacy
 * APIs — NOTE(review): most are deprecated in modern Node; confirm before use.
 */
declare module "util" {
    /** Options for util.inspect(). */
    export interface InspectOptions {
        showHidden?: boolean;
        depth?: number;
        colors?: boolean;
        customInspect?: boolean;
    }
    /** printf-style string formatting ("%s", "%d", "%j", ...). */
    export function format(format: any, ...param: any[]): string;
    export function debug(string: string): void;
    export function error(...param: any[]): void;
    export function puts(...param: any[]): void;
    export function print(...param: any[]): void;
    export function log(string: string): void;
    /** Stringifies an object for debugging (positional-options overload). */
    export function inspect(object: any, showHidden?: boolean, depth?: number, color?: boolean): string;
    /** Stringifies an object for debugging (options-object overload). */
    export function inspect(object: any, options: InspectOptions): string;
    export function isArray(object: any): boolean;
    export function isRegExp(object: any): boolean;
    export function isDate(object: any): boolean;
    export function isError(object: any): boolean;
    /** Prototype-chain inheritance helper (pre-ES6-class style). */
    export function inherits(constructor: any, superConstructor: any): void;
    /** Returns a logger that writes only when NODE_DEBUG contains `key`. */
    export function debuglog(key: string): (msg: string, ...param: any[]) => void;
    export function isBoolean(object: any): boolean;
    export function isBuffer(object: any): boolean;
    export function isFunction(object: any): boolean;
    export function isNull(object: any): boolean;
    export function isNullOrUndefined(object: any): boolean;
    export function isNumber(object: any): boolean;
    export function isObject(object: any): boolean;
    export function isPrimitive(object: any): boolean;
    export function isString(object: any): boolean;
    export function isSymbol(object: any): boolean;
    export function isUndefined(object: any): boolean;
    /** Wraps `fn` so the first call emits a deprecation warning. */
    export function deprecate(fn: Function, message: string): Function;
}
/**
 * Type declarations for Node's "assert" module.
 * The module itself is callable — assert(value) throws AssertionError when
 * `value` is falsy — and also exposes the individual assertion helpers.
 */
declare module "assert" {
    /** Callable form: throws AssertionError if `value` is falsy. */
    function internal(value: any, message?: string): void;
    namespace internal {
        /** Error thrown by failed assertions. */
        export class AssertionError implements Error {
            name: string;
            message: string;
            /** Value the code actually produced. */
            actual: any;
            /** Value the assertion expected. */
            expected: any;
            /** Comparison operator used (e.g. "==", "deepEqual"). */
            operator: string;
            /** True when `message` was auto-generated from actual/expected. */
            generatedMessage: boolean;
            constructor(options?: {
                message?: string; actual?: any; expected?: any;
                operator?: string; stackStartFunction?: Function
            });
        }
        export function fail(actual: any, expected: any, message: string, operator: string): void;
        export function ok(value: any, message?: string): void;
        export function equal(actual: any, expected: any, message?: string): void;
        export function notEqual(actual: any, expected: any, message?: string): void;
        export function deepEqual(actual: any, expected: any, message?: string): void;
        // Fixed parameter-name typo ("acutal" -> "actual"); parameter names in
        // ambient declarations are documentation only, so this is caller-safe.
        export function notDeepEqual(actual: any, expected: any, message?: string): void;
        export function strictEqual(actual: any, expected: any, message?: string): void;
        export function notStrictEqual(actual: any, expected: any, message?: string): void;
        export function deepStrictEqual(actual: any, expected: any, message?: string): void;
        export function notDeepStrictEqual(actual: any, expected: any, message?: string): void;
        /** Asserts that `block` throws; the error may be validated by class, regex, or predicate. */
        export var throws: {
            (block: Function, message?: string): void;
            (block: Function, error: Function, message?: string): void;
            (block: Function, error: RegExp, message?: string): void;
            (block: Function, error: (err: any) => boolean, message?: string): void;
        };
        /** Asserts that `block` does not throw a matching error. */
        export var doesNotThrow: {
            (block: Function, message?: string): void;
            (block: Function, error: Function, message?: string): void;
            (block: Function, error: RegExp, message?: string): void;
            (block: Function, error: (err: any) => boolean, message?: string): void;
        };
        /** Throws `value` if it is truthy; intended for callback error parameters. */
        export function ifError(value: any): void;
    }
    export = internal;
}
/**
 * Type declarations for Node's "tty" module.
 * The Read/Write streams here describe process.stdin/stdout/stderr when they
 * are attached to a terminal.
 */
declare module "tty" {
    import * as net from "net";
    /** True if the file descriptor is attached to a terminal. */
    export function isatty(fd: number): boolean;
    export interface ReadStream extends net.Socket {
        /** True while the terminal is in raw mode (see setRawMode). */
        isRaw: boolean;
        setRawMode(mode: boolean): void;
        isTTY: boolean;
    }
    export interface WriteStream extends net.Socket {
        /** Terminal width in character cells. */
        columns: number;
        /** Terminal height in rows. */
        rows: number;
        isTTY: boolean;
    }
}
/**
 * Type declarations for Node's "domain" module (grouped error handling for
 * async operations).
 * NOTE(review): the domain module is deprecated in modern Node — prefer other
 * error-handling mechanisms; confirm against current Node docs.
 */
declare module "domain" {
    import * as events from "events";
    export class Domain extends events.EventEmitter implements NodeJS.Domain {
        /** Runs `fn` inside the domain so its errors are routed here. */
        run(fn: Function): void;
        add(emitter: events.EventEmitter): void;
        remove(emitter: events.EventEmitter): void;
        /** Wraps a callback so thrown errors are intercepted by the domain. */
        bind(cb: (err: Error, data: any) => any): any;
        /** Like bind(), but also intercepts the first (error) argument. */
        intercept(cb: (data: any) => any): any;
        dispose(): void;
        members: any[];
        enter(): void;
        exit(): void;
    }
    export function create(): Domain;
}
/**
 * Type declarations for Node's "constants" module: OS errno values, Windows
 * Winsock (WSA*) errors, POSIX signals, OpenSSL option flags, and fs
 * open/access/mode bits.  Actual numeric values are platform-dependent.
 */
declare module "constants" {
    // --- POSIX errno values ---
    export var E2BIG: number;
    export var EACCES: number;
    export var EADDRINUSE: number;
    export var EADDRNOTAVAIL: number;
    export var EAFNOSUPPORT: number;
    export var EAGAIN: number;
    export var EALREADY: number;
    export var EBADF: number;
    export var EBADMSG: number;
    export var EBUSY: number;
    export var ECANCELED: number;
    export var ECHILD: number;
    export var ECONNABORTED: number;
    export var ECONNREFUSED: number;
    export var ECONNRESET: number;
    export var EDEADLK: number;
    export var EDESTADDRREQ: number;
    export var EDOM: number;
    export var EEXIST: number;
    export var EFAULT: number;
    export var EFBIG: number;
    export var EHOSTUNREACH: number;
    export var EIDRM: number;
    export var EILSEQ: number;
    export var EINPROGRESS: number;
    export var EINTR: number;
    export var EINVAL: number;
    export var EIO: number;
    export var EISCONN: number;
    export var EISDIR: number;
    export var ELOOP: number;
    export var EMFILE: number;
    export var EMLINK: number;
    export var EMSGSIZE: number;
    export var ENAMETOOLONG: number;
    export var ENETDOWN: number;
    export var ENETRESET: number;
    export var ENETUNREACH: number;
    export var ENFILE: number;
    export var ENOBUFS: number;
    export var ENODATA: number;
    export var ENODEV: number;
    export var ENOENT: number;
    export var ENOEXEC: number;
    export var ENOLCK: number;
    export var ENOLINK: number;
    export var ENOMEM: number;
    export var ENOMSG: number;
    export var ENOPROTOOPT: number;
    export var ENOSPC: number;
    export var ENOSR: number;
    export var ENOSTR: number;
    export var ENOSYS: number;
    export var ENOTCONN: number;
    export var ENOTDIR: number;
    export var ENOTEMPTY: number;
    export var ENOTSOCK: number;
    export var ENOTSUP: number;
    export var ENOTTY: number;
    export var ENXIO: number;
    export var EOPNOTSUPP: number;
    export var EOVERFLOW: number;
    export var EPERM: number;
    export var EPIPE: number;
    export var EPROTO: number;
    export var EPROTONOSUPPORT: number;
    export var EPROTOTYPE: number;
    export var ERANGE: number;
    export var EROFS: number;
    export var ESPIPE: number;
    export var ESRCH: number;
    export var ETIME: number;
    export var ETIMEDOUT: number;
    export var ETXTBSY: number;
    export var EWOULDBLOCK: number;
    export var EXDEV: number;
    // --- Windows Winsock error codes ---
    export var WSAEINTR: number;
    export var WSAEBADF: number;
    export var WSAEACCES: number;
    export var WSAEFAULT: number;
    export var WSAEINVAL: number;
    export var WSAEMFILE: number;
    export var WSAEWOULDBLOCK: number;
    export var WSAEINPROGRESS: number;
    export var WSAEALREADY: number;
    export var WSAENOTSOCK: number;
    export var WSAEDESTADDRREQ: number;
    export var WSAEMSGSIZE: number;
    export var WSAEPROTOTYPE: number;
    export var WSAENOPROTOOPT: number;
    export var WSAEPROTONOSUPPORT: number;
    export var WSAESOCKTNOSUPPORT: number;
    export var WSAEOPNOTSUPP: number;
    export var WSAEPFNOSUPPORT: number;
    export var WSAEAFNOSUPPORT: number;
    export var WSAEADDRINUSE: number;
    export var WSAEADDRNOTAVAIL: number;
    export var WSAENETDOWN: number;
    export var WSAENETUNREACH: number;
    export var WSAENETRESET: number;
    export var WSAECONNABORTED: number;
    export var WSAECONNRESET: number;
    export var WSAENOBUFS: number;
    export var WSAEISCONN: number;
    export var WSAENOTCONN: number;
    export var WSAESHUTDOWN: number;
    export var WSAETOOMANYREFS: number;
    export var WSAETIMEDOUT: number;
    export var WSAECONNREFUSED: number;
    export var WSAELOOP: number;
    export var WSAENAMETOOLONG: number;
    export var WSAEHOSTDOWN: number;
    export var WSAEHOSTUNREACH: number;
    export var WSAENOTEMPTY: number;
    export var WSAEPROCLIM: number;
    export var WSAEUSERS: number;
    export var WSAEDQUOT: number;
    export var WSAESTALE: number;
    export var WSAEREMOTE: number;
    export var WSASYSNOTREADY: number;
    export var WSAVERNOTSUPPORTED: number;
    export var WSANOTINITIALISED: number;
    export var WSAEDISCON: number;
    export var WSAENOMORE: number;
    export var WSAECANCELLED: number;
    export var WSAEINVALIDPROCTABLE: number;
    export var WSAEINVALIDPROVIDER: number;
    export var WSAEPROVIDERFAILEDINIT: number;
    export var WSASYSCALLFAILURE: number;
    export var WSASERVICE_NOT_FOUND: number;
    export var WSATYPE_NOT_FOUND: number;
    export var WSA_E_NO_MORE: number;
    export var WSA_E_CANCELLED: number;
    export var WSAEREFUSED: number;
    // --- Signals (portable subset) ---
    export var SIGHUP: number;
    export var SIGINT: number;
    export var SIGILL: number;
    export var SIGABRT: number;
    export var SIGFPE: number;
    export var SIGKILL: number;
    export var SIGSEGV: number;
    export var SIGTERM: number;
    export var SIGBREAK: number;
    export var SIGWINCH: number;
    // --- OpenSSL SSL_OP_* option flags ---
    export var SSL_OP_ALL: number;
    export var SSL_OP_ALLOW_UNSAFE_LEGACY_RENEGOTIATION: number;
    export var SSL_OP_CIPHER_SERVER_PREFERENCE: number;
    export var SSL_OP_CISCO_ANYCONNECT: number;
    export var SSL_OP_COOKIE_EXCHANGE: number;
    export var SSL_OP_CRYPTOPRO_TLSEXT_BUG: number;
    export var SSL_OP_DONT_INSERT_EMPTY_FRAGMENTS: number;
    export var SSL_OP_EPHEMERAL_RSA: number;
    export var SSL_OP_LEGACY_SERVER_CONNECT: number;
    export var SSL_OP_MICROSOFT_BIG_SSLV3_BUFFER: number;
    export var SSL_OP_MICROSOFT_SESS_ID_BUG: number;
    export var SSL_OP_MSIE_SSLV2_RSA_PADDING: number;
    export var SSL_OP_NETSCAPE_CA_DN_BUG: number;
    export var SSL_OP_NETSCAPE_CHALLENGE_BUG: number;
    export var SSL_OP_NETSCAPE_DEMO_CIPHER_CHANGE_BUG: number;
    export var SSL_OP_NETSCAPE_REUSE_CIPHER_CHANGE_BUG: number;
    export var SSL_OP_NO_COMPRESSION: number;
    export var SSL_OP_NO_QUERY_MTU: number;
    export var SSL_OP_NO_SESSION_RESUMPTION_ON_RENEGOTIATION: number;
    export var SSL_OP_NO_SSLv2: number;
    export var SSL_OP_NO_SSLv3: number;
    export var SSL_OP_NO_TICKET: number;
    export var SSL_OP_NO_TLSv1: number;
    export var SSL_OP_NO_TLSv1_1: number;
    export var SSL_OP_NO_TLSv1_2: number;
    export var SSL_OP_PKCS1_CHECK_1: number;
    export var SSL_OP_PKCS1_CHECK_2: number;
    export var SSL_OP_SINGLE_DH_USE: number;
    export var SSL_OP_SINGLE_ECDH_USE: number;
    export var SSL_OP_SSLEAY_080_CLIENT_DH_BUG: number;
    export var SSL_OP_SSLREF2_REUSE_CERT_TYPE_BUG: number;
    export var SSL_OP_TLS_BLOCK_PADDING_BUG: number;
    export var SSL_OP_TLS_D5_BUG: number;
    export var SSL_OP_TLS_ROLLBACK_BUG: number;
    // --- OpenSSL engine method flags ---
    export var ENGINE_METHOD_DSA: number;
    export var ENGINE_METHOD_DH: number;
    export var ENGINE_METHOD_RAND: number;
    export var ENGINE_METHOD_ECDH: number;
    export var ENGINE_METHOD_ECDSA: number;
    export var ENGINE_METHOD_CIPHERS: number;
    export var ENGINE_METHOD_DIGESTS: number;
    export var ENGINE_METHOD_STORE: number;
    export var ENGINE_METHOD_PKEY_METHS: number;
    export var ENGINE_METHOD_PKEY_ASN1_METHS: number;
    export var ENGINE_METHOD_ALL: number;
    export var ENGINE_METHOD_NONE: number;
    // --- Diffie-Hellman / RSA / EC point conversion flags ---
    export var DH_CHECK_P_NOT_SAFE_PRIME: number;
    export var DH_CHECK_P_NOT_PRIME: number;
    export var DH_UNABLE_TO_CHECK_GENERATOR: number;
    export var DH_NOT_SUITABLE_GENERATOR: number;
    export var NPN_ENABLED: number;
    export var RSA_PKCS1_PADDING: number;
    export var RSA_SSLV23_PADDING: number;
    export var RSA_NO_PADDING: number;
    export var RSA_PKCS1_OAEP_PADDING: number;
    export var RSA_X931_PADDING: number;
    export var RSA_PKCS1_PSS_PADDING: number;
    export var POINT_CONVERSION_COMPRESSED: number;
    export var POINT_CONVERSION_UNCOMPRESSED: number;
    export var POINT_CONVERSION_HYBRID: number;
    // --- fs open() flags and stat mode bits ---
    export var O_RDONLY: number;
    export var O_WRONLY: number;
    export var O_RDWR: number;
    export var S_IFMT: number;
    export var S_IFREG: number;
    export var S_IFDIR: number;
    export var S_IFCHR: number;
    export var S_IFBLK: number;
    export var S_IFIFO: number;
    export var S_IFSOCK: number;
    export var S_IRWXU: number;
    export var S_IRUSR: number;
    export var S_IWUSR: number;
    export var S_IXUSR: number;
    export var S_IRWXG: number;
    export var S_IRGRP: number;
    export var S_IWGRP: number;
    export var S_IXGRP: number;
    export var S_IRWXO: number;
    export var S_IROTH: number;
    export var S_IWOTH: number;
    export var S_IXOTH: number;
    export var S_IFLNK: number;
    export var O_CREAT: number;
    export var O_EXCL: number;
    export var O_NOCTTY: number;
    export var O_DIRECTORY: number;
    export var O_NOATIME: number;
    export var O_NOFOLLOW: number;
    export var O_SYNC: number;
    export var O_SYMLINK: number;
    export var O_DIRECT: number;
    export var O_NONBLOCK: number;
    export var O_TRUNC: number;
    export var O_APPEND: number;
    // --- fs access() modes ---
    export var F_OK: number;
    export var R_OK: number;
    export var W_OK: number;
    export var X_OK: number;
    export var UV_UDP_REUSEADDR: number;
    // --- Additional POSIX signals ---
    export var SIGQUIT: number;
    export var SIGTRAP: number;
    export var SIGIOT: number;
    export var SIGBUS: number;
    export var SIGUSR1: number;
    export var SIGUSR2: number;
    export var SIGPIPE: number;
    export var SIGALRM: number;
    export var SIGCHLD: number;
    export var SIGSTKFLT: number;
    export var SIGCONT: number;
    export var SIGSTOP: number;
    export var SIGTSTP: number;
    export var SIGTTIN: number;
    export var SIGTTOU: number;
    export var SIGURG: number;
    export var SIGXCPU: number;
    export var SIGXFSZ: number;
    export var SIGVTALRM: number;
    export var SIGPROF: number;
    export var SIGIO: number;
    export var SIGPOLL: number;
    export var SIGPWR: number;
    export var SIGSYS: number;
    export var SIGUNUSED: number;
    // --- TLS cipher lists and misc ---
    export var defaultCoreCipherList: string;
    export var defaultCipherList: string;
    export var ENGINE_METHOD_RSA: number;
    export var ALPN_ENABLED: number;
}
/** The "process" module simply re-exports the global `process` object. */
declare module "process" {
    export = process;
}
/**
 * Type declarations for Node's "v8" module (heap statistics and engine flags).
 */
declare module "v8" {
    /** Statistics for one V8 heap space, as returned by getHeapSpaceStatistics(). */
    interface HeapSpaceInfo {
        space_name: string;
        space_size: number;
        space_used_size: number;
        space_available_size: number;
        physical_space_size: number;
    }
    // Fixed property-name typo: the runtime object exposes "total_available_size";
    // the previously declared "total_avaialble_size" never existed at runtime.
    export function getHeapStatistics(): { total_heap_size: number, total_heap_size_executable: number, total_physical_size: number, total_available_size: number, used_heap_size: number, heap_size_limit: number };
    export function getHeapSpaceStatistics(): HeapSpaceInfo[];
    /** Passes a string of V8 command-line flags to the engine; use with care. */
    export function setFlagsFromString(flags: string): void;
}
/**
 * Type declarations for Node's "timers" module — the module-level forms of the
 * global timer functions.
 */
declare module "timers" {
    export function setTimeout(callback: (...args: any[]) => void, ms: number, ...args: any[]): NodeJS.Timer;
    export function clearTimeout(timeoutId: NodeJS.Timer): void;
    export function setInterval(callback: (...args: any[]) => void, ms: number, ...args: any[]): NodeJS.Timer;
    export function clearInterval(intervalId: NodeJS.Timer): void;
    /** Schedules `callback` to run after the current event-loop phase completes. */
    export function setImmediate(callback: (...args: any[]) => void, ...args: any[]): any;
    export function clearImmediate(immediateId: any): void;
}
/** The "console" module simply re-exports the global `console` object. */
declare module "console" {
    export = console;
}
/**
* _debugger module is not documented.
* Source code is at https://github.com/nodejs/node/blob/master/lib/_debugger.js
*/
declare module "_debugger" {
    /** One framed message of the V8 debug wire protocol: raw text, headers, parsed body. */
    export interface Packet {
        raw: string;
        headers: string[];
        body: Message;
    }
    /** Common envelope for requests, responses and events. */
    export interface Message {
        seq: number;
        type: string;
    }
    export interface RequestInfo {
        command: string;
        arguments: any;
    }
    export interface Request extends Message, RequestInfo {
    }
    /** Unsolicited notification from the debuggee (e.g. "break"). */
    export interface Event extends Message {
        event: string;
        body?: any;
    }
    /** Reply correlated to a Request via request_seq. */
    export interface Response extends Message {
        request_seq: number;
        success: boolean;
        /** Contains error message if success === false. */
        message?: string;
        /** Contains message body if success === true. */
        body?: any;
    }
    export interface BreakpointMessageBody {
        type: string;
        target: number;
        line: number;
    }
    /** Incremental parser/serializer for the debug wire protocol. */
    export class Protocol {
        res: Packet;
        state: string;
        execute(data: string): void;
        serialize(rq: Request): string;
        onResponse: (pkt: Packet) => void;
    }
    export var NO_FRAME: number;
    export var port: number;
    /** Metadata for a script loaded in the debuggee. */
    export interface ScriptDesc {
        name: string;
        id: number;
        isNative?: boolean;
        handle?: number;
        type: string;
        lineOffset?: number;
        columnOffset?: number;
        lineCount?: number;
    }
    export interface Breakpoint {
        id: number;
        scriptId: number;
        script: ScriptDesc;
        line: number;
        condition?: string;
        scriptReq?: string;
    }
    /** Callback invoked with a parsed response; request_seq correlates it to its request. */
    export interface RequestHandler {
        (err: boolean, body: Message, res: Packet): void;
        request_seq?: number;
    }
    export interface ResponseBodyHandler {
        (err: boolean, body?: any): void;
        request_seq?: number;
    }
    export interface ExceptionInfo {
        text: string;
    }
    /** Payload of a "break" event: where execution stopped and why. */
    export interface BreakResponse {
        script?: ScriptDesc;
        exception?: ExceptionInfo;
        sourceLine: number;
        sourceLineText: string;
        sourceColumn: number;
    }
    export function SourceInfo(body: BreakResponse): string;
    /** Live debug-client connection to a debuggee process. */
    export interface ClientInstance extends NodeJS.EventEmitter {
        protocol: Protocol;
        scripts: ScriptDesc[];
        handles: ScriptDesc[];
        breakpoints: Breakpoint[];
        currentSourceLine: number;
        currentSourceColumn: number;
        currentSourceLineText: string;
        currentFrame: number;
        currentScript: string;
        connect(port: number, host: string): void;
        req(req: any, cb: RequestHandler): void;
        reqFrameEval(code: string, frame: number, cb: RequestHandler): void;
        mirrorObject(obj: any, depth: number, cb: ResponseBodyHandler): void;
        setBreakpoint(rq: BreakpointMessageBody, cb: RequestHandler): void;
        clearBreakpoint(rq: Request, cb: RequestHandler): void;
        listbreakpoints(cb: RequestHandler): void;
        reqSource(from: number, to: number, cb: RequestHandler): void;
        reqScripts(cb: any): void;
        reqContinue(cb: RequestHandler): void;
    }
    export var Client : {
        new (): ClientInstance
    }
}
| {
"content_hash": "483e28ccb76ad1cb4affcd0d480824d7",
"timestamp": "",
"source": "github",
"line_count": 4101,
"max_line_length": 358,
"avg_line_length": 46.17020238966106,
"alnum_prop": 0.6208329812404935,
"repo_name": "wongkristen/Easy_Eats",
"id": "c3a10b8236531f9c215dd59272f880a23153c5ed",
"size": "189344",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "node_modules/@types/node/index.d.ts",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1019"
},
{
"name": "HTML",
"bytes": "775824"
},
{
"name": "JavaScript",
"bytes": "8755"
},
{
"name": "TypeScript",
"bytes": "18897"
}
],
"symlink_target": ""
} |
namespace answer
{
class Operation
{
protected:
std::string _name;
virtual void process(Response&, const std::string ¶ms, const Context::Accepts &accepts) = 0;
public:
Operation(const std::string &name)
{
size_t pos = name.rfind("::");
_name = (pos != name.npos ? name.substr(pos + 2) : name);
}
virtual ~Operation() {};
// The invocation wrapper
Response invoke(const std::string ¶ms, const Context::Accepts &accepts);
};
template <typename RequestT>
RequestT requestPart(const std::string &name, const std::string ¶ms)
{
RequestT request;
std::istringstream ssIn(params);
{
archive::ws_xml_iarchive inA(ssIn);
inA >> boost::serialization::make_nvp(name.c_str(), request);
}
return request;
}
template <typename ResponseT>
void responsePart(Response &ret, const ResponseT &response, const std::string &operationName, const Context::Accepts &accepts)
{
std::ostringstream encodedReponse;
// Use specific codec if available
for (const auto & accept : accepts)
{
if (codec::Codec(encodedReponse, accept, response))
{
if (accept != "*/*") // Otherwise the Codec should set the contentType via the context
{
ret.contentType(accept);
}
break;
}
}
// Use generic codec
if (ret.contentType().empty())
for (const auto & accept : accepts)
{
if (codec::GenericEncoder(encodedReponse, accept, operationName, response))
{
ret.contentType(accept);
break;
}
}
if (ret.contentType().empty())
throw std::runtime_error("No appropriate codec available");
// TODO: Rework this interface
// If void is overwritten and returns false, use the default
// implementation (Assumes no char template is defined)
std::ostringstream wrappedReponse;
if (!codec::ResponseWrapper<void>(wrappedReponse, encodedReponse.str(), ret.contentType(), nullptr))
{
codec::ResponseWrapper<char>(wrappedReponse, encodedReponse.str(), ret.contentType(), nullptr);
}
ret.body(wrappedReponse.str());
}
//Empty responsePart
void responsePart(Response &ret, const std::string &operationName, const Context::Accepts &accepts);
//The default template is request / response
template <typename Type, typename OperationType, typename RequestT, typename ResponseT, class Strategy >
class OperationHandler: public Operation
{
Strategy _methodHandle;
OperationType _op;
public:
OperationHandler(OperationType op, const std::string &name): Operation(name), _op(op) {}
protected:
void process(Response &ret, const std::string ¶ms, const Context::Accepts &accepts) override
{
RequestT request = requestPart<RequestT>(_name, params);
Type &type(_methodHandle.Instance());
ResponseT response((type.*_op)(request));
responsePart(ret, response, _name, accepts);
}
};
//Partial specialization for Response only
template <typename Type, typename OperationType, typename ResponseT, class Strategy >
class OperationHandler<Type, OperationType, boost::mpl::void_, ResponseT, Strategy>: public Operation
{
Strategy _methodHandle;
OperationType _op;
public:
OperationHandler(OperationType op, const std::string &name): Operation(name), _op(op) {}
protected:
void process(Response &ret, const std::string &, const Context::Accepts &accepts) override
{
Type &type(_methodHandle.Instance());
ResponseT response((type.*_op)());
responsePart(ret, response, _name, accepts);
}
};
//Partial specialization for Request only
template <typename Type, typename OperationType, typename RequestT, class Strategy >
class OperationHandler<Type, OperationType, RequestT, void, Strategy>: public Operation
{
Strategy _methodHandle;
OperationType _op;
public:
OperationHandler(OperationType op, const std::string &name): Operation(name), _op(op) {}
protected:
void process(Response &ret, const std::string ¶ms, const Context::Accepts &accepts) override
{
RequestT request = requestPart<RequestT>(_name, params);
Type &type(_methodHandle.Instance());
(type.*_op)(request);
//TODO: Add empty return concept (JSON needs null or {}) xml needs empty node
responsePart(ret, _name, accepts);
}
};
namespace detail
{
// Declaration of general template
template<typename Pmf> struct class_;
// Partial specialisation for pointers to
// member functions
//Request response
template < typename Result, typename Class,
typename Arg >
struct class_<Result(Class:: *)(Arg)>
{
typedef Class type;
};
//Request response const
template < typename Result, typename Class,
typename Arg >
struct class_<Result(Class:: *)(Arg) const>
{
typedef Class type;
};
//Response
template<typename Result, typename Class>
struct class_<Result(Class:: *)()>
{
typedef Class type;
};
//Response const
template<typename Result, typename Class>
struct class_<Result(Class:: *)() const>
{
typedef Class type;
};
//Request
template<typename Class, typename Arg>
struct class_<void (Class:: *)(Arg)>
{
typedef Class type;
};
//Request const
template<typename Class, typename Arg>
struct class_<void (Class:: *)(Arg) const>
{
typedef Class type;
};
}// namespace detail
	/**
	 * Process-wide singleton registry mapping service/operation names to
	 * their Operation handlers (definitions live in the .cpp).
	 */
	class OperationStore
	{
		// Keyed by a combined service/operation identifier — NOTE(review):
		// exact key format is defined in the implementation file; confirm there.
		std::map<std::string, std::unique_ptr<Operation>> _map;
		OperationStore();
	public:
		// Accessor for the singleton instance.
		static OperationStore &Instance();
		// Registers (takes ownership of) a handler under the given names.
		void registerOperation(const std::string &serviceName, const std::string &operationName, std::unique_ptr< Operation > webMethodHandle);
		// Looks up a previously registered handler.
		Operation &operation(const std::string &serviceName, const std::string &operationName) const;
		std::vector<std::string> operationList();
	};
template <typename Operation>
class RegisterOperation
{
public:
RegisterOperation(const std::string &serviceName, const std::string &operationName, const Operation &op)
{
typedef typename detail::class_<Operation>::type Type;
typedef typename boost::function_types::result_type<Operation>::type
response;
typedef typename boost::mpl::at_c<boost::function_types::parameter_types<Operation>, 1>::type response_type;
using request = typename std::decay<response_type>::type;
typedef OperationHandler<Type, Operation, request, response, instantiation::InstantiationStrategy<Type>> Handler;
try
{
OperationStore::Instance() .registerOperation(serviceName, operationName, std::unique_ptr<Handler>(new Handler(op, operationName)));
}
catch (std::exception &ex)
{
std::cerr << "Error initializing operation [" << serviceName << "::" << operationName << "] : " << ex.what() << std::endl;
}
}
};
}
// Registers ServiceOperation under ServiceName, using the function's own
// (stringised) name as the operation name.  Expands to a file-local
// RegisterOperation instance so registration runs during static initialisation.
#define ANSWER_REGISTER(ServiceOperation, ServiceName) \
namespace {\
	answer::RegisterOperation<BOOST_TYPEOF(&ServiceOperation)> ANSWER_MAKE_UNIQUE(_registrator_)(ServiceName, #ServiceOperation, &ServiceOperation);\
}
// Same as ANSWER_REGISTER, but with an explicitly supplied OperationName.
#define ANSWER_REGISTER_AS(ServiceOperation, OperationName, ServiceName) \
namespace {\
	answer::RegisterOperation<BOOST_TYPEOF(&ServiceOperation)> ANSWER_MAKE_UNIQUE(_registrator_)(ServiceName, OperationName, &ServiceOperation);\
}
#endif //_OPERATION_HH_
| {
"content_hash": "98f7b24814c4c87d31be13f8047ad61a",
"timestamp": "",
"source": "github",
"line_count": 245,
"max_line_length": 145,
"avg_line_length": 28.722448979591835,
"alnum_prop": 0.7133721756430297,
"repo_name": "rfernandes/answer",
"id": "d4e8db0ae46d4092f65d1d884ab3d6d286dddef5",
"size": "7532",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "include/answer/Operation.hh",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "97604"
},
{
"name": "CMake",
"bytes": "15086"
},
{
"name": "Shell",
"bytes": "985"
}
],
"symlink_target": ""
} |
package TestModperl::request_rec_perlio_api;
# this test is relevant only when the PerlIO STDIN/STDOUT are used (when
# $Config{useperlio} is defined.)
use strict;
use warnings FATAL => 'all';
use Apache2::RequestIO ();
use Apache2::RequestRec ();
use Apache::Test;
use File::Spec::Functions qw(catfile catdir);
use Apache2::Const -compile => 'OK';
# Entry point invoked by mod_perl for each request.  The query string selects
# which PerlIO scenario to exercise: "?STDIN" runs the input tests, anything
# else runs the STDOUT tests.
sub handler {
    my $r = shift;
    if ($r->args eq 'STDIN') {
        test_STDIN($r);
    }
    else {
        test_STDOUT($r);
    }
    return Apache2::Const::OK;
}
# Verifies that the :Apache2 PerlIO STDIN (the POST body stream) survives
# being dup'ed away, re-opened onto a file, and then restored — no POST
# bytes may be lost across the swap.
sub test_STDIN {
    my $r = shift;
    {
        # read the first 10 POST chars
        my $data;
        read STDIN, $data, 10;
        print STDOUT $data;
    }
    {
        # re-open STDIN to something else, and then see if we don't
        # lose any chars when we restore it to the POST stream
        open my $stdin, "<&STDIN" or die "Can't dup STDIN: $!";
        # point STDIN at this source file and read a known prefix from it
        open STDIN, "<", __FILE__
            or die "failed to open STDIN as 'in memory' file : $!";
        my $data;
        read STDIN, $data, length("# please");
        print STDOUT $data;
        close STDIN;
        # restore the original POST stream from the dup'ed handle
        open STDIN, "<&", $stdin or die "failed to restore STDIN: $!";
    }
    {
        # read the last 10 POST chars
        my $data;
        read STDIN, $data, 10;
        print STDOUT $data;
    }
}
# Verifies that the :Apache2 PerlIO STDOUT (the client response stream)
# survives being dup'ed, redirected to a file, restored, and finally fully
# closed and re-opened via the :Apache2 layer.
sub test_STDOUT {
    my $r = shift;
    # keep buffering on so the swaps below exercise implicit flushing
    local $| = 0;
    print STDOUT "life is hard ";
    my $vars = Apache::Test::config()->{vars};
    my $target_dir = catdir $vars->{documentroot}, 'perlio';
    my $file = catfile $target_dir, "apache_stdout";
    # re-open STDOUT to something else, and then see if we can
    # continue printing to the client via STDOUT, after restoring it
    open my $stdout, ">&STDOUT" or die "Can't dup STDOUT: $!";
    # this should flush the above print to STDOUT
    open STDOUT, ">", $file or die "Can't open $file: $!";
    print STDOUT "and then ";
    close STDOUT;
    # flush things that went into the file as STDOUT
    open STDOUT, ">&", $stdout or die "failed to restore STDOUT: $!";
    # replay what was captured in the file back to the client
    open my $fh, $file or die "Can't open $file: $!";
    local $\;
    print <$fh>;
    # cleanup
    unlink $file;
    # close the dupped fh
    close $stdout;
    print "you die! ";
    # now close it completely and restore it, without using any dupped
    # filehandle
    close STDOUT;
    open STDOUT, ">:Apache2", $r
        or die "can't open STDOUT via :Apache2 layer : $!";
    print "next you reincarnate...";
}
1;
__DATA__
SetHandler perl-script
| {
"content_hash": "feb6e59f4c931f8f66f135c24dac03e0",
"timestamp": "",
"source": "github",
"line_count": 104,
"max_line_length": 72,
"avg_line_length": 23.76923076923077,
"alnum_prop": 0.5877831715210357,
"repo_name": "dreamhost/dpkg-ndn-perl-mod-perl",
"id": "c8a225f2addea51b3cec7915ffc564a319310bc8",
"size": "2617",
"binary": false,
"copies": "2",
"ref": "refs/heads/httpd24",
"path": "t/response/TestModperl/request_rec_perlio_api.pm",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "688577"
},
{
"name": "Groff",
"bytes": "273"
},
{
"name": "HTML",
"bytes": "51"
},
{
"name": "Max",
"bytes": "1581"
},
{
"name": "Perl",
"bytes": "2849754"
},
{
"name": "Perl6",
"bytes": "43726"
},
{
"name": "XS",
"bytes": "6901"
}
],
"symlink_target": ""
} |
require 'refinery/dashboard'
| {
"content_hash": "2983647e416c2f810d25760c2fd9a46e",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 28,
"avg_line_length": 29,
"alnum_prop": 0.8275862068965517,
"repo_name": "bhanuprasad143/refinerycms-dashboard",
"id": "4ca8b09cfb3519b6efb4a3eb03664672033a3f5d",
"size": "29",
"binary": false,
"copies": "13",
"ref": "refs/heads/master",
"path": "lib/refinerycms-dashboard.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "7794"
}
],
"symlink_target": ""
} |
SYNONYM
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
Report of the Department of Agriculture, Government Research Institute of Formosa 27: 37-38 (1927)
#### Original name
Phytophthora tabaci Sawada, 1927
### Remarks
null | {
"content_hash": "f678829dc7509fa1d1b487ca9b5ce9f3",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 98,
"avg_line_length": 19.615384615384617,
"alnum_prop": 0.7529411764705882,
"repo_name": "mdoering/backbone",
"id": "17ea71e811df37e03f4998677f004ec0b2235613",
"size": "311",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Chromista/Oomycota/Oomycetes/Peronosporales/Peronosporaceae/Phytophthora/Phytophthora nicotianae/ Syn. Phytophthora tabaci/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v10/enums/change_status_resource_type.proto
package com.google.ads.googleads.v10.enums;
/**
* <pre>
* Container for enum describing supported resource types for the ChangeStatus
* resource.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v10.enums.ChangeStatusResourceTypeEnum}
*/
public final class ChangeStatusResourceTypeEnum extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v10.enums.ChangeStatusResourceTypeEnum)
ChangeStatusResourceTypeEnumOrBuilder {
private static final long serialVersionUID = 0L;
  // Use ChangeStatusResourceTypeEnum.newBuilder() to construct.
  // NOTE(review): this file is generated by protoc (see file header) — do not
  // hand-edit; changes are lost on regeneration.
  private ChangeStatusResourceTypeEnum(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Parameterless form used by the protobuf runtime (e.g. newInstance below).
  private ChangeStatusResourceTypeEnum() {
  }
  // Generated factory hook used by the protobuf runtime; the parameter only
  // disambiguates the overload and is never read.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new ChangeStatusResourceTypeEnum();
  }
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v10.enums.ChangeStatusResourceTypeProto.internal_static_google_ads_googleads_v10_enums_ChangeStatusResourceTypeEnum_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v10.enums.ChangeStatusResourceTypeProto.internal_static_google_ads_googleads_v10_enums_ChangeStatusResourceTypeEnum_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v10.enums.ChangeStatusResourceTypeEnum.class, com.google.ads.googleads.v10.enums.ChangeStatusResourceTypeEnum.Builder.class);
}
/**
* <pre>
* Enum listing the resource types support by the ChangeStatus resource.
* </pre>
*
* Protobuf enum {@code google.ads.googleads.v10.enums.ChangeStatusResourceTypeEnum.ChangeStatusResourceType}
*/
public enum ChangeStatusResourceType
implements com.google.protobuf.ProtocolMessageEnum {
/**
* <pre>
* No value has been specified.
* </pre>
*
* <code>UNSPECIFIED = 0;</code>
*/
UNSPECIFIED(0),
/**
* <pre>
* Used for return value only. Represents an unclassified resource unknown
* in this version.
* </pre>
*
* <code>UNKNOWN = 1;</code>
*/
UNKNOWN(1),
/**
* <pre>
* An AdGroup resource change.
* </pre>
*
* <code>AD_GROUP = 3;</code>
*/
AD_GROUP(3),
/**
* <pre>
* An AdGroupAd resource change.
* </pre>
*
* <code>AD_GROUP_AD = 4;</code>
*/
AD_GROUP_AD(4),
/**
* <pre>
* An AdGroupCriterion resource change.
* </pre>
*
* <code>AD_GROUP_CRITERION = 5;</code>
*/
AD_GROUP_CRITERION(5),
/**
* <pre>
* A Campaign resource change.
* </pre>
*
* <code>CAMPAIGN = 6;</code>
*/
CAMPAIGN(6),
/**
* <pre>
* A CampaignCriterion resource change.
* </pre>
*
* <code>CAMPAIGN_CRITERION = 7;</code>
*/
CAMPAIGN_CRITERION(7),
/**
* <pre>
* A Feed resource change.
* </pre>
*
* <code>FEED = 9;</code>
*/
FEED(9),
/**
* <pre>
* A FeedItem resource change.
* </pre>
*
* <code>FEED_ITEM = 10;</code>
*/
FEED_ITEM(10),
/**
* <pre>
* An AdGroupFeed resource change.
* </pre>
*
* <code>AD_GROUP_FEED = 11;</code>
*/
AD_GROUP_FEED(11),
/**
* <pre>
* A CampaignFeed resource change.
* </pre>
*
* <code>CAMPAIGN_FEED = 12;</code>
*/
CAMPAIGN_FEED(12),
/**
* <pre>
* An AdGroupBidModifier resource change.
* </pre>
*
* <code>AD_GROUP_BID_MODIFIER = 13;</code>
*/
AD_GROUP_BID_MODIFIER(13),
/**
* <pre>
* A SharedSet resource change.
* </pre>
*
* <code>SHARED_SET = 14;</code>
*/
SHARED_SET(14),
/**
* <pre>
* A CampaignSharedSet resource change.
* </pre>
*
* <code>CAMPAIGN_SHARED_SET = 15;</code>
*/
CAMPAIGN_SHARED_SET(15),
/**
* <pre>
* An Asset resource change.
* </pre>
*
* <code>ASSET = 16;</code>
*/
ASSET(16),
/**
* <pre>
* A CustomerAsset resource change.
* </pre>
*
* <code>CUSTOMER_ASSET = 17;</code>
*/
CUSTOMER_ASSET(17),
/**
* <pre>
* A CampaignAsset resource change.
* </pre>
*
* <code>CAMPAIGN_ASSET = 18;</code>
*/
CAMPAIGN_ASSET(18),
/**
* <pre>
* An AdGroupAsset resource change.
* </pre>
*
* <code>AD_GROUP_ASSET = 19;</code>
*/
AD_GROUP_ASSET(19),
UNRECOGNIZED(-1),
;
/**
* <pre>
* No value has been specified.
* </pre>
*
* <code>UNSPECIFIED = 0;</code>
*/
public static final int UNSPECIFIED_VALUE = 0;
/**
* <pre>
* Used for return value only. Represents an unclassified resource unknown
* in this version.
* </pre>
*
* <code>UNKNOWN = 1;</code>
*/
public static final int UNKNOWN_VALUE = 1;
/**
* <pre>
* An AdGroup resource change.
* </pre>
*
* <code>AD_GROUP = 3;</code>
*/
public static final int AD_GROUP_VALUE = 3;
/**
* <pre>
* An AdGroupAd resource change.
* </pre>
*
* <code>AD_GROUP_AD = 4;</code>
*/
public static final int AD_GROUP_AD_VALUE = 4;
/**
* <pre>
* An AdGroupCriterion resource change.
* </pre>
*
* <code>AD_GROUP_CRITERION = 5;</code>
*/
public static final int AD_GROUP_CRITERION_VALUE = 5;
/**
* <pre>
* A Campaign resource change.
* </pre>
*
* <code>CAMPAIGN = 6;</code>
*/
public static final int CAMPAIGN_VALUE = 6;
/**
* <pre>
* A CampaignCriterion resource change.
* </pre>
*
* <code>CAMPAIGN_CRITERION = 7;</code>
*/
public static final int CAMPAIGN_CRITERION_VALUE = 7;
/**
* <pre>
* A Feed resource change.
* </pre>
*
* <code>FEED = 9;</code>
*/
public static final int FEED_VALUE = 9;
/**
* <pre>
* A FeedItem resource change.
* </pre>
*
* <code>FEED_ITEM = 10;</code>
*/
public static final int FEED_ITEM_VALUE = 10;
/**
* <pre>
* An AdGroupFeed resource change.
* </pre>
*
* <code>AD_GROUP_FEED = 11;</code>
*/
public static final int AD_GROUP_FEED_VALUE = 11;
/**
* <pre>
* A CampaignFeed resource change.
* </pre>
*
* <code>CAMPAIGN_FEED = 12;</code>
*/
public static final int CAMPAIGN_FEED_VALUE = 12;
/**
* <pre>
* An AdGroupBidModifier resource change.
* </pre>
*
* <code>AD_GROUP_BID_MODIFIER = 13;</code>
*/
public static final int AD_GROUP_BID_MODIFIER_VALUE = 13;
/**
* <pre>
* A SharedSet resource change.
* </pre>
*
* <code>SHARED_SET = 14;</code>
*/
public static final int SHARED_SET_VALUE = 14;
/**
* <pre>
* A CampaignSharedSet resource change.
* </pre>
*
* <code>CAMPAIGN_SHARED_SET = 15;</code>
*/
public static final int CAMPAIGN_SHARED_SET_VALUE = 15;
/**
* <pre>
* An Asset resource change.
* </pre>
*
* <code>ASSET = 16;</code>
*/
public static final int ASSET_VALUE = 16;
/**
* <pre>
* A CustomerAsset resource change.
* </pre>
*
* <code>CUSTOMER_ASSET = 17;</code>
*/
public static final int CUSTOMER_ASSET_VALUE = 17;
/**
* <pre>
* A CampaignAsset resource change.
* </pre>
*
* <code>CAMPAIGN_ASSET = 18;</code>
*/
public static final int CAMPAIGN_ASSET_VALUE = 18;
/**
* <pre>
* An AdGroupAsset resource change.
* </pre>
*
* <code>AD_GROUP_ASSET = 19;</code>
*/
public static final int AD_GROUP_ASSET_VALUE = 19;
public final int getNumber() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalArgumentException(
"Can't get the number of an unknown enum value.");
}
return value;
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static ChangeStatusResourceType valueOf(int value) {
return forNumber(value);
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
*/
public static ChangeStatusResourceType forNumber(int value) {
switch (value) {
case 0: return UNSPECIFIED;
case 1: return UNKNOWN;
case 3: return AD_GROUP;
case 4: return AD_GROUP_AD;
case 5: return AD_GROUP_CRITERION;
case 6: return CAMPAIGN;
case 7: return CAMPAIGN_CRITERION;
case 9: return FEED;
case 10: return FEED_ITEM;
case 11: return AD_GROUP_FEED;
case 12: return CAMPAIGN_FEED;
case 13: return AD_GROUP_BID_MODIFIER;
case 14: return SHARED_SET;
case 15: return CAMPAIGN_SHARED_SET;
case 16: return ASSET;
case 17: return CUSTOMER_ASSET;
case 18: return CAMPAIGN_ASSET;
case 19: return AD_GROUP_ASSET;
default: return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<ChangeStatusResourceType>
internalGetValueMap() {
return internalValueMap;
}
private static final com.google.protobuf.Internal.EnumLiteMap<
ChangeStatusResourceType> internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<ChangeStatusResourceType>() {
public ChangeStatusResourceType findValueByNumber(int number) {
return ChangeStatusResourceType.forNumber(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalStateException(
"Can't get the descriptor of an unrecognized enum value.");
}
return getDescriptor().getValues().get(ordinal());
}
public final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return com.google.ads.googleads.v10.enums.ChangeStatusResourceTypeEnum.getDescriptor().getEnumTypes().get(0);
}
private static final ChangeStatusResourceType[] VALUES = values();
public static ChangeStatusResourceType valueOf(
com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
if (desc.getIndex() == -1) {
return UNRECOGNIZED;
}
return VALUES[desc.getIndex()];
}
private final int value;
private ChangeStatusResourceType(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:google.ads.googleads.v10.enums.ChangeStatusResourceTypeEnum.ChangeStatusResourceType)
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.ads.googleads.v10.enums.ChangeStatusResourceTypeEnum)) {
return super.equals(obj);
}
com.google.ads.googleads.v10.enums.ChangeStatusResourceTypeEnum other = (com.google.ads.googleads.v10.enums.ChangeStatusResourceTypeEnum) obj;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.ads.googleads.v10.enums.ChangeStatusResourceTypeEnum parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v10.enums.ChangeStatusResourceTypeEnum parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v10.enums.ChangeStatusResourceTypeEnum parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v10.enums.ChangeStatusResourceTypeEnum parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v10.enums.ChangeStatusResourceTypeEnum parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v10.enums.ChangeStatusResourceTypeEnum parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v10.enums.ChangeStatusResourceTypeEnum parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v10.enums.ChangeStatusResourceTypeEnum parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v10.enums.ChangeStatusResourceTypeEnum parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v10.enums.ChangeStatusResourceTypeEnum parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v10.enums.ChangeStatusResourceTypeEnum parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v10.enums.ChangeStatusResourceTypeEnum parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v10.enums.ChangeStatusResourceTypeEnum prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* Container for enum describing supported resource types for the ChangeStatus
* resource.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v10.enums.ChangeStatusResourceTypeEnum}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v10.enums.ChangeStatusResourceTypeEnum)
com.google.ads.googleads.v10.enums.ChangeStatusResourceTypeEnumOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v10.enums.ChangeStatusResourceTypeProto.internal_static_google_ads_googleads_v10_enums_ChangeStatusResourceTypeEnum_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v10.enums.ChangeStatusResourceTypeProto.internal_static_google_ads_googleads_v10_enums_ChangeStatusResourceTypeEnum_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v10.enums.ChangeStatusResourceTypeEnum.class, com.google.ads.googleads.v10.enums.ChangeStatusResourceTypeEnum.Builder.class);
}
// Construct using com.google.ads.googleads.v10.enums.ChangeStatusResourceTypeEnum.newBuilder()
private Builder() {
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v10.enums.ChangeStatusResourceTypeProto.internal_static_google_ads_googleads_v10_enums_ChangeStatusResourceTypeEnum_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v10.enums.ChangeStatusResourceTypeEnum getDefaultInstanceForType() {
return com.google.ads.googleads.v10.enums.ChangeStatusResourceTypeEnum.getDefaultInstance();
}
@java.lang.Override
public com.google.ads.googleads.v10.enums.ChangeStatusResourceTypeEnum build() {
com.google.ads.googleads.v10.enums.ChangeStatusResourceTypeEnum result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.ads.googleads.v10.enums.ChangeStatusResourceTypeEnum buildPartial() {
com.google.ads.googleads.v10.enums.ChangeStatusResourceTypeEnum result = new com.google.ads.googleads.v10.enums.ChangeStatusResourceTypeEnum(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v10.enums.ChangeStatusResourceTypeEnum) {
return mergeFrom((com.google.ads.googleads.v10.enums.ChangeStatusResourceTypeEnum)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.ads.googleads.v10.enums.ChangeStatusResourceTypeEnum other) {
if (other == com.google.ads.googleads.v10.enums.ChangeStatusResourceTypeEnum.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v10.enums.ChangeStatusResourceTypeEnum)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v10.enums.ChangeStatusResourceTypeEnum)
private static final com.google.ads.googleads.v10.enums.ChangeStatusResourceTypeEnum DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.ads.googleads.v10.enums.ChangeStatusResourceTypeEnum();
}
public static com.google.ads.googleads.v10.enums.ChangeStatusResourceTypeEnum getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ChangeStatusResourceTypeEnum>
PARSER = new com.google.protobuf.AbstractParser<ChangeStatusResourceTypeEnum>() {
@java.lang.Override
public ChangeStatusResourceTypeEnum parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ChangeStatusResourceTypeEnum> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ChangeStatusResourceTypeEnum> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.ads.googleads.v10.enums.ChangeStatusResourceTypeEnum getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
| {
"content_hash": "8f6d59d1f742cfc4a74b718c3cf0985a",
"timestamp": "",
"source": "github",
"line_count": 813,
"max_line_length": 172,
"avg_line_length": 31.57318573185732,
"alnum_prop": 0.6625891152752347,
"repo_name": "googleads/google-ads-java",
"id": "45aed1f4b86c99afdc26cdadfc90ad5835465915",
"size": "25669",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "google-ads-stubs-v10/src/main/java/com/google/ads/googleads/v10/enums/ChangeStatusResourceTypeEnum.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "28701198"
}
],
"symlink_target": ""
} |
#include <stdio.h>
#include <stdlib.h>
#include "agent.h"
/*
 * End-to-end example for the IoticHttp C agent API:
 * configure the agent, list entities, create an entity and a feed point,
 * share data on the point, poll twice for received feed data, then delete
 * the entity and tear the agent down. Each request uses a freshly
 * initialised/destroyed payload.
 */
int main(void) {
    IOTICAGENT *agent = ioticagent_init();
    if(agent) {
        /* Point the agent at a local IoticHttp instance with demo credentials. */
        ioticagent_setopt(agent, IOTICOPT_VERBOSE, 1);
        ioticagent_setopt(agent, IOTICOPT_HOST, "http://localhost");
        ioticagent_setopt(agent, IOTICOPT_PORT, 8118);
        ioticagent_setopt(agent, IOTICOPT_HTTP_EPID, "a81ffa72113e29b9f1ff210dd9725249");
        ioticagent_setopt(agent, IOTICOPT_HTTP_AUTHTOKEN, "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855");
        iotic_agent_payload payload;
        if(ioticagent_payload_init(&payload) == IOTICE_OK) {
            /* entity_list */
            IoticAgentCode res = ioticagent_entity_list(agent, &payload);
            if(res == IOTICE_OK) {
                printf("\nentity_list OK: %s\n", payload.payload);
            }
            else {
                /* error-reporting example (disabled):
                if(ioticagent_geterr(agent, payloadbuf, 1024) == IOTICE_OK) {
                    printf("got error: %s", payloadbuf);
                }*/
            }
            ioticagent_payload_destroy(&payload);
        }
        if(ioticagent_payload_init(&payload) == IOTICE_OK) {
            /* entity create */
            IoticAgentCode res = ioticagent_entity_create(agent, "testthing", NULL, &payload);
            if(res == IOTICE_OK) {
                printf("\nentity_create OK: %s\n", payload.payload);
            }
            ioticagent_payload_destroy(&payload);
        }
        if(ioticagent_payload_init(&payload) == IOTICE_OK) {
            /* point create */
            IoticAgentCode res = ioticagent_point_create(agent, IOTICFOC_FEED, "testthing", "data", &payload);
            if(res == IOTICE_OK) {
                printf("\npoint_create OK: %s\n", payload.payload);
            }
            ioticagent_payload_destroy(&payload);
        }
        if(ioticagent_payload_init(&payload) == IOTICE_OK) {
            /* point share */
            IoticAgentCode res = ioticagent_point_share(agent, "testthing", "data", "timisafish", 10, &payload);
            if(res == IOTICE_OK) {
                printf("\npoint_share OK: %s\n", payload.payload);
            }
            ioticagent_payload_destroy(&payload);
        }
        if(ioticagent_payload_init(&payload) == IOTICE_OK) {
            /* feed data: poll for received feed data (comment previously mislabelled "point share") */
            IoticAgentCode res = ioticagent_feeddata(agent, &payload);
            if(res == IOTICE_OK) {
                printf("\nfeeddata OK: %s\n", payload.payload);
            }
            ioticagent_payload_destroy(&payload);
        }
        if(ioticagent_payload_init(&payload) == IOTICE_OK) {
            /* feed data: second poll — presumably to drain anything queued after the first, TODO confirm */
            IoticAgentCode res = ioticagent_feeddata(agent, &payload);
            if(res == IOTICE_OK) {
                printf("OK: %s\n", payload.payload);
            }
            ioticagent_payload_destroy(&payload);
        }
        if(ioticagent_payload_init(&payload) == IOTICE_OK) {
            /* entity delete */
            IoticAgentCode res = ioticagent_entity_delete(agent, "testthing", &payload);
            if(res == IOTICE_OK) {
                printf("\nentity_delete OK: %s\n", payload.payload);
            }
            ioticagent_payload_destroy(&payload);
        }
        ioticagent_destroy(agent);
    }
    return 0;
}
| {
"content_hash": "376862e53577177900da4228e27ce922",
"timestamp": "",
"source": "github",
"line_count": 89,
"max_line_length": 126,
"avg_line_length": 37.235955056179776,
"alnum_prop": 0.5464695232347616,
"repo_name": "Iotic-Labs/IoticHttp",
"id": "bcfdd098f7a95e414dfacc6f9db175234e6d6866",
"size": "3498",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/c/examples/linux/http_entity_list.c",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "96475"
},
{
"name": "Shell",
"bytes": "6608"
}
],
"symlink_target": ""
} |
using System.IO;
using System.Text;
namespace Alphaleonis.Win32.Filesystem
{
   /// <summary>Shared constants and OS-version probes used by the native method declarations.</summary>
   internal static partial class NativeMethods
   {
      public static readonly bool IsAtLeastWindows8 = OperatingSystem.IsAtLeast(OperatingSystem.EnumOsName.Windows8);
      public static readonly bool IsAtLeastWindows7 = OperatingSystem.IsAtLeast(OperatingSystem.EnumOsName.Windows7);
      public static readonly bool IsAtLeastWindowsVista = OperatingSystem.IsAtLeast(OperatingSystem.EnumOsName.WindowsVista);
      /// <summary>The FindFirstFileEx function does not query the short file name, improving overall enumeration speed.
      /// <para>&#160;</para>
      /// <remarks>
      /// <para>The data is returned in a <see cref="WIN32_FIND_DATA"/> structure,</para>
      /// <para>and cAlternateFileName member is always a NULL string.</para>
      /// <para>This value is not supported until Windows Server 2008 R2 and Windows 7.</para>
      /// </remarks>
      /// </summary>
      public static readonly FINDEX_INFO_LEVELS FindexInfoLevel = IsAtLeastWindows7 ? FINDEX_INFO_LEVELS.Basic : FINDEX_INFO_LEVELS.Standard;
      /// <summary>Uses a larger buffer for directory queries, which can increase performance of the find operation.</summary>
      /// <remarks>This value is not supported until Windows Server 2008 R2 and Windows 7.</remarks>
      public static readonly FIND_FIRST_EX_FLAGS UseLargeCache = IsAtLeastWindows7 ? FIND_FIRST_EX_FLAGS.LARGE_FETCH : FIND_FIRST_EX_FLAGS.NONE;
      /// <summary>DefaultFileBufferSize = 4096; Default type buffer size used for reading and writing files.</summary>
      public const int DefaultFileBufferSize = 4096;
      /// <summary>DefaultFileEncoding = Encoding.UTF8; Default type of Encoding used for reading and writing files.</summary>
      public static readonly Encoding DefaultFileEncoding = Encoding.UTF8;
      /// <summary>MaxDirectoryLength = 255</summary>
      internal const int MaxDirectoryLength = 255;
      /// <summary>MaxPath = 260
      /// The specified path, file name, or both exceed the system-defined maximum length.
      /// For example, on Windows-based platforms, paths must be less than 248 characters, and file names must be less than 260 characters.
      /// </summary>
      internal const int MaxPath = 260;
      /// <summary>MaxPathUnicode = 32700</summary>
      internal const int MaxPathUnicode = 32700;
      /// <summary>When an exception is raised, bit shifting is needed to prevent: "System.OverflowException: Arithmetic operation resulted in an overflow."</summary>
      internal const int OverflowExceptionBitShift = 65535;
      /// <summary>Invalid FileAttributes = -1</summary>
      internal const FileAttributes InvalidFileAttributes = (FileAttributes) (-1);
      /// <summary>MAXIMUM_REPARSE_DATA_BUFFER_SIZE = 16384</summary>
      internal const int MAXIMUM_REPARSE_DATA_BUFFER_SIZE = 16384;
      /// <summary>REPARSE_DATA_BUFFER_HEADER_SIZE = 8</summary>
      internal const int REPARSE_DATA_BUFFER_HEADER_SIZE = 8;
      // Constituents of the FSCTL_* control codes composed below; the
      // (device &lt;&lt; 16) | (function &lt;&lt; 2) | method layout matches the Win32 CTL_CODE macro.
      private const int DeviceIoControlMethodBuffered = 0;
      private const int DeviceIoControlFileDeviceFileSystem = 9;
      /// <summary>Command to set the compression state of a file or directory on a volume whose file system supports per-file and per-directory compression.</summary>
      internal const int FSCTL_SET_COMPRESSION = (DeviceIoControlFileDeviceFileSystem << 16) | (16 << 2) | DeviceIoControlMethodBuffered | (int) (FileAccess.Read | FileAccess.Write) << 14;
      /// <summary>Command to set the reparse point data block.</summary>
      internal const int FSCTL_SET_REPARSE_POINT = (DeviceIoControlFileDeviceFileSystem << 16) | (41 << 2) | DeviceIoControlMethodBuffered | (0 << 14);
      /// <summary>Command to delete the reparse point data block.</summary>
      internal const int FSCTL_DELETE_REPARSE_POINT = (DeviceIoControlFileDeviceFileSystem << 16) | (43 << 2) | DeviceIoControlMethodBuffered | (0 << 14);
      /// <summary>Command to get the reparse point data block.</summary>
      internal const int FSCTL_GET_REPARSE_POINT = (DeviceIoControlFileDeviceFileSystem << 16) | (42 << 2) | DeviceIoControlMethodBuffered | (0 << 14);
   }
}
| {
"content_hash": "3e042d1e92bdb23ca396c06d4b6772de",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 188,
"avg_line_length": 53.31645569620253,
"alnum_prop": 0.7188983855650523,
"repo_name": "alphaleonis/AlphaFS",
"id": "8590b00e96bdb806a3e1b9cef2e1073deeea4026",
"size": "5403",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "src/AlphaFS/Filesystem/Native Methods/NativeMethods.Constants.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "5249993"
},
{
"name": "PowerShell",
"bytes": "8016"
}
],
"symlink_target": ""
} |
package edu.isi.karma.kr2rml.planning;
public class RootStrategy {
protected RootStrategy backupStrategy;
public RootStrategy()
{
}
public RootStrategy(RootStrategy backupStrategy)
{
this.backupStrategy = backupStrategy;
}
public String findRoot(TriplesMapGraph graph){
if(graph.getTriplesMapIds().isEmpty())
{
return null;
}
else
{
return graph.getTriplesMapIds().iterator().next();
}
}
}
| {
"content_hash": "e678be3e0d3a4320a242ee4388efa991",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 53,
"avg_line_length": 16.92,
"alnum_prop": 0.7257683215130024,
"repo_name": "tushart91/spring-usc",
"id": "6f5ddeb3ac70491df8ac80ff7304fca426d220f2",
"size": "1485",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "Information Integration/Homework/HW8/Web-Karma-master/karma-common/src/main/java/edu/isi/karma/kr2rml/planning/RootStrategy.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "175970"
},
{
"name": "HTML",
"bytes": "4552572"
},
{
"name": "Java",
"bytes": "4721453"
},
{
"name": "JavaScript",
"bytes": "933366"
},
{
"name": "Makefile",
"bytes": "575"
},
{
"name": "Python",
"bytes": "1094563"
},
{
"name": "TeX",
"bytes": "17749"
},
{
"name": "Web Ontology Language",
"bytes": "6560"
}
],
"symlink_target": ""
} |
package com.linusblom.dart2.models.dartboard.shape;
import javafx.scene.shape.Path;
/**
 * A JavaFX {@link Path} that additionally remembers the centre coordinates
 * supplied at construction, so callers can later query the point the path
 * was built around.
 */
public class CustomPath extends Path {

	/** X coordinate of the centre this path was built around. */
	private final double centerX;
	/** Y coordinate of the centre this path was built around. */
	private final double centerY;

	/**
	 * @param centerX x coordinate of the path's reference centre
	 * @param centerY y coordinate of the path's reference centre
	 */
	public CustomPath(final double centerX, final double centerY) {
		this.centerX = centerX;
		this.centerY = centerY;
	}

	/** @return the x coordinate of the reference centre */
	public double getCenterX() {
		return centerX;
	}

	/** @return the y coordinate of the reference centre */
	public double getCenterY() {
		return centerY;
	}
}
| {
"content_hash": "8bcefe8f3d5919182f17b77cca6debc7",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 55,
"avg_line_length": 19.954545454545453,
"alnum_prop": 0.6605922551252847,
"repo_name": "linusblom/Dart2",
"id": "cbff79dca16177a024ddf7da2b2c63a664e3695d",
"size": "439",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/com/linusblom/dart2/models/dartboard/shape/CustomPath.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "3661"
},
{
"name": "Java",
"bytes": "100234"
}
],
"symlink_target": ""
} |
#include <vector>
//========================================================================================================
struct VideoPathInfo
{
VideoPathInfo(){}
VideoPathInfo(const TString &path)
{
filePath = path;
int len = ::GetFileTitle(path.c_str(), NULL, 0);
TCHAR *buf = new TCHAR[len];
::GetFileTitle(path.c_str(), buf, len);
fileTitle = buf;
delete [] buf;
}
TString filePath;
TString fileTitle;
};
//========================================================================================================
// Main dialog of the MediaInfo viewer. Accepts video files via drag & drop
// or the open-file/open-folder commands, reads their media information and
// displays it; loaded titles are listed in a combo box. Also supports
// exporting the collected info and shell "Open with" registration.
class InfoDlg : public Dialog
{
public:
	InfoDlg(){}
	~InfoDlg(){}
protected:
	// Dialog window procedure: dispatches the messages handled below.
	virtual BOOL CALLBACK runProc(UINT uMsg, WPARAM wParam, LPARAM lParam);
	void OnDropFiles(HDROP hDrop);
	void OnKeyDown(UINT vk, int cRepeat, UINT flags);
	void OnCommand(int id, HWND hwndCtl, UINT codeNotify);
	void AddToComboBox(const TString &fileTitle); // add a video title to the drop-down (combo) box [translated from GBK comment]
	bool AllInfoToSingleFile(TString filePath);
	TString SelectAFolder(const TCHAR *title); // let the user pick a folder [translated from GBK comment]
	void doOpenFile();
	void doOpenFolder();
	int SearchVideo(const TCHAR * folder); // scan `folder` for videos; returns an int — presumably a count of files found, TODO confirm
	int RegisterOpenWith(bool fRegister = true); // register (or unregister, per fRegister) the shell "Open with" association
	bool GetCustomText(TCHAR *pszText);
	bool IsFileIncluded(const TCHAR *filePath);
	bool WritePathToReg(); // persist a path in the registry — NOTE(review): which path is unclear from the header; confirm in the .cpp
public:
	TString ReadMediaInfo(const TCHAR *filePath, bool fUpdateToDlg = true);
	bool CopyToClipbrd(LPCTSTR lpData);
	bool WriteFile(TString filePath, LPVOID lpData, DWORD dwLen);
	TString SelectFilePath(const TCHAR *fileToSave = NULL);
	bool IsSupportedFormat(const TCHAR *filePath);
	void beginSearch();
private:
	TString _folderForSearching; // folder chosen for the video search
	std::vector<VideoPathInfo> videoPath; // videos currently loaded (path + title)
	int iFile; // file index — presumably the file currently being processed; confirm in the .cpp
};
#endif | {
"content_hash": "632196ac8141faefb5953a0f1a1f5eec",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 106,
"avg_line_length": 26.859375,
"alnum_prop": 0.6212914485165794,
"repo_name": "timxx/MediaInfo",
"id": "46a4966442dcbe769ea39d259d90f9445f27faa0",
"size": "2141",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "MediaInfo/InfoDlg.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "89062"
},
{
"name": "C++",
"bytes": "66616"
},
{
"name": "Objective-C",
"bytes": "314"
}
],
"symlink_target": ""
} |
// Model for an OpenWeatherMap "weather" JSON entry. The JSON keys
// "id", "description" and "main" are remapped to weatherId,
// weatherDescription and mainWeather -- presumably because those names
// clash with Objective-C/Foundation conventions ("id" is a type name);
// confirm against the header, which is not visible here.
@implementation KFOWMWeatherModel
// JSONModel hook: returns the JSON-key -> property-name mapping used
// when (de)serializing this model.
+ (JSONKeyMapper *)keyMapper
{
    return [[JSONKeyMapper alloc] initWithDictionary:@{@"id" : @"weatherId", @"description" : @"weatherDescription", @"main": @"mainWeather"}];
}
@end
| {
"content_hash": "367c22f0de524aa410e1bbf3daf46cfc",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 143,
"avg_line_length": 20.09090909090909,
"alnum_prop": 0.6923076923076923,
"repo_name": "ricobeck/KFOpenWeatherMapAPI",
"id": "8420eb9754d4302ec399ee39233ae2dc2fef99a5",
"size": "1448",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "KFOpenWeatherMapAPI/Source/Models/KFOWMWeatherModel.m",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Objective-C",
"bytes": "80117"
},
{
"name": "Ruby",
"bytes": "1262"
}
],
"symlink_target": ""
} |
<?php
namespace App\Http;
use Illuminate\Foundation\Http\Kernel as HttpKernel;
class Kernel extends HttpKernel
{
    /**
     * The application's global HTTP middleware stack.
     *
     * These middleware are run during every request to your application.
     *
     * @var array
     */
    protected $middleware = [
        \Illuminate\Foundation\Http\Middleware\CheckForMaintenanceMode::class,
    ];
    /**
     * The application's route middleware groups.
     *
     * NOTE: middleware within a group run in the order listed.
     *
     * @var array
     */
    protected $middlewareGroups = [
        'web' => [
            \App\Http\Middleware\EncryptCookies::class,
            \Illuminate\Cookie\Middleware\AddQueuedCookiesToResponse::class,
            \Illuminate\Session\Middleware\StartSession::class,
            \Illuminate\View\Middleware\ShareErrorsFromSession::class,
            \App\Http\Middleware\VerifyCsrfToken::class,
        ],
        'api' => [
            'throttle:60,1',
        ],
    ];
    /**
     * The application's route middleware.
     *
     * These middleware may be assigned to groups or used individually.
     *
     * @var array
     */
    protected $routeMiddleware = [
        'auth' => \App\Http\Middleware\Authenticate::class,
        'auth.basic' => \Illuminate\Auth\Middleware\AuthenticateWithBasicAuth::class,
        'can' => \Illuminate\Foundation\Http\Middleware\Authorize::class,
        'guest' => \App\Http\Middleware\RedirectIfAuthenticated::class,
        'throttle' => \Illuminate\Routing\Middleware\ThrottleRequests::class,
        // Application-specific role gates: each redirects when the
        // current account does not match the named role (see the
        // corresponding App\Http\Middleware class).
        'user' => \App\Http\Middleware\RedirectIfNotUser::class,
        'student' => \App\Http\Middleware\RedirectIfNotStudent::class,
        'teacher' => \App\Http\Middleware\RedirectIfNotTeacher::class,
        'mentor' => \App\Http\Middleware\RedirectIfNotMentor::class,
        'notStudent' => \App\Http\Middleware\RedirectIfStudent::class,
        'verify' => \App\Http\Middleware\Verificate::class,
        'fullGuest' => \App\Http\Middleware\RedirectIfNotFullGuest::class
    ];
}
| {
"content_hash": "73061ac09b1f373d5bba7578d1ae23e9",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 85,
"avg_line_length": 33.63333333333333,
"alnum_prop": 0.6392467789890981,
"repo_name": "kerimovscreations/StudentPortal",
"id": "88367e05503c629bc83295d55e9a743d6823a241",
"size": "2018",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "app/Http/Kernel.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "1182"
},
{
"name": "CSS",
"bytes": "72"
},
{
"name": "HTML",
"bytes": "143881"
},
{
"name": "JavaScript",
"bytes": "434358"
},
{
"name": "PHP",
"bytes": "490685"
}
],
"symlink_target": ""
} |
Класс реализует VK API и авторизацию по OAuth протоколу.
По всем вопросам можно писать на <vladkens@yandex.ru>
### Использование
1. Подключите класс
require('VK.php');
2. Создайте объект VK
1. без авторизации
$vk = new VK\VK('{APP_ID}', '{API_SECRET}');
2. с авторизацией
$vk = new VK\VK('{APP_ID}', '{API_SECRET}', '{ACCESS_TOKEN}');
3. Если нужна авторизация
1. Получаем ссылку авторизации
$vk->getAuthorizeURL('{API_SETTINGS}', '{CALLBACK_URL}');
2. Получаем токен доступа по ключу из ссылки авторизации
$vk->getAccessToken('{CODE}');
3. Проверить авторизирован ли пользователь
$vk->isAuth(); // return bool
4. Используем API
$vk->api('{METHOD_NAME}', '{PARAMETERS}');
### Другие методы
* Установить версию API.
	`$vk->setApiVersion({NUMBER});`
### Переменные
* `{APP_ID}` — ID приложения вконтакте.
* `{API_SECRET}` — Секретный код приложения.
* `{ACCESS_TOKEN}` — Токен доступа.
* `{API_SETTINGS}` — Запрашиваемые [права доступа](http://vk.com/developers.php?oid=-1&p=Права_доступа_приложений) приложения (через запятую).
* `{CALLBACK_URL}` — Адрес, на который будет передан `{CODE}`.
* `{CODE}` — Код для получения токена доступа.
* `{METHOD_NAME}` — Имя API метода. [Все методы](http://vk.com/developers.php?oid=-1&p=Описание_методов_API).
* `{PARAMETERS}` — Параметры соответствующего метода API.
\* Если нужно получить бесконечный токен, используете параметр `offline` в `{API_SETTINGS}`.
### Лицензия
[MIT](https://raw.github.com/vladkens/VK/master/LICENSE)
## English
The PHP class for vk.com API and to support OAuth.
You can ask me any questions by e-mail: <vladkens@yandex.ru>
### Use
1. Connect class
require('VK.php');
2. Create VK object
1. without authorization
$vk = new VK\VK('{APP_ID}', '{API_SECRET}');
2. with authorization
$vk = new VK\VK('{APP_ID}', '{API_SECRET}', '{ACCESS_TOKEN}');
3. If authorization is needed
	1. Get the authorization link
$vk->getAuthorizeURL('{API_SETTINGS}', '{CALLBACK_URL}');
	2. Get the access token using the code from the authorization link
$vk->getAccessToken('{CODE}');
3. Check the status of authorization
$vk->isAuth(); // return bool
4. Use the API
$vk->api('{METHOD_NAME}', '{PARAMETERS}');
### Other methods
* Set version of API.
	`$vk->setApiVersion({NUMBER});`
### Variables
* `{APP_ID}` — Your application's identifier.
* `{API_SECRET}` — Secret application key.
* `{ACCESS_TOKEN}` — Access token.
* `{API_SETTINGS}` — Access [rights requested](http://vk.com/developers.php?oid=-17680044&p=Application_Access_Rights) by your app (through comma).
* `{CALLBACK_URL}` — Address to which `{CODE}` will be rendered.
* `{CODE}` — The code to get access token.
* `{METHOD_NAME}` — Name of the API method. [All methods.](http://vk.com/developers.php?oid=-17680044&p=API_Method_Description)
* `{PARAMETERS}` — Parameters of the corresponding API methods.
\* If you need infinite token use key `offline` in `{API_SETTINGS}`.
### License
[MIT](https://raw.github.com/vladkens/VK/master/LICENSE) | {
"content_hash": "a9c9eb869d51e76625c768faa285289b",
"timestamp": "",
"source": "github",
"line_count": 107,
"max_line_length": 148,
"avg_line_length": 30.289719626168225,
"alnum_prop": 0.6334464671397717,
"repo_name": "maksa988/VK-1",
"id": "771c0e107ebd4173b0c0691a7bea34d985ee5d09",
"size": "3910",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "7140"
}
],
"symlink_target": ""
} |
<?php
namespace Cheetahmail\Campaigns\SmsUnit;
class GetResponse
{
    /**
     * Payload of the SOAP Get call.
     *
     * @var UnitSMSCampaign $GetResult
     */
    protected $GetResult = null;

    /**
     * Wraps the given result payload.
     *
     * @param UnitSMSCampaign $GetResult
     */
    public function __construct($GetResult)
    {
        $this->setGetResult($GetResult);
    }

    /**
     * Fluent setter for the result payload.
     *
     * @param UnitSMSCampaign $GetResult
     * @return \Cheetahmail\Campaigns\SmsUnit\GetResponse
     */
    public function setGetResult($GetResult)
    {
        $this->GetResult = $GetResult;
        return $this;
    }

    /**
     * Returns the wrapped result payload.
     *
     * @return UnitSMSCampaign
     */
    public function getGetResult()
    {
        return $this->GetResult;
    }
}
| {
"content_hash": "1b5a8010ba9c3462f50011671d0f3a89",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 57,
"avg_line_length": 18.216216216216218,
"alnum_prop": 0.5830860534124629,
"repo_name": "pgrimaud/cheetahmail-sdk",
"id": "a386529906f3583d53229e259c35d09e6cafcdce",
"size": "674",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Campaigns/SmsUnit/GetResponse.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "746810"
}
],
"symlink_target": ""
} |
'use strict';
// The five helpers below are Babel-generated ES2015-class boilerplate.
// Do not edit by hand; regenerate from the ES2015 source instead.

// Defines prototype/static properties declared in a `class` body.
var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();

// Emulates `super.x` / `super.x()` lookup along the prototype chain.
var _get = function get(_x, _x2, _x3) { var _again = true; _function: while (_again) { var object = _x, property = _x2, receiver = _x3; desc = parent = getter = undefined; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x = parent; _x2 = property; _x3 = receiver; _again = true; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } };

// CommonJS/ES-module interop for default imports.
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }

// Throws when a class constructor is invoked without `new`.
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }

// Wires up prototype and static inheritance for `class X extends Y`.
function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) subClass.__proto__ = superClass; }
var _leaflet = require('leaflet');
var _leaflet2 = _interopRequireDefault(_leaflet);
var _react = require('react');
var _react2 = _interopRequireDefault(_react);
jest.dontMock('../MapComponent');
jest.dontMock('../MapLayer');
jest.dontMock('../Map');
var MapLayer = require('../MapLayer');
var Map = require('../Map');
// Regression test: a MapLayer subclass must receive the `map` prop from
// its Map parent, and getClonedChildrenWithMap must forward that prop
// (plus any extra props) to every child element.
describe('MapLayer', function () {
  it('passes its `map` prop to its children', function () {
    // Mount point for React.render below.
    document.body.innerHTML = '<div id="test"></div>';
    // Babel-compiled ES2015 class: a MapLayer subclass whose render()
    // clones its children with {parent: true} merged into their props.
    var Component = (function (_MapLayer) {
      _inherits(Component, _MapLayer);
      function Component() {
        _classCallCheck(this, Component);
        _get(Object.getPrototypeOf(Component.prototype), 'constructor', this).apply(this, arguments);
      }
      _createClass(Component, [{
        key: 'componentWillMount',
        value: function componentWillMount() {
          _get(Object.getPrototypeOf(Component.prototype), 'componentWillMount', this).call(this);
          // The Map parent must have provided the `map` prop by now.
          expect(this.props.map).toBeDefined();
          this.leafletElement = _leaflet2['default'].marker([0, 0]);
        }
      }, {
        key: 'render',
        value: function render() {
          var children = this.getClonedChildrenWithMap({ parent: true });
          return _react2['default'].createElement(
            'div',
            null,
            children
          );
        }
      }]);
      return Component;
    })(MapLayer);
    // Leaf component asserting it received both the injected `map` prop
    // and the extra `parent` flag from its MapLayer parent.
    var ChildComponent = (function (_React$Component) {
      _inherits(ChildComponent, _React$Component);
      function ChildComponent() {
        _classCallCheck(this, ChildComponent);
        _get(Object.getPrototypeOf(ChildComponent.prototype), 'constructor', this).apply(this, arguments);
      }
      _createClass(ChildComponent, [{
        key: 'componentWillMount',
        value: function componentWillMount() {
          expect(this.props.map).toBeDefined();
          expect(this.props.parent).toBe(true);
        }
      }, {
        key: 'render',
        value: function render() {
          return null;
        }
      }]);
      return ChildComponent;
    })(_react2['default'].Component);
    // Map -> Component (MapLayer subclass) -> ChildComponent; rendering
    // triggers the componentWillMount assertions above.
    var component = _react2['default'].createElement(
      Map,
      null,
      _react2['default'].createElement(
        Component,
        null,
        _react2['default'].createElement(ChildComponent, null)
      )
    );
    var instance = _react2['default'].render(component, document.getElementById('test'));
  });
});
"content_hash": "4d30505d6bbc515b26f50489f9148ccb",
"timestamp": "",
"source": "github",
"line_count": 99,
"max_line_length": 667,
"avg_line_length": 44.313131313131315,
"alnum_prop": 0.6469113289263734,
"repo_name": "KABA-CCEAC/react-leaflet",
"id": "c2815d39b5d203890329eb1ba6bd25f56543157b",
"size": "4387",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/__tests__/MapLayer.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "675"
},
{
"name": "JavaScript",
"bytes": "1224814"
}
],
"symlink_target": ""
} |
package test
import (
"bytes"
"encoding/json"
"fmt"
"strconv"
"testing"
jsoniter "github.com/json-iterator/go"
"github.com/stretchr/testify/require"
)
// Test_read_float cross-checks jsoniter's ReadFloat32/ReadFloat64
// against the standard library (strconv.ParseFloat, encoding/json) for
// a spread of numeric literals, exercising both the in-memory parser
// and the streaming parser (tiny read buffer of 2 bytes).
func Test_read_float(t *testing.T) {
	inputs := []string{
		`1.1`, `1000`, `9223372036854775807`, `12.3`, `-12.3`, `720368.54775807`, `720368.547758075`,
		`1e1`, `1e+1`, `1e-1`, `1E1`, `1E+1`, `1E-1`, `-1e1`, `-1e+1`, `-1e-1`,
	}
	for _, input := range inputs {
		// non-streaming
		t.Run(fmt.Sprintf("%v", input), func(t *testing.T) {
			should := require.New(t)
			// The trailing comma terminates the number token.
			iter := jsoniter.ParseString(jsoniter.ConfigDefault, input+",")
			expected, err := strconv.ParseFloat(input, 32)
			should.Nil(err)
			should.Equal(float32(expected), iter.ReadFloat32())
		})
		t.Run(fmt.Sprintf("%v", input), func(t *testing.T) {
			should := require.New(t)
			iter := jsoniter.ParseString(jsoniter.ConfigDefault, input+",")
			expected, err := strconv.ParseFloat(input, 64)
			should.Nil(err)
			should.Equal(expected, iter.ReadFloat64())
		})
		// streaming (2-byte buffer forces incremental reads)
		t.Run(fmt.Sprintf("%v", input), func(t *testing.T) {
			should := require.New(t)
			iter := jsoniter.Parse(jsoniter.ConfigDefault, bytes.NewBufferString(input+","), 2)
			expected, err := strconv.ParseFloat(input, 32)
			should.Nil(err)
			should.Equal(float32(expected), iter.ReadFloat32())
		})
		t.Run(fmt.Sprintf("%v", input), func(t *testing.T) {
			should := require.New(t)
			iter := jsoniter.Parse(jsoniter.ConfigDefault, bytes.NewBufferString(input+","), 2)
			val := float64(0)
			err := json.Unmarshal([]byte(input), &val)
			should.Nil(err)
			should.Equal(val, iter.ReadFloat64())
		})
	}
}
// Test_write_float32 verifies that WriteFloat32Lossy and WriteVal agree
// with encoding/json for a spread of float32 samples, that lossy output
// appends correctly after a raw write, and that a tiny value keeps its
// exponent form.
func Test_write_float32(t *testing.T) {
	samples := []float32{0, 1, -1, 99, 0xff, 0xfff, 0xffff, 0xfffff, 0xffffff, 0x4ffffff, 0xfffffff,
		-0x4ffffff, -0xfffffff, 1.2345, 1.23456, 1.234567, 1.001}
	for _, sample := range samples {
		// Lossy writer must match encoding/json exactly.
		t.Run(fmt.Sprintf("%v", sample), func(t *testing.T) {
			r := require.New(t)
			out := &bytes.Buffer{}
			st := jsoniter.NewStream(jsoniter.ConfigDefault, out, 4096)
			st.WriteFloat32Lossy(sample)
			st.Flush()
			r.Nil(st.Error)
			want, err := json.Marshal(sample)
			r.Nil(err)
			r.Equal(string(want), out.String())
		})
		// Generic WriteVal must take the same path.
		t.Run(fmt.Sprintf("%v", sample), func(t *testing.T) {
			r := require.New(t)
			out := &bytes.Buffer{}
			st := jsoniter.NewStream(jsoniter.ConfigDefault, out, 4096)
			st.WriteVal(sample)
			st.Flush()
			r.Nil(st.Error)
			want, err := json.Marshal(sample)
			r.Nil(err)
			r.Equal(string(want), out.String())
		})
	}
	r := require.New(t)
	out := &bytes.Buffer{}
	st := jsoniter.NewStream(jsoniter.ConfigDefault, out, 10)
	st.WriteRaw("abcdefg")
	st.WriteFloat32Lossy(1.123456)
	st.Flush()
	r.Nil(st.Error)
	r.Equal("abcdefg1.123456", out.String())
	st = jsoniter.NewStream(jsoniter.ConfigDefault, nil, 0)
	st.WriteFloat32(float32(0.0000001))
	r.Equal("1e-7", string(st.Buffer()))
}
// Test_write_float64 verifies that WriteFloat64Lossy and WriteVal agree
// with strconv.FormatFloat for a spread of float64 samples, that lossy
// output appends correctly after a raw write, and that a tiny value
// keeps its exponent form.
func Test_write_float64(t *testing.T) {
	samples := []float64{0, 1, -1, 99, 0xff, 0xfff, 0xffff, 0xfffff, 0xffffff, 0x4ffffff, 0xfffffff,
		-0x4ffffff, -0xfffffff, 1.2345, 1.23456, 1.234567, 1.001}
	for _, sample := range samples {
		// Lossy writer must match strconv's shortest 'f' formatting.
		t.Run(fmt.Sprintf("%v", sample), func(t *testing.T) {
			r := require.New(t)
			out := &bytes.Buffer{}
			st := jsoniter.NewStream(jsoniter.ConfigDefault, out, 4096)
			st.WriteFloat64Lossy(sample)
			st.Flush()
			r.Nil(st.Error)
			r.Equal(strconv.FormatFloat(sample, 'f', -1, 64), out.String())
		})
		// Generic WriteVal must take the same path.
		t.Run(fmt.Sprintf("%v", sample), func(t *testing.T) {
			r := require.New(t)
			out := &bytes.Buffer{}
			st := jsoniter.NewStream(jsoniter.ConfigDefault, out, 4096)
			st.WriteVal(sample)
			st.Flush()
			r.Nil(st.Error)
			r.Equal(strconv.FormatFloat(sample, 'f', -1, 64), out.String())
		})
	}
	r := require.New(t)
	out := &bytes.Buffer{}
	st := jsoniter.NewStream(jsoniter.ConfigDefault, out, 10)
	st.WriteRaw("abcdefg")
	st.WriteFloat64Lossy(1.123456)
	st.Flush()
	r.Nil(st.Error)
	r.Equal("abcdefg1.123456", out.String())
	st = jsoniter.NewStream(jsoniter.ConfigDefault, nil, 0)
	st.WriteFloat64(float64(0.0000001))
	r.Equal("1e-7", string(st.Buffer()))
}
| {
"content_hash": "4d78b5d391ab7eb823f457823f6063db",
"timestamp": "",
"source": "github",
"line_count": 130,
"max_line_length": 95,
"avg_line_length": 32.746153846153845,
"alnum_prop": 0.6673713883016209,
"repo_name": "json-iterator/go",
"id": "1f2c007506f810508f1d9cbae36ca3f8bd0b4d3d",
"size": "4257",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "value_tests/float_test.go",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Go",
"bytes": "471847"
},
{
"name": "Shell",
"bytes": "666"
}
],
"symlink_target": ""
} |
<?php
/**
 * Sets the page size of a PDF surface.
 *
 * Per the cairo documentation, the new size applies to the current page
 * and all subsequent pages of the PDF.
 *
 * This file is a stub for IDEs / static analysis (@phpstub); the real
 * implementation is supplied by the cairo extension at runtime.
 *
 * @param CairoPdfSurface $surface the PDF surface to resize
 * @param float $width  new page width, in points (1 point = 1/72 inch)
 * @param float $height new page height, in points
 *
 * @return void
 */
function cairo_pdf_surface_set_size($surface, $width, $height)
{
}
"content_hash": "ec6682f4a035dce5fb4ffb1e2769abaa",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 62,
"avg_line_length": 15.9375,
"alnum_prop": 0.6666666666666666,
"repo_name": "schmittjoh/php-stubs",
"id": "3adaa6e03f6d00fa2e2fc702cb82b2b8867b8251",
"size": "255",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "res/php/cairo/functions/cairo-pdf-surface-set-size.php",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "PHP",
"bytes": "2203628"
}
],
"symlink_target": ""
} |
namespace autofill {

// Factory declared on CardUnmaskPromptView: builds the Android
// implementation and immediately shows the Java-side dialog. The
// returned object deletes itself when the prompt is dismissed (see
// PromptDismissed below).
CardUnmaskPromptView* CardUnmaskPromptView::CreateAndShow(
    CardUnmaskPromptController* controller) {
  CardUnmaskPromptViewAndroid* view =
      new CardUnmaskPromptViewAndroid(controller);
  view->Show();
  return view;
}
CardUnmaskPromptViewAndroid::CardUnmaskPromptViewAndroid(
    CardUnmaskPromptController* controller)
    : controller_(controller) {
}
// Tell the controller the dialog closed, unless ControllerGone()
// already cleared the pointer.
CardUnmaskPromptViewAndroid::~CardUnmaskPromptViewAndroid() {
  if (controller_)
    controller_->OnUnmaskDialogClosed();
}
// Creates the Java-side CardUnmaskBridge with the prompt's strings and
// options, then shows it.
void CardUnmaskPromptViewAndroid::Show() {
  JNIEnv* env = base::android::AttachCurrentThread();
  ui::ViewAndroid* view_android =
      controller_->GetWebContents()->GetNativeView();
  ScopedJavaLocalRef<jstring> dialog_title =
      base::android::ConvertUTF16ToJavaString(env,
                                              controller_->GetWindowTitle());
  ScopedJavaLocalRef<jstring> instructions =
      base::android::ConvertUTF16ToJavaString(
          env, controller_->GetInstructionsMessage());
  java_object_.Reset(Java_CardUnmaskBridge_create(
      env, reinterpret_cast<intptr_t>(this), dialog_title.obj(),
      instructions.obj(),
      ResourceMapper::MapFromChromiumId(controller_->GetCvcImageRid()),
      controller_->ShouldRequestExpirationDate(),
      controller_->CanStoreLocally(),
      controller_->GetStoreLocallyStartState(),
      view_android->GetWindowAndroid()->GetJavaObject().obj()));
  Java_CardUnmaskBridge_show(env, java_object_.obj());
}
// JNI: called from Java to validate the CVC the user has typed.
bool CardUnmaskPromptViewAndroid::CheckUserInputValidity(JNIEnv* env,
                                                         jobject obj,
                                                         jstring response) {
  return controller_->InputCvcIsValid(
      base::android::ConvertJavaStringToUTF16(env, response));
}
// JNI: called from Java when the user submits the CVC, the expiration
// date (if requested) and the store-locally choice.
void CardUnmaskPromptViewAndroid::OnUserInput(JNIEnv* env,
                                              jobject obj,
                                              jstring cvc,
                                              jstring month,
                                              jstring year,
                                              jboolean should_store_locally) {
  controller_->OnUnmaskResponse(
      base::android::ConvertJavaStringToUTF16(env, cvc),
      base::android::ConvertJavaStringToUTF16(env, month),
      base::android::ConvertJavaStringToUTF16(env, year),
      should_store_locally);
}
// JNI: the Java prompt was dismissed; the native object's lifetime ends
// here (self-delete -- the destructor notifies the controller).
void CardUnmaskPromptViewAndroid::PromptDismissed(JNIEnv* env, jobject obj) {
  delete this;
}
// The controller is going away: drop the pointer (so the destructor
// does not call back into it) and dismiss the Java dialog.
void CardUnmaskPromptViewAndroid::ControllerGone() {
  controller_ = nullptr;
  JNIEnv* env = base::android::AttachCurrentThread();
  Java_CardUnmaskBridge_dismiss(env, java_object_.obj());
}
// Puts the Java dialog into its "verifying" state while the server
// round-trip is in flight.
void CardUnmaskPromptViewAndroid::DisableAndWaitForVerification() {
  JNIEnv* env = base::android::AttachCurrentThread();
  Java_CardUnmaskBridge_disableAndWaitForVerification(env, java_object_.obj());
}
// Forwards the verification outcome to Java; an empty error_message
// means success (null is passed through).
void CardUnmaskPromptViewAndroid::GotVerificationResult(
    const base::string16& error_message,
    bool allow_retry) {
  JNIEnv* env = base::android::AttachCurrentThread();
  ScopedJavaLocalRef<jstring> message;
  if (!error_message.empty())
    message = base::android::ConvertUTF16ToJavaString(env, error_message);
  Java_CardUnmaskBridge_verificationFinished(env, java_object_.obj(),
                                             message.obj(), allow_retry);
}

// static
// Registers this class's JNI methods with the VM.
bool CardUnmaskPromptViewAndroid::Register(JNIEnv* env) {
  return RegisterNativesImpl(env);
}

}  // namespace autofill
| {
"content_hash": "2dec1905ccc18827b68b18a8cd5a9c69",
"timestamp": "",
"source": "github",
"line_count": 96,
"max_line_length": 79,
"avg_line_length": 36.729166666666664,
"alnum_prop": 0.6605218377765173,
"repo_name": "ltilve/chromium",
"id": "405f337d5cee0ee815c982fe255b3aefcb955bb3",
"size": "4059",
"binary": false,
"copies": "6",
"ref": "refs/heads/igalia-sidebar",
"path": "chrome/browser/ui/android/autofill/card_unmask_prompt_view_android.cc",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "AppleScript",
"bytes": "6973"
},
{
"name": "Arduino",
"bytes": "464"
},
{
"name": "Assembly",
"bytes": "23829"
},
{
"name": "C",
"bytes": "4118701"
},
{
"name": "C++",
"bytes": "234094836"
},
{
"name": "CSS",
"bytes": "939350"
},
{
"name": "Emacs Lisp",
"bytes": "988"
},
{
"name": "HTML",
"bytes": "28170463"
},
{
"name": "Java",
"bytes": "9881553"
},
{
"name": "JavaScript",
"bytes": "19877257"
},
{
"name": "Makefile",
"bytes": "68017"
},
{
"name": "Objective-C",
"bytes": "1485658"
},
{
"name": "Objective-C++",
"bytes": "8718816"
},
{
"name": "PHP",
"bytes": "97817"
},
{
"name": "PLpgSQL",
"bytes": "177185"
},
{
"name": "Perl",
"bytes": "63937"
},
{
"name": "Protocol Buffer",
"bytes": "460217"
},
{
"name": "Python",
"bytes": "7973452"
},
{
"name": "Shell",
"bytes": "480424"
},
{
"name": "Standard ML",
"bytes": "4965"
},
{
"name": "XSLT",
"bytes": "418"
},
{
"name": "nesC",
"bytes": "18347"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
<link rel="SHORTCUT ICON" href="../../../../../img/clover.ico" />
<link rel="stylesheet" href="../../../../../aui/css/aui.min.css" media="all"/>
<link rel="stylesheet" href="../../../../../aui/css/aui-experimental.min.css" media="all"/>
<!--[if IE 9]><link rel="stylesheet" href="../../../../../aui/css/aui-ie9.min.css" media="all"/><![endif]-->
<style type="text/css" media="all">
@import url('../../../../../style.css');
@import url('../../../../../tree.css');
</style>
<script src="../../../../../jquery-1.8.3.min.js" type="text/javascript"></script>
<script src="../../../../../aui/js/aui.min.js" type="text/javascript"></script>
<script src="../../../../../aui/js/aui-experimental.min.js" type="text/javascript"></script>
<script src="../../../../../aui/js/aui-soy.min.js" type="text/javascript"></script>
<script src="../../../../../package-nodes-tree.js" type="text/javascript"></script>
<script src="../../../../../clover-tree.js" type="text/javascript"></script>
<script src="../../../../../clover.js" type="text/javascript"></script>
<script src="../../../../../clover-descriptions.js" type="text/javascript"></script>
<script src="../../../../../cloud.js" type="text/javascript"></script>
<title>ABA Route Transit Number Validator 1.0.1-SNAPSHOT</title>
</head>
<body>
<div id="page">
<header id="header" role="banner">
<nav class="aui-header aui-dropdown2-trigger-group" role="navigation">
<div class="aui-header-inner">
<div class="aui-header-primary">
<h1 id="logo" class="aui-header-logo aui-header-logo-clover">
<a href="http://openclover.org" title="Visit OpenClover home page"><span class="aui-header-logo-device">OpenClover</span></a>
</h1>
</div>
<div class="aui-header-secondary">
<ul class="aui-nav">
<li id="system-help-menu">
<a class="aui-nav-link" title="Open online documentation" target="_blank"
href="http://openclover.org/documentation">
<span class="aui-icon aui-icon-small aui-iconfont-help"> Help</span>
</a>
</li>
</ul>
</div>
</div>
</nav>
</header>
<div class="aui-page-panel">
<div class="aui-page-panel-inner">
<div class="aui-page-panel-nav aui-page-panel-nav-clover">
<div class="aui-page-header-inner" style="margin-bottom: 20px;">
<div class="aui-page-header-image">
<a href="http://cardatechnologies.com" target="_top">
<div class="aui-avatar aui-avatar-large aui-avatar-project">
<div class="aui-avatar-inner">
<img src="../../../../../img/clover_logo_large.png" alt="Clover icon"/>
</div>
</div>
</a>
</div>
<div class="aui-page-header-main" >
<h1>
<a href="http://cardatechnologies.com" target="_top">
ABA Route Transit Number Validator 1.0.1-SNAPSHOT
</a>
</h1>
</div>
</div>
<nav class="aui-navgroup aui-navgroup-vertical">
<div class="aui-navgroup-inner">
<ul class="aui-nav">
<li class="">
<a href="../../../../../dashboard.html">Project overview</a>
</li>
</ul>
<div class="aui-nav-heading packages-nav-heading">
<strong>Packages</strong>
</div>
<div class="aui-nav project-packages">
<form method="get" action="#" class="aui package-filter-container">
<input type="text" autocomplete="off" class="package-filter text"
placeholder="Type to filter packages..." name="package-filter" id="package-filter"
title="Start typing package name (or part of the name) to search through the tree. Use arrow keys and the Enter key to navigate."/>
</form>
<p class="package-filter-no-results-message hidden">
<small>No results found.</small>
</p>
<div class="packages-tree-wrapper" data-root-relative="../../../../../" data-package-name="com.cardatechnologies.utils.validators.abaroutevalidator">
<div class="packages-tree-container"></div>
<div class="clover-packages-lozenges"></div>
</div>
</div>
</div>
</nav> </div>
<section class="aui-page-panel-content">
<div class="aui-page-panel-content-clover">
<div class="aui-page-header-main"><ol class="aui-nav aui-nav-breadcrumbs">
<li><a href="../../../../../dashboard.html"> Project Clover database Sat Aug 7 2021 12:29:33 MDT</a></li>
<li><a href="test-pkg-summary.html">Package com.cardatechnologies.utils.validators.abaroutevalidator</a></li>
<li><a href="test-Test_AbaRouteValidator_05.html">Class Test_AbaRouteValidator_05</a></li>
</ol></div>
<h1 class="aui-h2-clover">
Test testAbaNumberCheck_8726_bad
</h1>
<table class="aui">
<thead>
<tr>
<th>Test</th>
<th><label title="The test result. Either a Pass, Fail or Error.">Status</label></th>
<th><label title="When the test execution was started">Start time</label></th>
<th><label title="The total time in seconds taken to run this test.">Time (seconds)</label></th>
<th><label title="A failure or error message if the test is not successful.">Message</label></th>
</tr>
</thead>
<tbody>
<tr>
<td>
<a href="../../../../../com/cardatechnologies/utils/validators/abaroutevalidator/Test_AbaRouteValidator_05.html?line=29464#src-29464" >testAbaNumberCheck_8726_bad</a>
</td>
<td>
<span class="sortValue">1</span><span class="aui-lozenge aui-lozenge-success">PASS</span>
</td>
<td>
7 Aug 12:34:49
</td>
<td>
0.0 </td>
<td>
<div></div>
<div class="errorMessage"></div>
</td>
</tr>
</tbody>
</table>
<div> </div>
<table class="aui aui-table-sortable">
<thead>
<tr>
<th style="white-space:nowrap;"><label title="A class that was directly hit by this test.">Target Class</label></th>
<th colspan="4"><label title="The percentage of coverage contributed by each single test.">Coverage contributed by</label> testAbaNumberCheck_8726_bad</th>
</tr>
</thead>
<tbody>
<tr>
<td>
<span class="sortValue">com.cardatechnologies.utils.validators.abaroutevalidator.exceptions.AbaRouteValidationException</span>
  <a href="../../../../../com/cardatechnologies/utils/validators/abaroutevalidator/exceptions/AbaRouteValidationException.html?id=14471#AbaRouteValidationException" title="AbaRouteValidationException" name="sl-43">com.cardatechnologies.utils.validators.abaroutevalidator.exceptions.AbaRouteValidationException</a>
</td>
<td>
<span class="sortValue">0.5714286</span>57.1%
</td>
<td class="align-middle" style="width: 100%" colspan="3">
<div>
<div title="57.1% Covered" style="min-width:40px;" class="barNegative contribBarNegative contribBarNegative"><div class="barPositive contribBarPositive contribBarPositive" style="width:57.1%"></div></div></div> </td>
</tr>
<tr>
<td>
<span class="sortValue">com.cardatechnologies.utils.validators.abaroutevalidator.ErrorCodes</span>
  <a href="../../../../../com/cardatechnologies/utils/validators/abaroutevalidator/ErrorCodes.html?id=14471#ErrorCodes" title="ErrorCodes" name="sl-42">com.cardatechnologies.utils.validators.abaroutevalidator.ErrorCodes</a>
</td>
<td>
<span class="sortValue">0.5714286</span>57.1%
</td>
<td class="align-middle" style="width: 100%" colspan="3">
<div>
<div title="57.1% Covered" style="min-width:40px;" class="barNegative contribBarNegative contribBarNegative"><div class="barPositive contribBarPositive contribBarPositive" style="width:57.1%"></div></div></div> </td>
</tr>
<tr>
<td>
<span class="sortValue">com.cardatechnologies.utils.validators.abaroutevalidator.AbaRouteValidator</span>
  <a href="../../../../../com/cardatechnologies/utils/validators/abaroutevalidator/AbaRouteValidator.html?id=14471#AbaRouteValidator" title="AbaRouteValidator" name="sl-47">com.cardatechnologies.utils.validators.abaroutevalidator.AbaRouteValidator</a>
</td>
<td>
<span class="sortValue">0.29411766</span>29.4%
</td>
<td class="align-middle" style="width: 100%" colspan="3">
<div>
<div title="29.4% Covered" style="min-width:40px;" class="barNegative contribBarNegative contribBarNegative"><div class="barPositive contribBarPositive contribBarPositive" style="width:29.4%"></div></div></div> </td>
</tr>
</tbody>
</table>
</div> <!-- class="aui-page-panel-content-clover" -->
<footer id="footer" role="contentinfo">
<section class="footer-body">
<ul>
<li>
Report generated by <a target="_new" href="http://openclover.org">OpenClover</a> v 4.4.1
on Sat Aug 7 2021 12:49:26 MDT using coverage data from Sat Aug 7 2021 12:47:23 MDT.
</li>
</ul>
<ul>
<li>OpenClover is free and open-source software. </li>
</ul>
</section>
</footer> </section> <!-- class="aui-page-panel-content" -->
</div> <!-- class="aui-page-panel-inner" -->
</div> <!-- class="aui-page-panel" -->
</div> <!-- id="page" -->
</body>
</html> | {
"content_hash": "511cf1907e026166f58187255446558a",
"timestamp": "",
"source": "github",
"line_count": 235,
"max_line_length": 359,
"avg_line_length": 46.740425531914894,
"alnum_prop": 0.5302257829570284,
"repo_name": "dcarda/aba.route.validator",
"id": "304b676feb89c8031f8d2ce36546ebb05a936806",
"size": "10984",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "target13/site/clover/com/cardatechnologies/utils/validators/abaroutevalidator/Test_AbaRouteValidator_05_testAbaNumberCheck_8726_bad_b5z.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "18715254"
}
],
"symlink_target": ""
} |
<?php
namespace Amp\Loop;
use Amp\Loop\Internal\Watcher;
use AsyncInterop\Loop\Driver;
use AsyncInterop\Loop\InvalidWatcherException;
abstract class Loop extends Driver {
    // Don't use 1e3 / 1e6, they result in a float instead of int
    const MILLISEC_PER_SEC = 1000;
    const MICROSEC_PER_SEC = 1000000;
    /** @var string Identifier that will be assigned to the next registered watcher ("a", "b", ...). */
    private $nextId = "a";
    /** @var \Amp\Loop\Internal\Watcher[] All registered watchers, keyed by watcher id. */
    private $watchers = [];
    /** @var \Amp\Loop\Internal\Watcher[] Watchers queued for backend activation on the next tick. */
    private $enableQueue = [];
    /** @var \Amp\Loop\Internal\Watcher[] Defer watchers executing during the current tick. */
    private $deferQueue = [];
    /** @var \Amp\Loop\Internal\Watcher[] Defer watchers scheduled for the next tick. */
    private $nextTickQueue = [];
    /** @var callable|null Handler invoked when a watcher callback throws; rethrows if unset. */
    private $errorHandler;
    /** @var int Nesting depth of run() invocations. */
    private $running = 0;
    /**
     * {@inheritdoc}
     */
    public function run() {
        $previous = $this->running;
        ++$this->running;
        try {
            while ($this->running > $previous) {
                if ($this->isEmpty()) {
                    return;
                }
                $this->tick();
            }
        } finally {
            // Restore the previous nesting level even if a callback threw.
            $this->running = $previous;
        }
    }
    /**
     * {@inheritdoc}
     */
    public function stop() {
        // Decrement the run() nesting level, clamping at zero.
        $this->running = \max(0, $this->running - 1);
    }
    /**
     * @return bool True if no enabled and referenced watchers remain in the loop.
     */
    private function isEmpty() {
        foreach ($this->watchers as $watcher) {
            if ($watcher->enabled && $watcher->referenced) {
                return false;
            }
        }
        return true;
    }
    /**
     * Executes a single tick of the event loop.
     */
    private function tick() {
        // Promote watchers queued during the previous tick.
        $this->deferQueue = \array_merge($this->deferQueue, $this->nextTickQueue);
        $this->nextTickQueue = [];
        $this->activate($this->enableQueue);
        $this->enableQueue = [];
        try {
            foreach ($this->deferQueue as $watcher) {
                if (!isset($this->deferQueue[$watcher->id])) {
                    continue; // Watcher disabled by another defer watcher.
                }
                // Defer watchers are one-shot: drop them before invoking.
                unset($this->watchers[$watcher->id], $this->deferQueue[$watcher->id]);
                $callback = $watcher->callback;
                $callback($watcher->id, $watcher->data);
            }
            // Only allow the backend dispatcher to block when nothing else is pending.
            $this->dispatch(empty($this->nextTickQueue) && empty($this->enableQueue) && $this->running);
        } catch (\Throwable $exception) {
            if (null === $this->errorHandler) {
                throw $exception;
            }
            $errorHandler = $this->errorHandler;
            $errorHandler($exception);
        } catch (\Exception $exception) { // @todo Remove when PHP 5.x support is no longer needed.
            if (null === $this->errorHandler) {
                throw $exception;
            }
            $errorHandler = $this->errorHandler;
            $errorHandler($exception);
        }
    }
    /**
     * Dispatches any pending read/write, timer, and signal events.
     *
     * @param bool $blocking
     */
    abstract protected function dispatch($blocking);
    /**
     * Activates (enables) all the given watchers.
     *
     * @param \Amp\Loop\Internal\Watcher[] $watchers
     */
    abstract protected function activate(array $watchers);
    /**
     * Deactivates (disables) the given watcher.
     *
     * @param \Amp\Loop\Internal\Watcher $watcher
     */
    abstract protected function deactivate(Watcher $watcher);
    /**
     * Creates, registers, and queues a new watcher.
     *
     * Consolidates the bookkeeping shared by defer(), delay(), repeat(),
     * onReadable(), onWritable(), and onSignal().
     *
     * @param int $type One of the Watcher::* type constants.
     * @param callable $callback Callback invoked when the watcher fires.
     * @param mixed $data Opaque data forwarded to the callback.
     * @param mixed $value Type-specific value (delay/interval, stream, or signal number); omitted for defer.
     *
     * @return string Unique identifier of the new watcher.
     */
    private function registerWatcher($type, callable $callback, $data, $value = null) {
        $watcher = new Watcher;
        $watcher->type = $type;
        $watcher->id = $this->nextId++;
        $watcher->callback = $callback;
        $watcher->data = $data;
        if ($value !== null) {
            $watcher->value = $value;
        }
        $this->watchers[$watcher->id] = $watcher;
        if ($type === Watcher::DEFER) {
            // Defer watchers run on the next tick instead of being backend-activated.
            $this->nextTickQueue[$watcher->id] = $watcher;
        } else {
            $this->enableQueue[$watcher->id] = $watcher;
        }
        return $watcher->id;
    }
    /**
     * {@inheritdoc}
     */
    public function defer(callable $callback, $data = null) {
        return $this->registerWatcher(Watcher::DEFER, $callback, $data);
    }
    /**
     * {@inheritdoc}
     */
    public function delay($delay, callable $callback, $data = null) {
        $delay = (int) $delay;
        if ($delay < 0) {
            throw new \InvalidArgumentException("Delay must be greater than or equal to zero");
        }
        return $this->registerWatcher(Watcher::DELAY, $callback, $data, $delay);
    }
    /**
     * {@inheritdoc}
     */
    public function repeat($interval, callable $callback, $data = null) {
        $interval = (int) $interval;
        if ($interval < 0) {
            throw new \InvalidArgumentException("Interval must be greater than or equal to zero");
        }
        return $this->registerWatcher(Watcher::REPEAT, $callback, $data, $interval);
    }
    /**
     * {@inheritdoc}
     */
    public function onReadable($stream, callable $callback, $data = null) {
        return $this->registerWatcher(Watcher::READABLE, $callback, $data, $stream);
    }
    /**
     * {@inheritdoc}
     */
    public function onWritable($stream, callable $callback, $data = null) {
        return $this->registerWatcher(Watcher::WRITABLE, $callback, $data, $stream);
    }
    /**
     * {@inheritdoc}
     *
     * @throws \AsyncInterop\Loop\UnsupportedFeatureException If the pcntl extension is not available.
     * @throws \RuntimeException If creating the backend signal handler fails.
     */
    public function onSignal($signo, callable $callback, $data = null) {
        return $this->registerWatcher(Watcher::SIGNAL, $callback, $data, $signo);
    }
    /**
     * {@inheritdoc}
     */
    public function enable($watcherIdentifier) {
        if (!isset($this->watchers[$watcherIdentifier])) {
            throw new InvalidWatcherException($watcherIdentifier, "Cannot enable an invalid watcher identifier: '{$watcherIdentifier}'");
        }
        $watcher = $this->watchers[$watcherIdentifier];
        if ($watcher->enabled) {
            return; // Watcher already enabled.
        }
        $watcher->enabled = true;
        switch ($watcher->type) {
            case Watcher::DEFER:
                $this->nextTickQueue[$watcher->id] = $watcher;
                break;
            default:
                $this->enableQueue[$watcher->id] = $watcher;
                break;
        }
    }
    /**
     * {@inheritdoc}
     */
    public function disable($watcherIdentifier) {
        if (!isset($this->watchers[$watcherIdentifier])) {
            return;
        }
        $watcher = $this->watchers[$watcherIdentifier];
        if (!$watcher->enabled) {
            return; // Watcher already disabled.
        }
        $watcher->enabled = false;
        $id = $watcher->id;
        switch ($watcher->type) {
            case Watcher::DEFER:
                if (isset($this->nextTickQueue[$id])) {
                    // Watcher was only queued to be enabled.
                    unset($this->nextTickQueue[$id]);
                } else {
                    unset($this->deferQueue[$id]);
                }
                break;
            default:
                if (isset($this->enableQueue[$id])) {
                    // Watcher was only queued to be enabled.
                    unset($this->enableQueue[$id]);
                } else {
                    $this->deactivate($watcher);
                }
                break;
        }
    }
    /**
     * {@inheritdoc}
     */
    public function cancel($watcherIdentifier) {
        $this->disable($watcherIdentifier);
        unset($this->watchers[$watcherIdentifier]);
    }
    /**
     * {@inheritdoc}
     */
    public function reference($watcherIdentifier) {
        if (!isset($this->watchers[$watcherIdentifier])) {
            throw new InvalidWatcherException($watcherIdentifier, "Cannot reference an invalid watcher identifier: '{$watcherIdentifier}'");
        }
        $this->watchers[$watcherIdentifier]->referenced = true;
    }
    /**
     * {@inheritdoc}
     */
    public function unreference($watcherIdentifier) {
        if (!isset($this->watchers[$watcherIdentifier])) {
            throw new InvalidWatcherException($watcherIdentifier, "Cannot unreference an invalid watcher identifier: '{$watcherIdentifier}'");
        }
        $this->watchers[$watcherIdentifier]->referenced = false;
    }
    /**
     * {@inheritdoc}
     */
    public function setErrorHandler(callable $callback = null) {
        $previous = $this->errorHandler;
        $this->errorHandler = $callback;
        return $previous;
    }
    /**
     * {@inheritdoc}
     */
    public function getInfo() {
        $watchers = [
            "referenced"   => 0,
            "unreferenced" => 0,
        ];
        $defer = $delay = $repeat = $onReadable = $onWritable = $onSignal = [
            "enabled"  => 0,
            "disabled" => 0,
        ];
        foreach ($this->watchers as $watcher) {
            // Select the per-type counter bucket by reference.
            switch ($watcher->type) {
                case Watcher::READABLE: $array = &$onReadable; break;
                case Watcher::WRITABLE: $array = &$onWritable; break;
                case Watcher::SIGNAL:   $array = &$onSignal; break;
                case Watcher::DEFER:    $array = &$defer; break;
                case Watcher::DELAY:    $array = &$delay; break;
                case Watcher::REPEAT:   $array = &$repeat; break;
                default: throw new \DomainException("Unknown watcher type");
            }
            if ($watcher->enabled) {
                ++$array["enabled"];
                if ($watcher->referenced) {
                    ++$watchers["referenced"];
                } else {
                    ++$watchers["unreferenced"];
                }
            } else {
                ++$array["disabled"];
            }
        }
        return [
            "watchers"    => $watchers,
            "defer"       => $defer,
            "delay"       => $delay,
            "repeat"      => $repeat,
            "on_readable" => $onReadable,
            "on_writable" => $onWritable,
            "on_signal"   => $onSignal,
            "running"     => (bool) $this->running,
        ];
    }
    /**
     * Returns the same array of data as getInfo().
     *
     * @return array
     */
    public function __debugInfo() {
        return $this->getInfo();
    }
}
| {
"content_hash": "916ea349f27f5cc6e79ce28d8b5c774a",
"timestamp": "",
"source": "github",
"line_count": 415,
"max_line_length": 142,
"avg_line_length": 27.980722891566266,
"alnum_prop": 0.5164485015501206,
"repo_name": "amphp/loop",
"id": "534397fbd419af61b97a8126ea0e807f35a98a49",
"size": "11612",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/Loop.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "43495"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="UTF-8"?>
<!--
  JavaServer Faces (JSF 2.2) application configuration.
  Registers the "i18n.mensagens" resource bundle under the EL variable
  "msg", so views can reference localized strings (e.g. #{msg['some.key']}).
-->
<faces-config xmlns="http://xmlns.jcp.org/xml/ns/javaee"
 xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
 xsi:schemaLocation="http://xmlns.jcp.org/xml/ns/javaee http://xmlns.jcp.org/xml/ns/javaee/web-facesconfig_2_2.xsd"
 version="2.2">
 <application>
 <resource-bundle>
 <!-- Classpath base name of the bundle's property files (i18n/mensagens*.properties). -->
 <base-name>i18n.mensagens</base-name>
 <!-- EL variable used in views to access the bundle. -->
 <var>msg</var>
 </resource-bundle>
 </application>
</faces-config>
| {
"content_hash": "6fbdc5a9a916d863f5811f549efbe121",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 115,
"avg_line_length": 35.833333333333336,
"alnum_prop": 0.7046511627906977,
"repo_name": "alissonwilker/br.org.j2ee-ref",
"id": "7e5053bcc57f49ea8b1177891b860648b49c1176",
"size": "430",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "j2ee-ref-libros-web/src/main/webapp/WEB-INF/faces-config.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "204"
},
{
"name": "HTML",
"bytes": "24623"
},
{
"name": "Java",
"bytes": "207176"
}
],
"symlink_target": ""
} |
package com.example.coolweather.db;
import org.litepal.crud.DataSupport;
/**
* Created by zxn on 2017-3-16.
*/
/**
 * Persistent model for a county record, stored via the LitePal ORM.
 *
 * <p>Holds the county's display name, its weather lookup identifier, and a
 * reference to the owning city record.</p>
 */
public class County extends DataSupport {

    // Primary key of this row.
    private int id;

    // Display name of the county.
    private String countyName;

    // Identifier associated with this county for weather lookups.
    private String weatherId;

    // Id of the city record this county belongs to.
    private int cityId;

    /** Returns the primary key of this record. */
    public int getId() {
        return id;
    }

    /** Sets the primary key of this record. */
    public void setId(int value) {
        this.id = value;
    }

    /** Returns the county's display name. */
    public String getCountyName() {
        return countyName;
    }

    /** Sets the county's display name. */
    public void setCountyName(String name) {
        this.countyName = name;
    }

    /** Returns the weather identifier for this county. */
    public String getWeatherId() {
        return weatherId;
    }

    /** Sets the weather identifier for this county. */
    public void setWeatherId(String identifier) {
        this.weatherId = identifier;
    }

    /** Returns the id of the owning city record. */
    public int getCityId() {
        return cityId;
    }

    /** Sets the id of the owning city record. */
    public void setCityId(int owningCityId) {
        this.cityId = owningCityId;
    }
}
| {
"content_hash": "908b1df0d0c3e3fcbdc1dd5bdb2e8723",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 50,
"avg_line_length": 18.622222222222224,
"alnum_prop": 0.6157517899761337,
"repo_name": "zxn901011/coolweather",
"id": "ce2bc442b16279a2865834bf4bfe9215ed1dadca",
"size": "838",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/src/main/java/com/example/coolweather/db/County.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "33871"
}
],
"symlink_target": ""
} |
// On click of #amigos, fetch the logged-in user's Facebook friends and
// render their profile pictures into #profile_pics using XFBML tags.
jQuery(document).ready(function(){
  var profilePicsDiv = document.getElementById('profile_pics');
  $("#amigos").click(function(){
    // Bail out quietly if the Facebook SDK never loaded.
    if (typeof(FB) == 'undefined' || FB == null) {
      return;
    }
    FB.getLoginStatus(function(response) {
      if (!response.authResponse) {
        profilePicsDiv.innerHTML = '<em>You are not connected</em>';
        return;
      }
      FB.api({ method: 'friends.get' }, function(result) {
        Log.info('friends.get response', result);
        var markup = '';
        // Guard against a missing/failed result before reading .length.
        // (The original computed this guard but then overwrote it with an
        // unconditional result.length, which throws when result is null.)
        var numFriends = result ? result.length : 0;
        for (var i = 0; i < numFriends; i++) {
          markup += (
            '<fb:profile-pic size="square" ' +
            'uid="' + result[i] + '" ' +
            'facebook-logo="true"' +
            '></fb:profile-pic>'
          );
        }
        profilePicsDiv.innerHTML = markup;
        // Re-parse so the injected <fb:profile-pic> tags are rendered.
        FB.XFBML.parse(profilePicsDiv);
      });
    });
  });
});
| {
"content_hash": "3366e102f175f00f2f8612f4a190b616",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 72,
"avg_line_length": 34.31428571428572,
"alnum_prop": 0.4621149042464613,
"repo_name": "carlosbeatortega/sociedades",
"id": "4803129677e4c24c0defba7ac1bde228e4604ee3",
"size": "1201",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "web/js/myFacebook.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "358899"
},
{
"name": "JavaScript",
"bytes": "485611"
},
{
"name": "PHP",
"bytes": "550901"
},
{
"name": "Shell",
"bytes": "633"
}
],
"symlink_target": ""
} |
<!doctype html>
<!-- Generated AngularJS docs example: demonstrates the ng-bind-template
     directive interpolating multiple scope expressions into one element. -->
<html lang="en">
  <head>
    <meta charset="UTF-8">
    <title>Example - example-example59-production</title>
  <script src="//ajax.googleapis.com/ajax/libs/angularjs/1.3.0/angular.min.js"></script>
</head>
<body ng-app="bindExample">
  <script>
  // Minimal module/controller seeding the two scope values bound below.
  angular.module('bindExample', [])
    .controller('ExampleController', ['$scope', function($scope) {
      $scope.salutation = 'Hello';
      $scope.name = 'World';
    }]);
</script>
<div ng-controller="ExampleController">
 Salutation: <input type="text" ng-model="salutation"><br>
 Name: <input type="text" ng-model="name"><br>
 <!-- ng-bind-template re-renders as either input changes. -->
 <pre ng-bind-template="{{salutation}} {{name}}!"></pre>
</div>
</body>
</html>
"content_hash": "5da02f3bdd822d3b83e13a80fe3d911a",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 88,
"avg_line_length": 26.037037037037038,
"alnum_prop": 0.6187766714082503,
"repo_name": "jeros-mz/angular.mobile.prototype",
"id": "fafcc871d3f218423cfb44140ff043adec539bf0",
"size": "703",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "scripts/angular/docs/examples/example-example59/index-production.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "47048"
},
{
"name": "JavaScript",
"bytes": "227256"
}
],
"symlink_target": ""
} |
<!doctype html>
<html class="default no-js">
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<title>LocationServices | @uirouter/react</title>
<meta name="description" content="">
<meta name="viewport" content="width=device-width, initial-scale=1">
<link rel="stylesheet" href="../assets/css/main.css">
<link rel="stylesheet" href="../assets/css/uirouter.css">
<script src="../assets/js/modernizr.js"></script>
<script src="../assets/js/reset.js"></script>
</head>
<body>
<header>
<div class="tsd-page-toolbar">
<div class="container">
<div class="table-wrap">
<div class="table-cell" id="tsd-search" data-index="../assets/js/search.js" data-base="..">
<div class="field">
<label for="tsd-search-field" class="tsd-widget search no-caption">Search</label>
<input id="tsd-search-field" type="text" />
</div>
<ul class="results">
<li class="state loading">Preparing search index...</li>
<li class="state failure">The search index is not available</li>
</ul>
<a href="../index.html" class="title">@uirouter/react</a>
</div>
<div class="table-cell" id="tsd-widgets">
<div id="tsd-filter">
<a href="#" class="tsd-widget options no-caption" data-toggle="options">Options</a>
<div class="tsd-filter-group">
<!--
<div class="tsd-select" id="tsd-filter-visibility">
<span class="tsd-select-label">All</span>
<ul class="tsd-select-list">
<li data-value="public">Public</li>
<li data-value="protected">Public/Protected</li>
<li data-value="private" class="selected">All</li>
</ul>
</div>
<input type="checkbox" id="tsd-filter-inherited" checked />
<label class="tsd-widget" for="tsd-filter-inherited">Inherited</label>
-->
<input type="checkbox" id="tsd-filter-externals" checked />
<label class="tsd-widget" for="tsd-filter-externals">Internal UI-Router API</label>
<!--
<input type="checkbox" id="tsd-filter-only-exported" />
<label class="tsd-widget" for="tsd-filter-only-exported">Only exported</label>
-->
</div>
</div>
<a href="#" class="tsd-widget menu no-caption" data-toggle="menu">Menu</a>
</div>
</div>
</div>
</div>
<div class="tsd-page-title">
<div class="container">
<ul class="tsd-breadcrumb">
<li>
<a href="../index.html">@uirouter/react</a>
</li>
<li>
<a href="../modules/common.html">common</a>
</li>
<li>
<a href="common.locationservices.html">LocationServices</a>
</li>
</ul>
<h1>Interface LocationServices</h1>
</div>
</div>
</header>
<div class="container container-main">
<div class="row">
<div class="col-8 col-content">
<section class="tsd-panel tsd-comment">
<div class="tsd-comment tsd-typography">
<div class="lead">
<p>Handles low level URL read/write</p>
</div>
<p>This service handles low level reads and updates of the URL and listens for url changes.
Implementors should pass these through to the underlying URL mechanism.
The underlying URL mechanism might be browser APIs, framework APIs, or some 3rd party URL management library.</p>
<p>UI-Router Core includes three basic implementations:</p>
<ul>
<li><a href="../classes/vanilla.pushstatelocationservice.html">PushStateLocationService</a></li>
<li><a href="../classes/vanilla.hashlocationservice.html">HashLocationService</a></li>
<li><a href="../classes/vanilla.memorylocationservice.html">MemoryLocationService</a></li>
</ul>
</div>
</section>
<section class="tsd-panel tsd-hierarchy">
<h3>Hierarchy</h3>
<ul class="tsd-hierarchy">
<li>
<a href="core.disposable.html" class="tsd-signature-type">Disposable</a>
<ul class="tsd-hierarchy">
<li>
<span class="target">LocationServices</span>
</li>
</ul>
</li>
</ul>
</section>
<section class="tsd-panel">
<h3>Implemented by</h3>
<ul class="tsd-hierarchy">
<li><a href="../classes/vanilla.baselocationservices.html" class="tsd-signature-type">BaseLocationServices</a></li>
<li><a href="../classes/vanilla.hashlocationservice.html" class="tsd-signature-type">HashLocationService</a></li>
<li><a href="../classes/vanilla.memorylocationservice.html" class="tsd-signature-type">MemoryLocationService</a></li>
<li><a href="../classes/vanilla.pushstatelocationservice.html" class="tsd-signature-type">PushStateLocationService</a></li>
<li><a href="../classes/url.urlservice.html" class="tsd-signature-type">UrlService</a></li>
</ul>
</section>
<section class="tsd-panel-group tsd-index-group">
<h2>Index</h2>
<section class="tsd-panel tsd-index-panel">
<div class="tsd-index-content">
<section class="tsd-index-section ">
<h3>Properties</h3>
<ul class="tsd-index-list">
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="common.locationservices.html#hash" class="tsd-kind-icon">hash</a></li>
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="common.locationservices.html#onchange" class="tsd-kind-icon">on<wbr>Change</a></li>
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="common.locationservices.html#path" class="tsd-kind-icon">path</a></li>
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="common.locationservices.html#search" class="tsd-kind-icon">search</a></li>
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="common.locationservices.html#url" class="tsd-kind-icon">url</a></li>
</ul>
</section>
<section class="tsd-index-section tsd-is-inherited">
<h3>Methods</h3>
<ul class="tsd-index-list">
<li class="tsd-kind-method tsd-parent-kind-interface tsd-is-inherited"><a href="common.locationservices.html#dispose" class="tsd-kind-icon">dispose</a></li>
</ul>
</section>
</div>
</section>
</section>
<section class="tsd-panel-group tsd-member-group ">
<h2>Properties</h2>
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
<a name="hash" class="tsd-anchor"></a>
<!--
<h3>hash</h3>
-->
<div class="tsd-signature tsd-kind-icon">hash<span class="tsd-signature-symbol">:</span> <a href="" class="tsd-signature-type">hash</a> <div class="tsd-header">
<p> See: <a href="../classes/url.urlservice.html#hash">UrlService.hash</a> </p>
</div>
</div>
<div class="tsd-declaration">
<div class="tsd-comment tsd-typography">
<div class="lead">
<p>See: <a href="../classes/url.urlservice.html#hash">UrlService.hash</a></p>
</div>
</div>
</div>
<aside class="tsd-sources">
<ul>
<li>Defined in <a href="https://github.com/ui-router/core/blob/8ed691b/src/common/coreservices.ts#L68">ui-router-core/src/common/coreservices.ts:68</a></li>
</ul>
</aside>
</section>
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
<a name="onchange" class="tsd-anchor"></a>
<!--
<h3>on<wbr>Change</h3>
-->
<div class="tsd-signature tsd-kind-icon">on<wbr>Change<span class="tsd-signature-symbol">:</span> <a href="" class="tsd-signature-type">onChange</a> <div class="tsd-header">
<p> See: <a href="../classes/url.urlservice.html#onchange">UrlService.onChange</a> </p>
</div>
</div>
<div class="tsd-declaration">
<div class="tsd-comment tsd-typography">
<div class="lead">
<p>See: <a href="../classes/url.urlservice.html#onchange">UrlService.onChange</a></p>
</div>
</div>
</div>
<aside class="tsd-sources">
<ul>
<li>Defined in <a href="https://github.com/ui-router/core/blob/8ed691b/src/common/coreservices.ts#L69">ui-router-core/src/common/coreservices.ts:69</a></li>
</ul>
</aside>
</section>
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
<a name="path" class="tsd-anchor"></a>
<!--
<h3>path</h3>
-->
<div class="tsd-signature tsd-kind-icon">path<span class="tsd-signature-symbol">:</span> <a href="" class="tsd-signature-type">path</a> <div class="tsd-header">
<p> See: <a href="../classes/url.urlservice.html#path">UrlService.path</a> </p>
</div>
</div>
<div class="tsd-declaration">
<div class="tsd-comment tsd-typography">
<div class="lead">
<p>See: <a href="../classes/url.urlservice.html#path">UrlService.path</a></p>
</div>
</div>
</div>
<aside class="tsd-sources">
<ul>
<li>Defined in <a href="https://github.com/ui-router/core/blob/8ed691b/src/common/coreservices.ts#L66">ui-router-core/src/common/coreservices.ts:66</a></li>
</ul>
</aside>
</section>
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
<a name="search" class="tsd-anchor"></a>
<!--
<h3>search</h3>
-->
<div class="tsd-signature tsd-kind-icon">search<span class="tsd-signature-symbol">:</span> <a href="" class="tsd-signature-type">search</a> <div class="tsd-header">
<p> See: <a href="../classes/url.urlservice.html#search">UrlService.search</a> </p>
</div>
</div>
<div class="tsd-declaration">
<div class="tsd-comment tsd-typography">
<div class="lead">
<p>See: <a href="../classes/url.urlservice.html#search">UrlService.search</a></p>
</div>
</div>
</div>
<aside class="tsd-sources">
<ul>
<li>Defined in <a href="https://github.com/ui-router/core/blob/8ed691b/src/common/coreservices.ts#L67">ui-router-core/src/common/coreservices.ts:67</a></li>
</ul>
</aside>
</section>
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
<a name="url" class="tsd-anchor"></a>
<!--
<h3>url</h3>
-->
<div class="tsd-signature tsd-kind-icon">url<span class="tsd-signature-symbol">:</span> <a href="" class="tsd-signature-type">url</a> <div class="tsd-header">
<p> See: <a href="../classes/url.urlservice.html#url">UrlService.url</a> </p>
</div>
</div>
<div class="tsd-declaration">
<div class="tsd-comment tsd-typography">
<div class="lead">
<p>See: <a href="../classes/url.urlservice.html#url">UrlService.url</a></p>
</div>
</div>
</div>
<aside class="tsd-sources">
<ul>
<li>Defined in <a href="https://github.com/ui-router/core/blob/8ed691b/src/common/coreservices.ts#L65">ui-router-core/src/common/coreservices.ts:65</a></li>
</ul>
</aside>
</section>
</section>
<section class="tsd-panel-group tsd-member-group tsd-is-inherited">
<h2>Methods</h2>
<section class="tsd-panel tsd-member tsd-kind-method tsd-parent-kind-interface tsd-is-inherited">
<a name="dispose" class="tsd-anchor"></a>
<!--
<h3>dispose</h3>
-->
<ul class="tsd-signatures tsd-kind-method tsd-parent-kind-interface tsd-is-inherited">
<li class="tsd-signature tsd-kind-icon">dispose<span class="tsd-signature-symbol">(</span>router<span class="tsd-signature-symbol">?: </span><a href="../classes/core.uirouter.html" class="tsd-signature-type">UIRouter</a><span class="tsd-signature-symbol">)</span><span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">any</span></li>
<li class="tsd-header">
<p> Instructs the Disposable to clean up any resources </p>
</li>
</ul>
<ul class="tsd-descriptions">
<li class="tsd-description">
<div class="tsd-comment tsd-typography">
<div class="lead">
<p>Instructs the Disposable to clean up any resources</p>
</div>
</div>
<h4 class="tsd-parameters-title">Parameters</h4>
<ul class="tsd-parameters">
<li>
<h5>router: <span class="tsd-flag ts-flagOptional">Optional</span> <a href="../classes/core.uirouter.html" class="tsd-signature-type">UIRouter</a></h5>
</li>
</ul>
<div class="tsd-returns">
<h4 class="tsd-returns-title">Returns <span class="tsd-signature-type">any</span></h4>
</div>
<hr>
<aside class="tsd-sources">
<p>Inherited from <a href="core.disposable.html">Disposable</a>.<a href="core.disposable.html#dispose">dispose</a></p>
<ul>
<li>Defined in <a href="https://github.com/ui-router/core/blob/8ed691b/src/interface.ts#L121">ui-router-core/src/interface.ts:121</a></li>
</ul>
</aside> </li>
</ul>
</section>
</section>
</div>
<div class="col-4 col-menu menu-sticky-wrap menu-highlight">
<nav class="tsd-navigation primary">
<ul>
<li class="globals ">
<a href="../index.html"><em>@uirouter/react</em></a>
</li>
<li class="label tsd-is-external">
<span>Public API</span>
</li>
<li class="current tsd-kind-external-module">
<a href="../modules/common.html">common</a>
</li>
<li class=" tsd-kind-external-module">
<a href="../modules/components.html">components</a>
</li>
<li class=" tsd-kind-external-module">
<a href="../modules/core.html">core</a>
</li>
<li class=" tsd-kind-external-module">
<a href="../modules/params.html">params</a>
</li>
<li class=" tsd-kind-external-module">
<a href="../modules/react.html">react</a>
</li>
<li class=" tsd-kind-external-module">
<a href="../modules/resolve.html">resolve</a>
</li>
<li class=" tsd-kind-external-module">
<a href="../modules/state.html">state</a>
</li>
<li class=" tsd-kind-external-module">
<a href="../modules/transition.html">transition</a>
</li>
<li class=" tsd-kind-external-module">
<a href="../modules/url.html">url</a>
</li>
<li class=" tsd-kind-external-module">
<a href="../modules/view.html">view</a>
</li>
<li class="label tsd-is-external">
<span>Internal UI-<wbr><wbr>Router API</span>
</li>
<li class=" tsd-kind-external-module tsd-is-external">
<a href="../modules/common_hof.html">common_<wbr>hof</a>
</li>
<li class=" tsd-kind-external-module tsd-is-external">
<a href="../modules/common_predicates.html">common_<wbr>predicates</a>
</li>
<li class=" tsd-kind-external-module tsd-is-external">
<a href="../modules/common_strings.html">common_<wbr>strings</a>
</li>
<li class=" tsd-kind-external-module tsd-is-external">
<a href="../modules/hooks.html">hooks</a>
</li>
<li class=" tsd-kind-external-module tsd-is-external">
<a href="../modules/path.html">path</a>
</li>
<li class=" tsd-kind-external-module tsd-is-external">
<a href="../modules/trace.html">trace</a>
</li>
<li class=" tsd-kind-external-module tsd-is-external">
<a href="../modules/vanilla.html">vanilla</a>
</li>
</ul>
</nav>
<nav class="tsd-navigation secondary menu-sticky">
<ul class="before-current">
</ul>
<ul class="current">
<li class="current tsd-kind-interface tsd-parent-kind-external-module">
<a href="common.locationservices.html" class="tsd-kind-icon">Location<wbr>Services</a>
<ul>
<li class=" tsd-kind-property tsd-parent-kind-interface">
<a href="common.locationservices.html#hash" class="tsd-kind-icon">hash</a>
</li>
<li class=" tsd-kind-property tsd-parent-kind-interface">
<a href="common.locationservices.html#onchange" class="tsd-kind-icon">on<wbr>Change</a>
</li>
<li class=" tsd-kind-property tsd-parent-kind-interface">
<a href="common.locationservices.html#path" class="tsd-kind-icon">path</a>
</li>
<li class=" tsd-kind-property tsd-parent-kind-interface">
<a href="common.locationservices.html#search" class="tsd-kind-icon">search</a>
</li>
<li class=" tsd-kind-property tsd-parent-kind-interface">
<a href="common.locationservices.html#url" class="tsd-kind-icon">url</a>
</li>
<li class=" tsd-kind-method tsd-parent-kind-interface tsd-is-inherited">
<a href="common.locationservices.html#dispose" class="tsd-kind-icon">dispose</a>
</li>
</ul>
</li>
</ul>
<ul class="after-current">
</ul>
</nav>
</div>
</div>
</div>
<footer class="with-border-bottom">
<div class="container">
<h2>Legend</h2>
<div class="tsd-legend-group">
<ul class="tsd-legend">
<li class="tsd-kind-module"><span class="tsd-kind-icon">Module</span></li>
<li class="tsd-kind-object-literal"><span class="tsd-kind-icon">Object literal</span></li>
<li class="tsd-kind-variable"><span class="tsd-kind-icon">Variable</span></li>
<li class="tsd-kind-function"><span class="tsd-kind-icon">Function</span></li>
<li class="tsd-kind-function tsd-has-type-parameter"><span class="tsd-kind-icon">Function with type parameter</span></li>
<li class="tsd-kind-index-signature"><span class="tsd-kind-icon">Index signature</span></li>
<li class="tsd-kind-type-alias"><span class="tsd-kind-icon">Type alias</span></li>
</ul>
<ul class="tsd-legend">
<li class="tsd-kind-enum"><span class="tsd-kind-icon">Enumeration</span></li>
<li class="tsd-kind-enum-member"><span class="tsd-kind-icon">Enumeration member</span></li>
<li class="tsd-kind-property tsd-parent-kind-enum"><span class="tsd-kind-icon">Property</span></li>
<li class="tsd-kind-method tsd-parent-kind-enum"><span class="tsd-kind-icon">Method</span></li>
</ul>
<ul class="tsd-legend">
<li class="tsd-kind-interface"><span class="tsd-kind-icon">Interface</span></li>
<li class="tsd-kind-interface tsd-has-type-parameter"><span class="tsd-kind-icon">Interface with type parameter</span></li>
<li class="tsd-kind-constructor tsd-parent-kind-interface"><span class="tsd-kind-icon">Constructor</span></li>
<li class="tsd-kind-property tsd-parent-kind-interface"><span class="tsd-kind-icon">Property</span></li>
<li class="tsd-kind-method tsd-parent-kind-interface"><span class="tsd-kind-icon">Method</span></li>
<li class="tsd-kind-index-signature tsd-parent-kind-interface"><span class="tsd-kind-icon">Index signature</span></li>
</ul>
<ul class="tsd-legend">
<li class="tsd-kind-class"><span class="tsd-kind-icon">Class</span></li>
<li class="tsd-kind-class tsd-has-type-parameter"><span class="tsd-kind-icon">Class with type parameter</span></li>
<li class="tsd-kind-constructor tsd-parent-kind-class"><span class="tsd-kind-icon">Constructor</span></li>
<li class="tsd-kind-property tsd-parent-kind-class"><span class="tsd-kind-icon">Property</span></li>
<li class="tsd-kind-method tsd-parent-kind-class"><span class="tsd-kind-icon">Method</span></li>
<li class="tsd-kind-accessor tsd-parent-kind-class"><span class="tsd-kind-icon">Accessor</span></li>
<li class="tsd-kind-index-signature tsd-parent-kind-class"><span class="tsd-kind-icon">Index signature</span></li>
</ul>
<ul class="tsd-legend">
<li class="tsd-kind-constructor tsd-parent-kind-class tsd-is-inherited"><span class="tsd-kind-icon">Inherited constructor</span></li>
<li class="tsd-kind-property tsd-parent-kind-class tsd-is-inherited"><span class="tsd-kind-icon">Inherited property</span></li>
<li class="tsd-kind-method tsd-parent-kind-class tsd-is-inherited"><span class="tsd-kind-icon">Inherited method</span></li>
<li class="tsd-kind-accessor tsd-parent-kind-class tsd-is-inherited"><span class="tsd-kind-icon">Inherited accessor</span></li>
</ul>
<ul class="tsd-legend">
<li class="tsd-kind-property tsd-parent-kind-class tsd-is-protected"><span class="tsd-kind-icon">Protected property</span></li>
<li class="tsd-kind-method tsd-parent-kind-class tsd-is-protected"><span class="tsd-kind-icon">Protected method</span></li>
<li class="tsd-kind-accessor tsd-parent-kind-class tsd-is-protected"><span class="tsd-kind-icon">Protected accessor</span></li>
</ul>
<ul class="tsd-legend">
<li class="tsd-kind-property tsd-parent-kind-class tsd-is-private"><span class="tsd-kind-icon">Private property</span></li>
<li class="tsd-kind-method tsd-parent-kind-class tsd-is-private"><span class="tsd-kind-icon">Private method</span></li>
<li class="tsd-kind-accessor tsd-parent-kind-class tsd-is-private"><span class="tsd-kind-icon">Private accessor</span></li>
</ul>
<ul class="tsd-legend">
<li class="tsd-kind-property tsd-parent-kind-class tsd-is-static"><span class="tsd-kind-icon">Static property</span></li>
<li class="tsd-kind-call-signature tsd-parent-kind-class tsd-is-static"><span class="tsd-kind-icon">Static method</span></li>
</ul>
</div>
</div>
</footer>
<div class="container tsd-generator">
<p>Generated using <a href="http://typedoc.io" target="_blank">TypeDoc</a></p>
</div>
<div class="overlay"></div>
<script src="../assets/js/main.js"></script>
<script>if (location.protocol == 'file:') document.write('<script src="../assets/js/search.js"><' + '/script>');</script>
</body>
</html> | {
"content_hash": "6f3969bd9881bc39863ee5125d1061fd",
"timestamp": "",
"source": "github",
"line_count": 460,
"max_line_length": 361,
"avg_line_length": 47.2304347826087,
"alnum_prop": 0.6235386173248643,
"repo_name": "ui-router/ui-router.github.io",
"id": "7b6392e5c1438f5e5e6f10d0d042a3593f0b3870",
"size": "21734",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "_react_docs/0.8.7/interfaces/common.locationservices.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "4640890"
},
{
"name": "HTML",
"bytes": "276915023"
},
{
"name": "JavaScript",
"bytes": "831953"
},
{
"name": "Ruby",
"bytes": "140"
},
{
"name": "SCSS",
"bytes": "66332"
},
{
"name": "Shell",
"bytes": "5090"
}
],
"symlink_target": ""
} |
<?php
namespace Checkout\Models\Payments;
use Checkout\Models\Address;
/**
 * Payment source for the Fawry payment method.
 *
 * @category SDK
 * @package  Checkout.com
 * @author   Platforms Development Team <platforms@checkout.com>
 * @license  https://opensource.org/licenses/mit-license.html MIT License
 * @link     https://docs.checkout.com/
 */
class FawrySource extends IdSource
{
    /**
     * Name of the model.
     *
     * @var string
     */
    const MODEL_NAME = 'fawry';

    /**
     * Qualified name of the class.
     *
     * @var string
     */
    const QUALIFIED_NAME = __CLASS__;

    /**
     * Qualified namespace of the class.
     *
     * @var string
     */
    const QUALIFIED_NAMESPACE = __NAMESPACE__;

    /**
     * Build a Fawry payment source.
     *
     * Note: the assignment order below is kept deliberately, since these
     * properties are created dynamically on the source object.
     *
     * @param string    $email       The customer's email address
     * @param string    $mobile      The customer's mobile number
     * @param string    $description The description of the payment
     * @param Product[] $products    This object is passed directly to Fawry as products.
     */
    public function __construct($email, $mobile, $description, array $products)
    {
        $this->type = static::MODEL_NAME;
        $this->customer_email = $email;
        $this->customer_mobile = $mobile;
        $this->description = $description;
        $this->products = $products;
    }
}
| {
"content_hash": "b1a6683d4d34b9b185a2e5b9808b52a5",
"timestamp": "",
"source": "github",
"line_count": 63,
"max_line_length": 100,
"avg_line_length": 22.984126984126984,
"alnum_prop": 0.574585635359116,
"repo_name": "checkout/checkout-magento-plugin",
"id": "978d03f6a0b10a4607167ad5fc1981e3b714e484",
"size": "1889",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "lib/checkout-sdk-php/src/Models/Payments/FawrySource.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "33327"
},
{
"name": "Gherkin",
"bytes": "7657"
},
{
"name": "HTML",
"bytes": "67262"
},
{
"name": "JavaScript",
"bytes": "30812"
},
{
"name": "PHP",
"bytes": "410282"
}
],
"symlink_target": ""
} |
module Gesmew
  # Mixed into controllers to expose authentication-related helpers.
  # The four gesmew_* methods below are also registered as view helpers
  # on the including class when the module is included.
  module AuthenticationHelpers
    def self.included(receiver)
      %i[gesmew_current_user gesmew_login_path gesmew_signup_path gesmew_logout_path].each do |helper|
        receiver.send :helper_method, helper
      end
    end

    # Delegates to the underlying +current_gesmew_user+ helper.
    def gesmew_current_user
      current_gesmew_user
    end

    # Delegates to the engine's login route helper.
    def gesmew_login_path
      gesmew.login_path
    end

    # Delegates to the engine's signup route helper.
    def gesmew_signup_path
      gesmew.signup_path
    end

    # Delegates to the engine's logout route helper.
    def gesmew_logout_path
      gesmew.logout_path
    end
  end
end
| {
"content_hash": "2beb3618c85e61e1fa2538de3c69693f",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 56,
"avg_line_length": 21.653846153846153,
"alnum_prop": 0.6802841918294849,
"repo_name": "mjgumbs/gesmew_auth_devise",
"id": "600ddbc044b0ca500c5df34d2c3dab392c604ed1",
"size": "563",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/gesmew/authentication_helpers.rb",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "14023"
},
{
"name": "Ruby",
"bytes": "52919"
}
],
"symlink_target": ""
} |
package com.microsoftopentechnologies.tooling.msservices.helpers.azure.sdk;
import com.google.common.base.Strings;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.SettableFuture;
import com.microsoft.azure.storage.CloudStorageAccount;
import com.microsoft.azure.storage.blob.CloudBlobClient;
import com.microsoft.azure.storage.blob.CloudBlobContainer;
import com.microsoft.windowsazure.Configuration;
import com.microsoft.windowsazure.core.OperationResponse;
import com.microsoft.windowsazure.core.OperationStatus;
import com.microsoft.windowsazure.core.OperationStatusResponse;
import com.microsoft.windowsazure.core.pipeline.apache.ApacheConfigurationProperties;
import com.microsoft.windowsazure.core.utils.Base64;
import com.microsoft.windowsazure.core.utils.KeyStoreType;
import com.microsoft.windowsazure.exception.ServiceException;
import com.microsoft.windowsazure.management.*;
import com.microsoft.windowsazure.management.compute.*;
import com.microsoft.windowsazure.management.compute.models.*;
import com.microsoft.windowsazure.management.configuration.ManagementConfiguration;
import com.microsoft.windowsazure.management.models.AffinityGroupListResponse;
import com.microsoft.windowsazure.management.models.LocationsListResponse;
import com.microsoft.windowsazure.management.models.RoleSizeListResponse;
import com.microsoft.windowsazure.management.network.NetworkManagementClient;
import com.microsoft.windowsazure.management.network.NetworkManagementService;
import com.microsoft.windowsazure.management.network.NetworkOperations;
import com.microsoft.windowsazure.management.network.models.NetworkListResponse;
import com.microsoft.windowsazure.management.storage.StorageAccountOperations;
import com.microsoft.windowsazure.management.storage.StorageManagementClient;
import com.microsoft.windowsazure.management.storage.StorageManagementService;
import com.microsoft.windowsazure.management.storage.models.*;
import com.microsoftopentechnologies.tooling.msservices.components.DefaultLoader;
import com.microsoftopentechnologies.tooling.msservices.helpers.NotNull;
import com.microsoftopentechnologies.tooling.msservices.helpers.Nullable;
import com.microsoftopentechnologies.tooling.msservices.helpers.OpenSSLHelper;
import com.microsoftopentechnologies.tooling.msservices.helpers.azure.AzureCmdException;
import com.microsoftopentechnologies.tooling.msservices.helpers.azure.AzureManagerImpl;
import com.microsoftopentechnologies.tooling.msservices.model.storage.ClientStorageAccount;
import com.microsoftopentechnologies.tooling.msservices.model.storage.StorageAccount;
import com.microsoftopentechnologies.tooling.msservices.model.vm.*;
import org.xml.sax.SAXException;
import javax.security.cert.X509Certificate;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.xpath.XPathExpressionException;
import java.io.*;
import java.net.URI;
import java.net.URISyntaxException;
import java.security.*;
import java.security.cert.CertificateException;
import java.util.*;
import java.util.concurrent.ExecutionException;
public class AzureSDKHelper {
    /**
     * Raw role-instance status strings returned by the Azure Service
     * Management API. The SDK surfaces these as plain strings, so they are
     * collected here as named literals (used by status-mapping code elsewhere
     * in this class, e.g. getVMStatus — not shown in this excerpt).
     */
    private static class StatusLiterals {
        private static final String UNKNOWN = "Unknown";
        private static final String READY_ROLE = "ReadyRole";
        private static final String STOPPED_VM = "StoppedVM";
        private static final String STOPPED_DEALLOCATED = "StoppedDeallocated";
        private static final String BUSY_ROLE = "BusyRole";
        private static final String CREATING_VM = "CreatingVM";
        private static final String CREATING_ROLE = "CreatingRole";
        private static final String STARTING_VM = "StartingVM";
        private static final String STARTING_ROLE = "StartingRole";
        private static final String STOPPING_VM = "StoppingVM";
        private static final String STOPPING_ROLE = "StoppingRole";
        private static final String DELETING_VM = "DeletingVM";
        private static final String RESTARTING_ROLE = "RestartingRole";
        private static final String CYCLING_ROLE = "CyclingRole";
        private static final String FAILED_STARTING_VM = "FailedStartingVM";
        private static final String FAILED_STARTING_ROLE = "FailedStartingRole";
        private static final String UNRESPONSIVE_ROLE = "UnresponsiveRole";
        private static final String PREPARING = "Preparing";
    }
    // Role type string identifying IaaS virtual machine roles in deployments.
    private static final String PERSISTENT_VM_ROLE = "PersistentVMRole";
    // Configuration-set type carrying endpoint/subnet settings for a role.
    private static final String NETWORK_CONFIGURATION = "NetworkConfiguration";
    // Image category literals: gallery ("Platform") images vs. user-captured images.
    private static final String PLATFORM_IMAGE = "Platform";
    private static final String USER_IMAGE = "User";
    // Operating system family identifiers used by VM images.
    private static final String WINDOWS_OS_TYPE = "Windows";
    private static final String LINUX_OS_TYPE = "Linux";
    // Provisioning configuration-set types for Windows and Linux roles.
    private static final String WINDOWS_PROVISIONING_CONFIGURATION = "WindowsProvisioningConfiguration";
    private static final String LINUX_PROVISIONING_CONFIGURATION = "LinuxProvisioningConfiguration";
    // Hex-digit lookup table — presumably used by bytesToHex (see
    // createServiceCertificate's thumbprint computation); definition of
    // bytesToHex is outside this excerpt.
    private static final char[] HEX_ARRAY = "0123456789ABCDEF".toCharArray();
@NotNull
public static SDKRequestCallback<List<CloudService>, ComputeManagementClient> getCloudServices(@NotNull final String subscriptionId) {
return new SDKRequestCallback<List<CloudService>, ComputeManagementClient>() {
@NotNull
@Override
public List<CloudService> execute(@NotNull ComputeManagementClient client)
throws Throwable {
List<CloudService> csList = new ArrayList<CloudService>();
ArrayList<HostedServiceListResponse.HostedService> hostedServices = getHostedServices(client).getHostedServices();
if (hostedServices == null) {
return csList;
}
for (HostedServiceListResponse.HostedService hostedService : hostedServices) {
ListenableFuture<DeploymentGetResponse> productionFuture = getDeploymentAsync(
client,
hostedService.getServiceName(),
DeploymentSlot.Production);
ListenableFuture<DeploymentGetResponse> stagingFuture = getDeploymentAsync(
client,
hostedService.getServiceName(),
DeploymentSlot.Staging);
DeploymentGetResponse prodDGR = productionFuture.get();
DeploymentGetResponse stagingDGR = stagingFuture.get();
CloudService cloudService = new CloudService(
hostedService.getServiceName() != null ? hostedService.getServiceName() : "",
hostedService.getProperties() != null && hostedService.getProperties().getLocation() != null ?
hostedService.getProperties().getLocation() :
"",
hostedService.getProperties() != null && hostedService.getProperties().getAffinityGroup() != null ?
hostedService.getProperties().getAffinityGroup() :
"",
subscriptionId);
loadDeployment(prodDGR, cloudService);
cloudService = loadDeployment(prodDGR, cloudService);
cloudService = loadDeployment(stagingDGR, cloudService);
csList.add(cloudService);
}
return csList;
}
};
}
@NotNull
public static SDKRequestCallback<List<VirtualMachine>, ComputeManagementClient> getVirtualMachines(@NotNull final String subscriptionId) {
return new SDKRequestCallback<List<VirtualMachine>, ComputeManagementClient>() {
@NotNull
@Override
public List<VirtualMachine> execute(@NotNull ComputeManagementClient client)
throws Throwable {
List<VirtualMachine> vmList = new ArrayList<VirtualMachine>();
ArrayList<HostedServiceListResponse.HostedService> hostedServices = getHostedServices(client).getHostedServices();
if (hostedServices == null) {
return vmList;
}
for (HostedServiceListResponse.HostedService hostedService : hostedServices) {
String serviceName = hostedService.getServiceName() != null ? hostedService.getServiceName() : "";
vmList = loadVirtualMachines(client, subscriptionId, serviceName, vmList);
}
return vmList;
}
};
}
    /**
     * Builds a request callback that refreshes the mutable fields of the given
     * {@link VirtualMachine} (deployment name, availability set, size, status,
     * endpoints) from the service's current deployment information.
     *
     * @param vm virtual machine to refresh; mutated in place and returned
     * @return callback producing the same, refreshed {@code vm} instance
     */
    @NotNull
    public static SDKRequestCallback<VirtualMachine, ComputeManagementClient> refreshVirtualMachineInformation(@NotNull final VirtualMachine vm) {
        return new SDKRequestCallback<VirtualMachine, ComputeManagementClient>() {
            @NotNull
            @Override
            public VirtualMachine execute(@NotNull ComputeManagementClient client)
                    throws Throwable {
                DeploymentGetResponse deployment = getDeployment(client, vm);
                List<Role> roles = getVMDeploymentRoles(deployment);
                // Locate the role that represents this VM: it must be a
                // persistent VM role whose role name matches the VM name.
                Role vmRole = null;
                for (Role role : roles) {
                    if (PERSISTENT_VM_ROLE.equals(role.getRoleType()) && vm.getName().equals(role.getRoleName())) {
                        vmRole = role;
                        break;
                    }
                }
                if (vmRole == null) {
                    throw new Exception("Invalid Virtual Machine information. No Roles match the VM data.");
                }
                // Copy the current deployment/role state onto the VM, mapping
                // missing values to empty strings.
                vm.setDeploymentName(deployment.getName() != null ? deployment.getName() : "");
                vm.setAvailabilitySet(vmRole.getAvailabilitySetName() != null ? vmRole.getAvailabilitySetName() : "");
                vm.setSize(vmRole.getRoleSize() != null ? vmRole.getRoleSize() : "");
                vm.setStatus(getVMStatus(deployment, vmRole));
                // Rebuild the endpoint list from the role's network configuration.
                vm.getEndpoints().clear();
                loadNetworkConfiguration(vmRole, vm);
                return vm;
            }
        };
    }
@NotNull
public static SDKRequestCallback<Void, ComputeManagementClient> startVirtualMachine(@NotNull final VirtualMachine vm) {
return new SDKRequestCallback<Void, ComputeManagementClient>() {
@NotNull
@Override
public Void execute(@NotNull ComputeManagementClient client)
throws Throwable {
VirtualMachineOperations vmo = getVirtualMachineOperations(client);
OperationStatusResponse osr = vmo.start(vm.getServiceName(), vm.getDeploymentName(), vm.getName());
validateOperationStatus(osr);
return null;
}
};
}
@NotNull
public static SDKRequestCallback<Void, ComputeManagementClient> shutdownVirtualMachine(@NotNull final VirtualMachine vm,
final boolean deallocate) {
return new SDKRequestCallback<Void, ComputeManagementClient>() {
@NotNull
@Override
public Void execute(@NotNull ComputeManagementClient client)
throws Throwable {
VirtualMachineOperations vmo = getVirtualMachineOperations(client);
VirtualMachineShutdownParameters parameters = new VirtualMachineShutdownParameters();
parameters.setPostShutdownAction(deallocate ? PostShutdownAction.StoppedDeallocated : PostShutdownAction.Stopped);
OperationStatusResponse osr = vmo.shutdown(vm.getServiceName(), vm.getDeploymentName(), vm.getName(), parameters);
validateOperationStatus(osr);
return null;
}
};
}
@NotNull
public static SDKRequestCallback<Void, ComputeManagementClient> restartVirtualMachine(@NotNull final VirtualMachine vm) {
return new SDKRequestCallback<Void, ComputeManagementClient>() {
@NotNull
@Override
public Void execute(@NotNull ComputeManagementClient client)
throws Throwable {
VirtualMachineOperations vmo = getVirtualMachineOperations(client);
OperationStatusResponse osr = vmo.restart(vm.getServiceName(), vm.getDeploymentName(), vm.getName());
validateOperationStatus(osr);
return null;
}
};
}
    /**
     * Builds a request callback that deletes the given virtual machine. If the
     * VM is the only role in its deployment, the whole deployment is deleted;
     * if other roles exist, only the VM's role is removed.
     *
     * @param vm                virtual machine to delete
     * @param deleteFromStorage whether to also delete the backing storage
     *                          (forwarded to the delete helpers)
     */
    @NotNull
    public static SDKRequestCallback<Void, ComputeManagementClient> deleteVirtualMachine(@NotNull final VirtualMachine vm,
                                                                                         final boolean deleteFromStorage) {
        return new SDKRequestCallback<Void, ComputeManagementClient>() {
            @NotNull
            @Override
            public Void execute(@NotNull ComputeManagementClient client)
                    throws Throwable {
                DeploymentGetResponse deployment = getDeployment(client, vm);
                List<Role> roles = getVMDeploymentRoles(deployment);
                if (roles.size() == 1) {
                    // Single role: it must be this VM's persistent role, and the
                    // whole deployment can be removed with it.
                    Role role = roles.get(0);
                    if (PERSISTENT_VM_ROLE.equals(role.getRoleType()) && vm.getName().equals(role.getRoleName())) {
                        deleteDeployment(client, vm.getServiceName(), vm.getDeploymentName(), deleteFromStorage);
                    } else {
                        throw new Exception("Invalid Virtual Machine information. No Roles match the VM data.");
                    }
                } else if (roles.size() > 1) {
                    // Other roles share the deployment: remove only this VM's role.
                    deleteVMRole(client, vm.getServiceName(), vm.getDeploymentName(), vm.getName(), deleteFromStorage);
                } else {
                    // Empty role list: the deployment does not contain this VM.
                    throw new Exception("Invalid Virtual Machine information. No Roles match the VM data.");
                }
                return null;
            }
        };
    }
@NotNull
public static SDKRequestCallback<byte[], ComputeManagementClient> downloadRDP(@NotNull final VirtualMachine vm) {
return new SDKRequestCallback<byte[], ComputeManagementClient>() {
@NotNull
@Override
public byte[] execute(@NotNull ComputeManagementClient client)
throws Throwable {
VirtualMachineOperations vmo = getVirtualMachineOperations(client);
VirtualMachineGetRemoteDesktopFileResponse vmgrdfr = vmo.getRemoteDesktopFile(
vm.getServiceName(),
vm.getDeploymentName(),
vm.getName());
if (vmgrdfr == null) {
throw new Exception("Unable to retrieve RDP information");
}
byte[] remoteDesktopFile = vmgrdfr.getRemoteDesktopFile();
if (remoteDesktopFile == null) {
throw new Exception("Unable to retrieve RDP information");
}
return (new String(remoteDesktopFile, "UTF-8")).getBytes();
}
};
}
    /**
     * Builds a request callback that lists the storage accounts of the given
     * subscription, fetching the per-account details concurrently.
     *
     * @param subscriptionId subscription whose storage accounts are listed
     * @return callback producing one {@link StorageAccount} per account
     */
    @NotNull
    public static SDKRequestCallback<List<StorageAccount>, StorageManagementClient> getStorageAccounts(@NotNull final String subscriptionId) {
        return new SDKRequestCallback<List<StorageAccount>, StorageManagementClient>() {
            @NotNull
            @Override
            public List<StorageAccount> execute(@NotNull StorageManagementClient client)
                    throws Throwable {
                List<StorageAccount> saList = new ArrayList<StorageAccount>();
                ArrayList<com.microsoft.windowsazure.management.storage.models.StorageAccount> storageAccounts =
                        getStorageAccounts(client).getStorageAccounts();
                if (storageAccounts == null) {
                    return saList;
                }
                // Resolve each account's details asynchronously, then block once
                // on the combined future so the requests run in parallel.
                List<ListenableFuture<StorageAccount>> saFutureList = new ArrayList<ListenableFuture<StorageAccount>>();
                for (com.microsoft.windowsazure.management.storage.models.StorageAccount storageAccount : storageAccounts) {
                    saFutureList.add(getStorageAccountAsync(subscriptionId, client, storageAccount));
                }
                saList.addAll(Futures.allAsList(saFutureList).get());
                return saList;
            }
        };
    }
@NotNull
public static SDKRequestCallback<List<VirtualMachineImage>, ComputeManagementClient> getVirtualMachineImages() {
return new SDKRequestCallback<List<VirtualMachineImage>, ComputeManagementClient>() {
@NotNull
@Override
public List<VirtualMachineImage> execute(@NotNull ComputeManagementClient client)
throws Throwable {
List<VirtualMachineImage> vmImageList = new ArrayList<VirtualMachineImage>();
ListenableFuture<List<VirtualMachineImage>> osImagesFuture = getOSImagesAsync(client);
ListenableFuture<List<VirtualMachineImage>> vmImagesFuture = getVMImagesAsync(client);
vmImageList.addAll(osImagesFuture.get());
vmImageList.addAll(vmImagesFuture.get());
return vmImageList;
}
};
}
@NotNull
public static SDKRequestCallback<List<VirtualMachineSize>, ManagementClient> getVirtualMachineSizes() {
return new SDKRequestCallback<List<VirtualMachineSize>, ManagementClient>() {
@NotNull
@Override
public List<VirtualMachineSize> execute(@NotNull ManagementClient client)
throws Throwable {
List<VirtualMachineSize> vmSizeList = new ArrayList<VirtualMachineSize>();
vmSizeList = loadVMSizes(client, vmSizeList);
return vmSizeList;
}
};
}
@NotNull
public static SDKRequestCallback<List<Location>, ManagementClient> getLocations() {
return new SDKRequestCallback<List<Location>, ManagementClient>() {
@NotNull
@Override
public List<Location> execute(@NotNull ManagementClient client) throws Throwable {
List<Location> locationList = new ArrayList<Location>();
locationList = loadLocations(client, locationList);
return locationList;
}
};
}
@NotNull
public static SDKRequestCallback<List<AffinityGroup>, ManagementClient> getAffinityGroups() {
return new SDKRequestCallback<List<AffinityGroup>, ManagementClient>() {
@NotNull
@Override
public List<AffinityGroup> execute(@NotNull ManagementClient client) throws Throwable {
List<AffinityGroup> affinityGroupList = new ArrayList<AffinityGroup>();
affinityGroupList = loadAffinityGroups(client, affinityGroupList);
return affinityGroupList;
}
};
}
    /**
     * Builds a request callback that lists the virtual networks of the given
     * subscription, including each network's subnet names.
     *
     * @param subscriptionId subscription whose virtual networks are listed
     * @return callback producing one {@link VirtualNetwork} per network site
     */
    @NotNull
    public static SDKRequestCallback<List<VirtualNetwork>, NetworkManagementClient> getVirtualNetworks(@NotNull final String subscriptionId) {
        return new SDKRequestCallback<List<VirtualNetwork>, NetworkManagementClient>() {
            @NotNull
            @Override
            public List<VirtualNetwork> execute(@NotNull NetworkManagementClient client)
                    throws Throwable {
                List<VirtualNetwork> vnList = new ArrayList<VirtualNetwork>();
                ArrayList<NetworkListResponse.VirtualNetworkSite> virtualNetworkSites =
                        getNetworks(client).getVirtualNetworkSites();
                if (virtualNetworkSites == null) {
                    return vnList;
                }
                for (NetworkListResponse.VirtualNetworkSite virtualNetworkSite : virtualNetworkSites) {
                    // Map missing optional fields to empty strings.
                    VirtualNetwork vn = new VirtualNetwork(
                            virtualNetworkSite.getName() != null ? virtualNetworkSite.getName() : "",
                            virtualNetworkSite.getId() != null ? virtualNetworkSite.getId() : "",
                            virtualNetworkSite.getLocation() != null ? virtualNetworkSite.getLocation() : "",
                            virtualNetworkSite.getAffinityGroup() != null ? virtualNetworkSite.getAffinityGroup() : "",
                            subscriptionId);
                    // Collect non-empty subnet names into the network's subnet set.
                    if (virtualNetworkSite.getSubnets() != null) {
                        Set<String> vnSubnets = vn.getSubnets();
                        for (NetworkListResponse.Subnet subnet : virtualNetworkSite.getSubnets()) {
                            if (subnet.getName() != null && !subnet.getName().isEmpty()) {
                                vnSubnets.add(subnet.getName());
                            }
                        }
                    }
                    vnList.add(vn);
                }
                return vnList;
            }
        };
    }
    /**
     * Builds a request callback that creates a storage account from the given
     * model. The account name doubles as its label; the affinity group, when
     * set, takes precedence over the location.
     *
     * @param storageAccount model carrying name, type, affinity group/location
     */
    @NotNull
    public static SDKRequestCallback<Void, StorageManagementClient> createStorageAccount(@NotNull final StorageAccount storageAccount) {
        return new SDKRequestCallback<Void, StorageManagementClient>() {
            @NotNull
            @Override
            public Void execute(@NotNull StorageManagementClient client)
                    throws Throwable {
                StorageAccountOperations sao = getStorageAccountOperations(client);
                // Name is reused as the account label.
                StorageAccountCreateParameters sacp = new StorageAccountCreateParameters(storageAccount.getName(),
                        storageAccount.getName());
                sacp.setAccountType(storageAccount.getType());
                // Affinity group wins over location when both are present.
                if (!storageAccount.getAffinityGroup().isEmpty()) {
                    sacp.setAffinityGroup(storageAccount.getAffinityGroup());
                } else if (!storageAccount.getLocation().isEmpty()) {
                    sacp.setLocation(storageAccount.getLocation());
                }
                OperationStatusResponse osr = sao.create(sacp);
                validateOperationStatus(osr);
                return null;
            }
        };
    }
    /**
     * Builds a request callback that creates a cloud (hosted) service from the
     * given model. The service name doubles as its label; the affinity group,
     * when set, takes precedence over the location.
     *
     * @param cloudService model carrying name and affinity group/location
     */
    @NotNull
    public static SDKRequestCallback<Void, ComputeManagementClient> createCloudService(@NotNull final CloudService cloudService) {
        return new SDKRequestCallback<Void, ComputeManagementClient>() {
            @NotNull
            @Override
            public Void execute(@NotNull ComputeManagementClient client) throws Throwable {
                HostedServiceOperations hso = getHostedServiceOperations(client);
                // Name is reused as the service label.
                HostedServiceCreateParameters hscp = new HostedServiceCreateParameters(cloudService.getName(),
                        cloudService.getName());
                // Affinity group wins over location when both are present.
                if (!cloudService.getAffinityGroup().isEmpty()) {
                    hscp.setAffinityGroup(cloudService.getAffinityGroup());
                } else if (!cloudService.getLocation().isEmpty()) {
                    hscp.setLocation(cloudService.getLocation());
                }
                OperationResponse or = hso.create(hscp);
                if (or == null) {
                    throw new Exception("Unable to retrieve Operation");
                }
                // Poll the long-running operation until it resolves, then verify it.
                OperationStatusResponse osr = getOperationStatusResponse(client, or);
                validateOperationStatus(osr);
                return null;
            }
        };
    }
    /**
     * Builds a request callback that creates a virtual machine, deriving the
     * VHD media location from the given storage account and then delegating to
     * the media-location overload of this method.
     *
     * @param virtualMachine VM definition (service, deployment, name, size, ...)
     * @param vmImage        image to provision the VM from
     * @param storageAccount storage account used to compute the media location
     * @param virtualNetwork virtual network name (may be empty)
     * @param username       admin user name for provisioning
     * @param password       admin password for provisioning
     * @param certificate    certificate bytes used during provisioning
     */
    @NotNull
    public static SDKRequestCallback<Void, ComputeManagementClient> createVirtualMachine(@NotNull final VirtualMachine virtualMachine, @NotNull final VirtualMachineImage vmImage,
                                                                                         @NotNull final StorageAccount storageAccount, @NotNull final String virtualNetwork,
                                                                                         @NotNull final String username, @NotNull final String password, @NotNull final byte[] certificate) {
        return new SDKRequestCallback<Void, ComputeManagementClient>() {
            @NotNull
            @Override
            public Void execute(@NotNull ComputeManagementClient client)
                    throws Throwable {
                String mediaLocation = getMediaLocation(virtualMachine, storageAccount);
                // Delegate to the overload that takes an explicit media location.
                return createVirtualMachine(virtualMachine, vmImage, mediaLocation, virtualNetwork, username, password, certificate).execute(client);
            }
        };
    }
    /**
     * Builds a request callback that creates a virtual machine at the given
     * media location. When the VM has no deployment name yet, a new deployment
     * is created (using the virtual network); otherwise the VM is added to the
     * existing deployment.
     *
     * @param virtualMachine VM definition; an empty deployment name triggers
     *                       creation of a new deployment
     * @param vmImage        image to provision the VM from
     * @param mediaLocation  blob URI where the VM's OS disk is placed
     * @param virtualNetwork virtual network name (used only for new deployments)
     * @param username       admin user name for provisioning
     * @param password       admin password for provisioning
     * @param certificate    certificate bytes used during provisioning
     */
    @NotNull
    public static SDKRequestCallback<Void, ComputeManagementClient> createVirtualMachine(@NotNull final VirtualMachine virtualMachine, @NotNull final VirtualMachineImage vmImage,
                                                                                         @NotNull final String mediaLocation, @NotNull final String virtualNetwork,
                                                                                         @NotNull final String username, @NotNull final String password, @NotNull final byte[] certificate) {
        return new SDKRequestCallback<Void, ComputeManagementClient>() {
            @NotNull
            @Override
            public Void execute(@NotNull ComputeManagementClient client)
                    throws Throwable {
                VirtualMachineOperations vmo = getVirtualMachineOperations(client);
                if (virtualMachine.getDeploymentName().isEmpty()) {
                    // No deployment yet: create one (virtualNetwork applies here).
                    createVMDeployment(client, vmo, virtualMachine, vmImage, mediaLocation, virtualNetwork, username, password, certificate);
                } else {
                    // Deployment exists: add the VM role to it.
                    createVM(client, vmo, virtualMachine, vmImage, mediaLocation, username, password, certificate);
                }
                return null;
            }
        };
    }
    /**
     * Builds a request callback that re-reads the given storage account from
     * the service and copies every refreshed field back onto the same model
     * instance.
     *
     * @param storageAccount account to refresh; mutated in place and returned
     * @return callback producing the same, refreshed {@code storageAccount}
     */
    @NotNull
    public static SDKRequestCallback<StorageAccount, StorageManagementClient> refreshStorageAccountInformation(@NotNull final StorageAccount storageAccount) {
        return new SDKRequestCallback<StorageAccount, StorageManagementClient>() {
            @NotNull
            @Override
            public StorageAccount execute(@NotNull StorageManagementClient client)
                    throws Throwable {
                StorageAccountOperations sao = getStorageAccountOperations(client);
                StorageAccountGetResponse sagr = sao.get(storageAccount.getName());
                if (sagr == null) {
                    throw new Exception("Unable to retrieve Operation");
                }
                OperationStatusResponse osr = getOperationStatusResponse(client, sagr);
                validateOperationStatus(osr);
                if (sagr.getStorageAccount() == null) {
                    throw new Exception("Invalid Storage Account information. No Storage Account matches the specified data.");
                }
                // Build a fresh model from the service response, then copy its
                // fields onto the caller's instance so existing references see
                // the updated state.
                StorageAccount sa = getStorageAccount(storageAccount.getSubscriptionId(), client, sagr.getStorageAccount());
                storageAccount.setType(sa.getType());
                storageAccount.setDescription(sa.getDescription());
                storageAccount.setLabel(sa.getLabel());
                storageAccount.setStatus(sa.getStatus());
                storageAccount.setLocation(sa.getLocation());
                storageAccount.setAffinityGroup(sa.getAffinityGroup());
                storageAccount.setPrimaryKey(sa.getPrimaryKey());
                storageAccount.setSecondaryKey(sa.getSecondaryKey());
                storageAccount.setManagementUri(sa.getManagementUri());
                storageAccount.setBlobsUri(sa.getBlobsUri());
                storageAccount.setQueuesUri(sa.getQueuesUri());
                storageAccount.setTablesUri(sa.getTablesUri());
                storageAccount.setPrimaryRegion(sa.getPrimaryRegion());
                storageAccount.setPrimaryRegionStatus(sa.getPrimaryRegionStatus());
                storageAccount.setSecondaryRegion(sa.getSecondaryRegion());
                storageAccount.setSecondaryRegionStatus(sa.getSecondaryRegionStatus());
                storageAccount.setLastFailover(sa.getLastFailover());
                return storageAccount;
            }
        };
    }
    /**
     * Builds a request callback that uploads a PFX service certificate to the
     * given cloud service and returns the certificate's SHA-1 thumbprint.
     *
     * @param serviceName cloud service receiving the certificate
     * @param data        certificate bytes (PFX format)
     * @param password    password protecting the PFX data
     * @return callback producing the hex-encoded SHA-1 thumbprint
     */
    @NotNull
    public static SDKRequestCallback<String, ComputeManagementClient> createServiceCertificate(@NotNull final String serviceName,
                                                                                               @NotNull final byte[] data,
                                                                                               @NotNull final String password) {
        return new SDKRequestCallback<String, ComputeManagementClient>() {
            @NotNull
            @Override
            public String execute(@NotNull ComputeManagementClient client)
                    throws Throwable {
                // Thumbprint = SHA-1 over the certificate's encoded form.
                // NOTE(review): javax.security.cert.X509Certificate is the
                // legacy certificate API — java.security.cert is the modern
                // replacement; kept as-is here.
                MessageDigest md = MessageDigest.getInstance("SHA1");
                X509Certificate cert = X509Certificate.getInstance(data);
                md.update(cert.getEncoded());
                String thumbprint = bytesToHex(md.digest());
                ServiceCertificateOperations sco = getServiceCertificateOperations(client);
                ServiceCertificateCreateParameters sccp = new ServiceCertificateCreateParameters(data,
                        CertificateFormat.Pfx);
                sccp.setPassword(password);
                OperationStatusResponse osr = sco.create(serviceName, sccp);
                validateOperationStatus(osr);
                return thumbprint;
            }
        };
    }
@NotNull
public static SDKRequestCallback<Void, StorageManagementClient> deleteStorageAccount(@NotNull final StorageAccount storageAccount) {
return new SDKRequestCallback<Void, StorageManagementClient>() {
@NotNull
@Override
public Void execute(@NotNull StorageManagementClient client)
throws Throwable {
StorageAccountOperations sao = getStorageAccountOperations(client);
OperationResponse or = sao.delete(storageAccount.getName());
OperationStatusResponse osr = getOperationStatusResponse(client, or);
validateOperationStatus(osr);
return null;
}
};
}
@NotNull
public static ComputeManagementClient getComputeManagementClient(@NotNull String subscriptionId,
@NotNull String accessToken)
throws IOException, CertificateException, NoSuchAlgorithmException, KeyStoreException,
XPathExpressionException, ParserConfigurationException, SAXException, AzureCmdException {
Configuration configuration = getConfigurationFromAuthToken(subscriptionId);
if (configuration == null) {
throw new AzureCmdException("Unable to instantiate Configuration");
}
ComputeManagementClient client = ComputeManagementService.create(configuration);
if (client == null) {
throw new AzureCmdException("Unable to instantiate Compute Management client");
}
// add a request filter for tacking on the A/D auth token if the current authentication
// mode is active directory
AuthTokenRequestFilter requestFilter = new AuthTokenRequestFilter(accessToken);
return client.withRequestFilterFirst(requestFilter);
}
@NotNull
public static ComputeManagementClient getComputeManagementClient(@NotNull String subscriptionId,
@NotNull String managementCertificate,
@NotNull String serviceManagementUrl)
throws IOException, CertificateException, NoSuchAlgorithmException, KeyStoreException,
XPathExpressionException, ParserConfigurationException, SAXException, AzureCmdException {
Configuration configuration = getConfigurationFromCertificate(subscriptionId, managementCertificate,
serviceManagementUrl);
if (configuration == null) {
throw new AzureCmdException("Unable to instantiate Configuration");
}
ComputeManagementClient client = ComputeManagementService.create(configuration);
if (client == null) {
throw new AzureCmdException("Unable to instantiate Compute Management client");
}
return client;
}
@NotNull
public static StorageManagementClient getStorageManagementClient(@NotNull String subscriptionId,
@NotNull String accessToken)
throws IOException, CertificateException, NoSuchAlgorithmException, KeyStoreException,
XPathExpressionException, ParserConfigurationException, SAXException, AzureCmdException {
Configuration configuration = getConfigurationFromAuthToken(subscriptionId);
if (configuration == null) {
throw new AzureCmdException("Unable to instantiate Configuration");
}
StorageManagementClient client = StorageManagementService.create(configuration);
if (client == null) {
throw new AzureCmdException("Unable to instantiate Storage Management client");
}
// add a request filter for tacking on the A/D auth token if the current authentication
// mode is active directory
AuthTokenRequestFilter requestFilter = new AuthTokenRequestFilter(accessToken);
return client.withRequestFilterFirst(requestFilter);
}
@NotNull
public static StorageManagementClient getStorageManagementClient(@NotNull String subscriptionId,
@NotNull String managementCertificate,
@NotNull String serviceManagementUrl)
throws IOException, CertificateException, NoSuchAlgorithmException, KeyStoreException,
XPathExpressionException, ParserConfigurationException, SAXException, AzureCmdException {
Configuration configuration = getConfigurationFromCertificate(subscriptionId, managementCertificate,
serviceManagementUrl);
if (configuration == null) {
throw new AzureCmdException("Unable to instantiate Configuration");
}
StorageManagementClient client = StorageManagementService.create(configuration);
if (client == null) {
throw new AzureCmdException("Unable to instantiate Storage Management client");
}
return client;
}
@NotNull
public static NetworkManagementClient getNetworkManagementClient(@NotNull String subscriptionId,
@NotNull String accessToken)
throws IOException, CertificateException, NoSuchAlgorithmException, KeyStoreException,
XPathExpressionException, ParserConfigurationException, SAXException, AzureCmdException {
Configuration configuration = getConfigurationFromAuthToken(subscriptionId);
if (configuration == null) {
throw new AzureCmdException("Unable to instantiate Configuration");
}
NetworkManagementClient client = NetworkManagementService.create(configuration);
if (client == null) {
throw new AzureCmdException("Unable to instantiate Network Management client");
}
// add a request filter for tacking on the A/D auth token if the current authentication
// mode is active directory
AuthTokenRequestFilter requestFilter = new AuthTokenRequestFilter(accessToken);
return client.withRequestFilterFirst(requestFilter);
}
@NotNull
public static NetworkManagementClient getNetworkManagementClient(@NotNull String subscriptionId,
@NotNull String managementCertificate,
@NotNull String serviceManagementUrl)
throws IOException, CertificateException, NoSuchAlgorithmException, KeyStoreException,
XPathExpressionException, ParserConfigurationException, SAXException, AzureCmdException {
Configuration configuration = getConfigurationFromCertificate(subscriptionId, managementCertificate,
serviceManagementUrl);
if (configuration == null) {
throw new AzureCmdException("Unable to instantiate Configuration");
}
NetworkManagementClient client = NetworkManagementService.create(configuration);
if (client == null) {
throw new AzureCmdException("Unable to instantiate Network Management client");
}
return client;
}
/**
 * Builds a {@link ManagementClient} that authenticates via an Active Directory access token.
 *
 * @param subscriptionId Azure subscription id the client operates on.
 * @param accessToken    A/D bearer token attached to every outgoing request.
 * @throws AzureCmdException if either the configuration or the client cannot be created.
 */
@NotNull
public static ManagementClient getManagementClient(@NotNull String subscriptionId,
                                                   @NotNull String accessToken)
        throws IOException, CertificateException, NoSuchAlgorithmException, KeyStoreException,
        XPathExpressionException, ParserConfigurationException, SAXException, AzureCmdException {
    Configuration config = getConfigurationFromAuthToken(subscriptionId);
    if (config == null) {
        throw new AzureCmdException("Unable to instantiate Configuration");
    }
    ManagementClient managementClient = ManagementService.create(config);
    if (managementClient == null) {
        throw new AzureCmdException("Unable to instantiate Management client");
    }
    // Active Directory auth: install a request filter that tacks the bearer
    // token onto every call made through this client.
    return managementClient.withRequestFilterFirst(new AuthTokenRequestFilter(accessToken));
}
/**
 * Builds a {@link ManagementClient} authenticated with a management certificate.
 *
 * @param subscriptionId        Azure subscription id the client operates on.
 * @param managementCertificate certificate used for service-management auth.
 * @param serviceManagementUrl  base URL of the service-management endpoint.
 * @throws AzureCmdException if either the configuration or the client cannot be created.
 */
@NotNull
public static ManagementClient getManagementClient(@NotNull String subscriptionId,
                                                   @NotNull String managementCertificate,
                                                   @NotNull String serviceManagementUrl)
        throws IOException, CertificateException, NoSuchAlgorithmException, KeyStoreException,
        XPathExpressionException, ParserConfigurationException, SAXException, AzureCmdException {
    Configuration config = getConfigurationFromCertificate(subscriptionId, managementCertificate,
            serviceManagementUrl);
    if (config == null) {
        throw new AzureCmdException("Unable to instantiate Configuration");
    }
    ManagementClient managementClient = ManagementService.create(config);
    if (managementClient == null) {
        throw new AzureCmdException("Unable to instantiate Management client");
    }
    return managementClient;
}
/**
 * Resolves a {@link CloudStorageAccount} from a client-side storage account descriptor
 * by parsing its connection string.
 */
@NotNull
public static CloudStorageAccount getCloudStorageAccount(@NotNull ClientStorageAccount storageAccount)
        throws URISyntaxException, InvalidKeyException {
    String connectionString = storageAccount.getConnectionString();
    return CloudStorageAccount.parse(connectionString);
}
/**
 * Creates a blob client for the given storage account.
 */
@NotNull
private static CloudBlobClient getCloudBlobClient(@NotNull ClientStorageAccount storageAccount)
        throws Exception {
    return getCloudStorageAccount(storageAccount).createCloudBlobClient();
}
/**
 * Returns the hosted-services operations facade, failing fast if the SDK hands back null.
 */
@NotNull
private static HostedServiceOperations getHostedServiceOperations(@NotNull ComputeManagementClient client)
        throws Exception {
    HostedServiceOperations operations = client.getHostedServicesOperations();
    if (operations != null) {
        return operations;
    }
    throw new Exception("Unable to retrieve Hosted Services information");
}
/**
 * Returns the deployments operations facade, failing fast if the SDK hands back null.
 */
@NotNull
private static DeploymentOperations getDeploymentOperations(@NotNull ComputeManagementClient client)
        throws Exception {
    DeploymentOperations operations = client.getDeploymentsOperations();
    if (operations != null) {
        return operations;
    }
    throw new Exception("Unable to retrieve Deployment information");
}
/**
 * Returns the virtual-machines operations facade, failing fast if the SDK hands back null.
 */
@NotNull
private static VirtualMachineOperations getVirtualMachineOperations(@NotNull ComputeManagementClient client)
        throws Exception {
    VirtualMachineOperations operations = client.getVirtualMachinesOperations();
    if (operations != null) {
        return operations;
    }
    throw new Exception("Unable to retrieve Virtual Machines Information");
}
/**
 * Returns the OS-image operations facade, failing fast if the SDK hands back null.
 */
@NotNull
private static VirtualMachineOSImageOperations getVirtualMachineOSImageOperations(@NotNull ComputeManagementClient client)
        throws Exception {
    VirtualMachineOSImageOperations operations = client.getVirtualMachineOSImagesOperations();
    if (operations != null) {
        return operations;
    }
    throw new Exception("Unable to retrieve OS Images information");
}
/**
 * Returns the VM-image operations facade, failing fast if the SDK hands back null.
 */
@NotNull
private static VirtualMachineVMImageOperations getVirtualMachineVMImageOperations(@NotNull ComputeManagementClient client)
        throws Exception {
    VirtualMachineVMImageOperations operations = client.getVirtualMachineVMImagesOperations();
    if (operations != null) {
        return operations;
    }
    throw new Exception("Unable to retrieve VM Images information");
}
/**
 * Returns the role-sizes operations facade, failing fast if the SDK hands back null.
 */
@NotNull
private static RoleSizeOperations getRoleSizeOperations(@NotNull ManagementClient client)
        throws Exception {
    RoleSizeOperations operations = client.getRoleSizesOperations();
    if (operations != null) {
        return operations;
    }
    throw new Exception("Unable to retrieve Role Sizes information");
}
/**
 * Returns the locations operations facade, failing fast if the SDK hands back null.
 */
@NotNull
private static LocationOperations getLocationsOperations(@NotNull ManagementClient client)
        throws Exception {
    LocationOperations operations = client.getLocationsOperations();
    if (operations != null) {
        return operations;
    }
    throw new Exception("Unable to retrieve Locations information");
}
/**
 * Returns the affinity-groups operations facade, failing fast if the SDK hands back null.
 */
@NotNull
private static AffinityGroupOperations getAffinityGroupOperations(@NotNull ManagementClient client)
        throws Exception {
    AffinityGroupOperations operations = client.getAffinityGroupsOperations();
    if (operations != null) {
        return operations;
    }
    throw new Exception("Unable to retrieve Affinity Groups information");
}
/**
 * Returns the storage-accounts operations facade, failing fast if the SDK hands back null.
 */
@NotNull
private static StorageAccountOperations getStorageAccountOperations(@NotNull StorageManagementClient client)
        throws Exception {
    StorageAccountOperations operations = client.getStorageAccountsOperations();
    if (operations != null) {
        return operations;
    }
    throw new Exception("Unable to retrieve Storage Accounts information");
}
/**
 * Returns the networks operations facade, failing fast if the SDK hands back null.
 */
@NotNull
private static NetworkOperations getNetworkOperations(@NotNull NetworkManagementClient client)
        throws Exception {
    NetworkOperations operations = client.getNetworksOperations();
    if (operations != null) {
        return operations;
    }
    throw new Exception("Unable to retrieve Network information");
}
/**
 * Returns the service-certificates operations facade, failing fast if the SDK hands back null.
 */
@NotNull
private static ServiceCertificateOperations getServiceCertificateOperations(@NotNull ComputeManagementClient client)
        throws Exception {
    ServiceCertificateOperations operations = client.getServiceCertificatesOperations();
    if (operations != null) {
        return operations;
    }
    throw new Exception("Unable to retrieve Service Certificate information");
}
/**
 * Lists all hosted services for the subscription behind {@code client}.
 *
 * @throws Exception if the listing call yields no response object.
 */
@NotNull
private static HostedServiceListResponse getHostedServices(@NotNull ComputeManagementClient client)
        throws Exception {
    HostedServiceListResponse response = getHostedServiceOperations(client).list();
    if (response != null) {
        return response;
    }
    throw new Exception("Unable to retrieve Hosted Services information");
}
/**
 * Fetches a deployment on a pooled background thread.
 *
 * @return a future that completes with the deployment, or fails with whatever
 * the synchronous {@code getDeployment} call threw.
 */
@NotNull
private static ListenableFuture<DeploymentGetResponse> getDeploymentAsync(@NotNull final ComputeManagementClient client,
                                                                          @NotNull final String serviceName,
                                                                          @NotNull final DeploymentSlot slot) {
    final SettableFuture<DeploymentGetResponse> result = SettableFuture.create();
    DefaultLoader.getIdeHelper().executeOnPooledThread(new Runnable() {
        @Override
        public void run() {
            try {
                DeploymentGetResponse deployment = getDeployment(client, serviceName, slot);
                result.set(deployment);
            } catch (Exception e) {
                // Propagate the failure through the future rather than losing it on the worker thread.
                result.setException(e);
            }
        }
    });
    return result;
}
/**
 * Fetches the deployment occupying {@code slot} of the given cloud service.
 * <p>
 * A 404 from the service (no deployment in that slot) is mapped to an empty
 * {@link DeploymentGetResponse} instead of an exception; any other
 * {@link ServiceException} is rethrown unchanged.
 */
@NotNull
private static DeploymentGetResponse getDeployment(@NotNull ComputeManagementClient client,
                                                   @NotNull String serviceName,
                                                   @NotNull DeploymentSlot slot)
        throws Exception {
    try {
        DeploymentGetResponse dgr = getDeploymentOperations(client).getBySlot(serviceName, slot);
        if (dgr == null) {
            throw new Exception("Unable to retrieve Deployment information");
        }
        return dgr;
    } catch (ServiceException se) {
        if (se.getHttpStatusCode() == 404) {
            // "Not found" is a normal state here: the slot simply has no deployment.
            return new DeploymentGetResponse();
        } else {
            throw se;
        }
    }
}
/**
 * Convenience overload: fetches the production-slot deployment of the
 * cloud service that hosts {@code vm}.
 */
@NotNull
private static DeploymentGetResponse getDeployment(@NotNull ComputeManagementClient client,
                                                   @NotNull VirtualMachine vm)
        throws Exception {
    String serviceName = vm.getServiceName();
    return getDeployment(client, serviceName, DeploymentSlot.Production);
}
/**
 * Lists every storage account in the subscription behind {@code client}.
 *
 * @throws Exception if the listing call yields no response object.
 */
@NotNull
private static StorageAccountListResponse getStorageAccounts(@NotNull StorageManagementClient client) throws Exception {
    StorageAccountListResponse response = getStorageAccountOperations(client).list();
    if (response != null) {
        return response;
    }
    throw new Exception("Unable to retrieve Storage Accounts information");
}
/**
 * Maps an SDK storage-account model to this plugin's {@code StorageAccount}
 * on a pooled background thread (key retrieval involves a network call).
 *
 * @return a future completing with the mapped account or failing with the mapping error.
 */
@NotNull
private static ListenableFuture<StorageAccount> getStorageAccountAsync(@NotNull final String subscriptionId,
                                                                       @NotNull final StorageManagementClient client,
                                                                       @NotNull final com.microsoft.windowsazure.management.storage.models.StorageAccount storageAccount)
        throws Exception {
    final SettableFuture<StorageAccount> result = SettableFuture.create();
    DefaultLoader.getIdeHelper().executeOnPooledThread(new Runnable() {
        @Override
        public void run() {
            try {
                StorageAccount mapped = getStorageAccount(subscriptionId, client, storageAccount);
                result.set(mapped);
            } catch (Exception e) {
                // Surface the failure through the future instead of swallowing it.
                result.setException(e);
            }
        }
    });
    return result;
}
/**
 * Converts an SDK storage-account model into the plugin's {@code StorageAccount}
 * representation, fetching the access keys over the wire when the account has a name.
 * <p>
 * All nullable SDK fields are normalized to empty strings (or a current calendar
 * for the failover timestamp) so downstream code never sees null.
 */
@NotNull
private static StorageAccount getStorageAccount(@NotNull String subscriptionId,
                                                @NotNull StorageManagementClient client,
                                                @NotNull com.microsoft.windowsazure.management.storage.models.StorageAccount storageAccount) throws Exception {
    String primaryKey = "";
    String secondaryKey = "";
    // Keys can only be looked up by account name; unnamed accounts keep empty keys.
    if (storageAccount.getName() != null) {
        StorageAccountGetKeysResponse sak = getStorageAccountKeys(client, storageAccount.getName());
        primaryKey = sak.getPrimaryKey();
        secondaryKey = sak.getSecondaryKey();
    }
    // Fall back to an empty properties object so the accessors below are null-safe.
    StorageAccountProperties sap = storageAccount.getProperties() != null ?
            storageAccount.getProperties() :
            new StorageAccountProperties();
    String blobsUri = "";
    String queuesUri = "";
    String tablesUri = "";
    // Endpoint order is positional in the service response: blobs, then queues, then tables.
    ArrayList<URI> endpoints = sap.getEndpoints();
    if (endpoints != null && endpoints.size() > 0) {
        blobsUri = endpoints.get(0).toString();
        if (endpoints.size() > 1) {
            queuesUri = endpoints.get(1).toString();
            if (endpoints.size() > 2) {
                tablesUri = endpoints.get(2).toString();
            }
        }
    }
    StorageAccount sa = new StorageAccount(Strings.nullToEmpty(storageAccount.getName()), subscriptionId);
    sa.setPrimaryKey(Strings.nullToEmpty(primaryKey));
    sa.setProtocol("https");
    sa.setBlobsUri(blobsUri);
    sa.setQueuesUri(queuesUri);
    sa.setTablesUri(tablesUri);
    sa.setUseCustomEndpoints(true);
    sa.setType(Strings.nullToEmpty(sap.getAccountType()));
    sa.setDescription(Strings.nullToEmpty(sap.getDescription()));
    sa.setLabel(Strings.nullToEmpty(sap.getLabel()));
    sa.setStatus(sap.getStatus() != null ? sap.getStatus().toString() : "");
    sa.setLocation(Strings.nullToEmpty(sap.getLocation()));
    sa.setAffinityGroup(Strings.nullToEmpty(sap.getAffinityGroup()));
    sa.setSecondaryKey(Strings.nullToEmpty(secondaryKey));
    sa.setManagementUri(storageAccount.getUri() != null ? storageAccount.getUri().toString() : "");
    sa.setPrimaryRegion(Strings.nullToEmpty(sap.getGeoPrimaryRegion()));
    sa.setPrimaryRegionStatus(sap.getStatusOfGeoPrimaryRegion() != null ? sap.getStatusOfGeoPrimaryRegion().toString() : "");
    sa.setSecondaryRegion(Strings.nullToEmpty(sap.getGeoSecondaryRegion()));
    sa.setSecondaryRegionStatus(sap.getStatusOfGeoSecondaryRegion() != null ? sap.getStatusOfGeoSecondaryRegion().toString() : "");
    // NOTE(review): a missing failover time is replaced with "now" — presumably a
    // placeholder for "never failed over"; confirm consumers treat it that way.
    sa.setLastFailover(sap.getLastGeoFailoverTime() != null ? sap.getLastGeoFailoverTime() : new GregorianCalendar());
    return sa;
}
/**
 * Retrieves the primary/secondary access keys of a storage account.
 *
 * @throws Exception if the key-retrieval call yields no response object.
 */
@NotNull
private static StorageAccountGetKeysResponse getStorageAccountKeys(@NotNull StorageManagementClient client,
                                                                   @NotNull String storageName)
        throws Exception {
    StorageAccountGetKeysResponse response = getStorageAccountOperations(client).getKeys(storageName);
    if (response != null) {
        return response;
    }
    throw new Exception("Unable to retrieve Storage Account Keys information");
}
/**
 * Extracts the role list from a deployment, treating a null list as invalid data.
 */
@NotNull
private static List<Role> getVMDeploymentRoles(@NotNull DeploymentGetResponse deployment) throws Exception {
    ArrayList<Role> deploymentRoles = deployment.getRoles();
    if (deploymentRoles != null) {
        return deploymentRoles;
    }
    throw new Exception("Invalid Virtual Machine information. No Roles match the VM data.");
}
/**
 * Lists the virtual networks in the subscription behind {@code client}.
 *
 * @throws Exception if the listing call yields no response object.
 */
@NotNull
private static NetworkListResponse getNetworks(@NotNull NetworkManagementClient client) throws Exception {
    NetworkListResponse response = getNetworkOperations(client).list();
    if (response != null) {
        return response;
    }
    throw new Exception("Unable to retrieve Networks information");
}
/**
 * Ensures a long-running operation finished without error: a missing response
 * or an embedded error detail is turned into an exception.
 */
private static void validateOperationStatus(@Nullable OperationStatusResponse osr) throws Exception {
    if (osr == null) {
        throw new Exception("Unable to retrieve Operation Status");
    }
    if (osr.getError() == null) {
        return;
    }
    throw new Exception(osr.getError().getMessage());
}
/**
 * Polls a compute long-running operation until it leaves the InProgress state,
 * then validates the final status.
 * <p>
 * The first wait uses the client's initial-timeout setting, subsequent waits use
 * its retry-timeout setting; either falls back to 30 seconds when unset (negative).
 *
 * @return the terminal status response (Succeeded).
 * @throws ServiceException if the operation did not succeed; carries the service's
 *                          error code/message when one was provided.
 */
@Nullable
private static OperationStatusResponse getOperationStatusResponse(@NotNull ComputeManagementClient client,
                                                                  @NotNull OperationResponse or)
        throws InterruptedException, java.util.concurrent.ExecutionException, ServiceException {
    OperationStatusResponse osr = client.getOperationStatusAsync(or.getRequestId()).get();
    int delayInSeconds = 30;
    if (client.getLongRunningOperationInitialTimeout() >= 0) {
        delayInSeconds = client.getLongRunningOperationInitialTimeout();
    }
    while (osr.getStatus() == OperationStatus.InProgress) {
        Thread.sleep(delayInSeconds * 1000);
        osr = client.getOperationStatusAsync(or.getRequestId()).get();
        // After the first poll, switch to the retry interval.
        delayInSeconds = 30;
        if (client.getLongRunningOperationRetryTimeout() >= 0) {
            delayInSeconds = client.getLongRunningOperationRetryTimeout();
        }
    }
    if (osr.getStatus() != OperationStatus.Succeeded) {
        if (osr.getError() != null) {
            ServiceException ex = new ServiceException(osr.getError().getCode() + " : " + osr.getError().getMessage());
            ex.setErrorCode(osr.getError().getCode());
            ex.setErrorMessage(osr.getError().getMessage());
            throw ex;
        } else {
            // Failed with no detail from the service.
            throw new ServiceException("");
        }
    }
    return osr;
}
/**
 * Polls a storage long-running operation until it leaves the InProgress state,
 * then validates the final status. Mirrors the compute-client overload above;
 * the two client types share no common interface, hence the duplication.
 *
 * @return the terminal status response (Succeeded).
 * @throws ServiceException if the operation did not succeed; carries the service's
 *                          error code/message when one was provided.
 */
@Nullable
private static OperationStatusResponse getOperationStatusResponse(@NotNull StorageManagementClient client,
                                                                  @NotNull OperationResponse or)
        throws InterruptedException, java.util.concurrent.ExecutionException, ServiceException {
    OperationStatusResponse osr = client.getOperationStatusAsync(or.getRequestId()).get();
    int delayInSeconds = 30;
    if (client.getLongRunningOperationInitialTimeout() >= 0) {
        delayInSeconds = client.getLongRunningOperationInitialTimeout();
    }
    while (osr.getStatus() == OperationStatus.InProgress) {
        Thread.sleep(delayInSeconds * 1000);
        osr = client.getOperationStatusAsync(or.getRequestId()).get();
        // After the first poll, switch to the retry interval.
        delayInSeconds = 30;
        if (client.getLongRunningOperationRetryTimeout() >= 0) {
            delayInSeconds = client.getLongRunningOperationRetryTimeout();
        }
    }
    if (osr.getStatus() != OperationStatus.Succeeded) {
        if (osr.getError() != null) {
            ServiceException ex = new ServiceException(osr.getError().getCode() + " : " + osr.getError().getMessage());
            ex.setErrorCode(osr.getError().getCode());
            ex.setErrorMessage(osr.getError().getMessage());
            throw ex;
        } else {
            // Failed with no detail from the service.
            throw new ServiceException("");
        }
    }
    return osr;
}
/**
 * Copies the interesting parts of a deployment response into the matching slot
 * (production/staging) of {@code cloudService}: name, virtual network, and a
 * classification of each role into VMs, compute roles, and availability sets.
 *
 * @return the same {@code cloudService} instance, for chaining; unchanged when the
 * deployment has no slot or an unrecognized slot.
 */
@NotNull
private static CloudService loadDeployment(@NotNull DeploymentGetResponse deployment,
                                           @NotNull CloudService cloudService)
        throws Exception {
    if (deployment.getDeploymentSlot() != null) {
        CloudService.Deployment dep;
        // Pick the cloud-service side object that corresponds to this slot.
        switch (deployment.getDeploymentSlot()) {
            case Production:
                dep = cloudService.getProductionDeployment();
                break;
            case Staging:
                dep = cloudService.getStagingDeployment();
                break;
            default:
                return cloudService;
        }
        dep.setName(deployment.getName() != null ? deployment.getName() : "");
        dep.setVirtualNetwork(deployment.getVirtualNetworkName() != null ? deployment.getVirtualNetworkName() : "");
        if (deployment.getRoles() != null) {
            Set<String> virtualMachines = dep.getVirtualMachines();
            Set<String> computeRoles = dep.getComputeRoles();
            Set<String> availabilitySets = dep.getAvailabilitySets();
            for (Role role : deployment.getRoles()) {
                // PersistentVMRole => IaaS VM (record its availability set too);
                // anything else counts as a PaaS compute role.
                if (role.getRoleType() != null && role.getRoleType().equals(PERSISTENT_VM_ROLE)) {
                    if (role.getRoleName() != null && !role.getRoleName().isEmpty()) {
                        virtualMachines.add(role.getRoleName());
                    }
                    if (role.getAvailabilitySetName() != null && !role.getAvailabilitySetName().isEmpty()) {
                        availabilitySets.add(role.getAvailabilitySetName());
                    }
                } else {
                    if (role.getRoleName() != null && !role.getRoleName().isEmpty()) {
                        computeRoles.add(role.getRoleName());
                    }
                }
            }
        }
    }
    return cloudService;
}
/**
 * Appends every persistent-VM role of the service's production deployment to
 * {@code vmList}, resolving each VM's status and network configuration.
 *
 * @return the same {@code vmList} instance, for chaining; unchanged when the
 * deployment has no roles (e.g. the 404 fallback from getDeployment).
 */
@NotNull
private static List<VirtualMachine> loadVirtualMachines(@NotNull ComputeManagementClient client,
                                                        @NotNull String subscriptionId,
                                                        @NotNull String serviceName,
                                                        @NotNull List<VirtualMachine> vmList)
        throws Exception {
    DeploymentGetResponse deployment = getDeployment(client, serviceName, DeploymentSlot.Production);
    if (deployment.getRoles() == null) {
        return vmList;
    }
    for (Role role : deployment.getRoles()) {
        // Only PersistentVMRole entries represent IaaS virtual machines.
        if (role.getRoleType() != null
                && role.getRoleType().equals(PERSISTENT_VM_ROLE)) {
            VirtualMachine vm = new VirtualMachine(
                    role.getRoleName() != null ? role.getRoleName() : "",
                    serviceName,
                    deployment.getName() != null ? deployment.getName() : "",
                    role.getAvailabilitySetName() != null ? role.getAvailabilitySetName() : "",
                    "",
                    role.getRoleSize() != null ? role.getRoleSize() : "",
                    getVMStatus(deployment, role),
                    subscriptionId);
            loadNetworkConfiguration(role, vm);
            vmList.add(vm);
        }
    }
    return vmList;
}
/**
 * Copies endpoint and subnet information from the role's network configuration
 * set into {@code vm}. Only the first NetworkConfiguration set is consulted
 * (the loop breaks after it); the subnet is recorded only when exactly one
 * subnet name is present.
 */
private static void loadNetworkConfiguration(@NotNull Role role, @NotNull VirtualMachine vm) {
    if (role.getConfigurationSets() != null) {
        List<Endpoint> endpoints = vm.getEndpoints();
        for (ConfigurationSet configurationSet : role.getConfigurationSets()) {
            if (configurationSet.getConfigurationSetType() != null
                    && configurationSet.getConfigurationSetType().equals(NETWORK_CONFIGURATION)) {
                if (configurationSet.getInputEndpoints() != null) {
                    for (InputEndpoint inputEndpoint : configurationSet.getInputEndpoints()) {
                        endpoints.add(new Endpoint(
                                inputEndpoint.getName() != null ? inputEndpoint.getName() : "",
                                inputEndpoint.getProtocol() != null ? inputEndpoint.getProtocol() : "",
                                inputEndpoint.getLocalPort(),
                                inputEndpoint.getPort()));
                    }
                }
                if (configurationSet.getSubnetNames() != null && configurationSet.getSubnetNames().size() == 1) {
                    vm.setSubnet(configurationSet.getSubnetNames().get(0));
                }
                // A role carries at most one meaningful network configuration set.
                break;
            }
        }
    }
}
/**
 * Deletes a single VM role from a deployment and validates the resulting
 * operation status.
 *
 * @param deleteFromStorage when true, the backing VHD blobs are removed as well.
 */
private static void deleteVMRole(@NotNull ComputeManagementClient client, @NotNull String serviceName,
                                 @NotNull String deploymentName, @NotNull String virtualMachineName,
                                 boolean deleteFromStorage)
        throws Exception {
    OperationStatusResponse response = getVirtualMachineOperations(client)
            .delete(serviceName, deploymentName, virtualMachineName, deleteFromStorage);
    validateOperationStatus(response);
}
/**
 * Deletes an entire deployment by name and validates the resulting operation status.
 *
 * @param deleteFromStorage when true, the backing VHD blobs are removed as well.
 */
private static void deleteDeployment(@NotNull ComputeManagementClient client, @NotNull String serviceName,
                                     @NotNull String deploymentName, boolean deleteFromStorage)
        throws Exception {
    OperationStatusResponse response = getDeploymentOperations(client)
            .deleteByName(serviceName, deploymentName, deleteFromStorage);
    validateOperationStatus(response);
}
/**
 * Builds the blob URI where a new VM's OS disk VHD will live, inside the
 * conventional "vhds" container of the given storage account (created on demand).
 * <p>
 * The blob name embeds a timestamp ({@code service-vm-0-yyyyMMddHHmmss<millis>.vhd})
 * to keep names unique; Calendar.MONTH is zero-based, hence the +1.
 */
@NotNull
private static String getMediaLocation(@NotNull VirtualMachine virtualMachine,
                                       @NotNull StorageAccount storageAccount)
        throws Exception {
    Calendar calendar = GregorianCalendar.getInstance();
    String blobName = String.format("%s-%s-0-%04d%02d%02d%02d%02d%02d%04d.vhd",
            virtualMachine.getServiceName(),
            virtualMachine.getName(),
            calendar.get(Calendar.YEAR),
            calendar.get(Calendar.MONTH) + 1,
            calendar.get(Calendar.DATE),
            calendar.get(Calendar.HOUR_OF_DAY),
            calendar.get(Calendar.MINUTE),
            calendar.get(Calendar.SECOND),
            calendar.get(Calendar.MILLISECOND));
    CloudBlobClient cloudBlobClient = getCloudBlobClient(storageAccount);
    CloudBlobContainer container = cloudBlobClient.getContainerReference("vhds");
    // First VM in a fresh account: the container may not exist yet.
    container.createIfNotExists();
    return container.getUri().toString() + "/" + blobName;
}
/**
 * Lists platform OS images on a pooled background thread.
 *
 * @return a future completing with the image list or failing with the listing error.
 */
@NotNull
private static ListenableFuture<List<VirtualMachineImage>> getOSImagesAsync(
        @NotNull final ComputeManagementClient client) {
    final SettableFuture<List<VirtualMachineImage>> result = SettableFuture.create();
    DefaultLoader.getIdeHelper().executeOnPooledThread(new Runnable() {
        @Override
        public void run() {
            try {
                List<VirtualMachineImage> images = getOSImages(client);
                result.set(images);
            } catch (Exception e) {
                // Surface the failure through the future instead of swallowing it.
                result.setException(e);
            }
        }
    });
    return result;
}
/**
 * Lists all platform (gallery) OS images visible to the subscription, mapping
 * each SDK model into the plugin's {@code VirtualMachineImage} with the
 * PLATFORM_IMAGE type tag. Nullable fields are normalized to empty strings /
 * "now" / visible-in-GUI defaults.
 */
@NotNull
private static List<VirtualMachineImage> getOSImages(@NotNull ComputeManagementClient client)
        throws Exception {
    List<VirtualMachineImage> vmImageList = new ArrayList<VirtualMachineImage>();
    VirtualMachineOSImageListResponse osImages = getVirtualMachineOSImageOperations(client).list();
    if (osImages != null) {
        for (VirtualMachineOSImageListResponse.VirtualMachineOSImage osImage : osImages) {
            vmImageList.add(
                    new VirtualMachineImage(
                            osImage.getName() != null ? osImage.getName() : "",
                            PLATFORM_IMAGE,
                            osImage.getCategory() != null ? osImage.getCategory() : "",
                            osImage.getPublisherName() != null ? osImage.getPublisherName() : "",
                            osImage.getPublishedDate() != null ?
                                    osImage.getPublishedDate() :
                                    GregorianCalendar.getInstance(),
                            osImage.getLabel() != null ? osImage.getLabel() : "",
                            osImage.getDescription() != null ? osImage.getDescription() : "",
                            osImage.getOperatingSystemType() != null ? osImage.getOperatingSystemType() : "",
                            osImage.getLocation() != null ? osImage.getLocation() : "",
                            osImage.getEula() != null ? osImage.getEula() : "",
                            osImage.getPrivacyUri() != null ? osImage.getPrivacyUri().toString() : "",
                            osImage.getPricingDetailUri() != null ? osImage.getPricingDetailUri().toString() : "",
                            osImage.getRecommendedVMSize() != null ? osImage.getRecommendedVMSize() : "",
                            osImage.isShowInGui() != null ? osImage.isShowInGui() : true));
        }
    }
    return vmImageList;
}
/**
 * Lists user-captured VM images on a pooled background thread.
 *
 * @return a future completing with the image list or failing with the listing error.
 */
@NotNull
private static ListenableFuture<List<VirtualMachineImage>> getVMImagesAsync(
        @NotNull final ComputeManagementClient client) {
    final SettableFuture<List<VirtualMachineImage>> result = SettableFuture.create();
    DefaultLoader.getIdeHelper().executeOnPooledThread(new Runnable() {
        @Override
        public void run() {
            try {
                List<VirtualMachineImage> images = getVMImages(client);
                result.set(images);
            } catch (Exception e) {
                // Surface the failure through the future instead of swallowing it.
                result.setException(e);
            }
        }
    });
    return result;
}
/**
 * Lists all user-captured VM images in the subscription, mapping each SDK model
 * into the plugin's {@code VirtualMachineImage} with the USER_IMAGE type tag.
 * The OS type is taken from the image's OS disk configuration when available.
 * Nullable fields are normalized to empty strings / "now" / visible-in-GUI defaults.
 */
@NotNull
private static List<VirtualMachineImage> getVMImages(@NotNull ComputeManagementClient client)
        throws Exception {
    List<VirtualMachineImage> vmImageList = new ArrayList<VirtualMachineImage>();
    VirtualMachineVMImageListResponse vmImages = getVirtualMachineVMImageOperations(client).list();
    if (vmImages != null) {
        for (VirtualMachineVMImageListResponse.VirtualMachineVMImage vmImage : vmImages) {
            vmImageList.add(
                    new VirtualMachineImage(
                            vmImage.getName() != null ? vmImage.getName() : "",
                            USER_IMAGE,
                            vmImage.getCategory() != null ? vmImage.getCategory() : "",
                            vmImage.getPublisherName() != null ? vmImage.getPublisherName() : "",
                            vmImage.getPublishedDate() != null ?
                                    vmImage.getPublishedDate() :
                                    GregorianCalendar.getInstance(),
                            vmImage.getLabel() != null ? vmImage.getLabel() : "",
                            vmImage.getDescription() != null ? vmImage.getDescription() : "",
                            vmImage.getOSDiskConfiguration() != null
                                    && vmImage.getOSDiskConfiguration().getOperatingSystem() != null ?
                                    vmImage.getOSDiskConfiguration().getOperatingSystem() :
                                    "",
                            vmImage.getLocation() != null ? vmImage.getLocation() : "",
                            vmImage.getEula() != null ? vmImage.getEula() : "",
                            vmImage.getPrivacyUri() != null ? vmImage.getPrivacyUri().toString() : "",
                            vmImage.getPricingDetailLink() != null ? vmImage.getPricingDetailLink().toString() : "",
                            vmImage.getRecommendedVMSize() != null ? vmImage.getRecommendedVMSize() : "",
                            vmImage.isShowInGui() != null ? vmImage.isShowInGui() : true));
        }
    }
    return vmImageList;
}
/**
 * Appends every role size that can host a virtual machine to {@code vmSizeList}.
 *
 * @return the same {@code vmSizeList} instance, for chaining.
 * @throws Exception if the role-size listing yields no response object.
 */
@NotNull
private static List<VirtualMachineSize> loadVMSizes(@NotNull ManagementClient client,
                                                    @NotNull List<VirtualMachineSize> vmSizeList)
        throws Exception {
    RoleSizeListResponse response = getRoleSizeOperations(client).list();
    if (response == null) {
        throw new Exception("Unable to retrieve Role Sizes information");
    }
    if (response.getRoleSizes() == null) {
        return vmSizeList;
    }
    for (RoleSizeListResponse.RoleSize roleSize : response.getRoleSizes()) {
        // Some role sizes only apply to web/worker roles; skip those.
        if (!roleSize.isSupportedByVirtualMachines()) {
            continue;
        }
        String sizeName = roleSize.getName() != null ? roleSize.getName() : "";
        String sizeLabel = roleSize.getLabel() != null ? roleSize.getLabel() : "";
        vmSizeList.add(new VirtualMachineSize(sizeName, sizeLabel, roleSize.getCores(), roleSize.getMemoryInMb()));
    }
    return vmSizeList;
}
/**
 * Appends every data-center location available to the subscription to {@code locationList}.
 *
 * @return the same {@code locationList} instance, for chaining.
 * @throws Exception if the locations listing yields no response object.
 */
@NotNull
private static List<Location> loadLocations(@NotNull ManagementClient client,
                                            @NotNull List<Location> locationList)
        throws Exception {
    LocationsListResponse response = getLocationsOperations(client).list();
    if (response == null) {
        throw new Exception("Unable to retrieve Locations information");
    }
    if (response.getLocations() == null) {
        return locationList;
    }
    for (LocationsListResponse.Location location : response.getLocations()) {
        String locationName = location.getName() != null ? location.getName() : "";
        String displayName = location.getDisplayName() != null ? location.getDisplayName() : "";
        locationList.add(new Location(locationName, displayName));
    }
    return locationList;
}
/**
 * Appends every affinity group of the subscription to {@code affinityGroupList}.
 *
 * @return the same {@code affinityGroupList} instance, for chaining.
 * @throws Exception if the affinity-group listing yields no response object.
 */
@NotNull
private static List<AffinityGroup> loadAffinityGroups(@NotNull ManagementClient client,
                                                      @NotNull List<AffinityGroup> affinityGroupList)
        throws Exception {
    AffinityGroupListResponse response = getAffinityGroupOperations(client).list();
    if (response == null) {
        throw new Exception("Unable to retrieve Affinity Groups information");
    }
    if (response.getAffinityGroups() == null) {
        return affinityGroupList;
    }
    for (AffinityGroupListResponse.AffinityGroup group : response.getAffinityGroups()) {
        String groupName = group.getName() != null ? group.getName() : "";
        String groupLabel = group.getLabel() != null ? group.getLabel() : "";
        String groupLocation = group.getLocation() != null ? group.getLocation() : "";
        affinityGroupList.add(new AffinityGroup(groupName, groupLabel, groupLocation));
    }
    return affinityGroupList;
}
/**
 * Adds a new VM role to an existing deployment and waits for the operation to
 * be accepted (status validated).
 * <p>
 * User images are referenced directly by name with a target media location;
 * platform images get an OS virtual hard disk created from the source image.
 * A network configuration set is attached only when the VM declares endpoints
 * or a subnet.
 */
private static void createVM(@NotNull ComputeManagementClient client,
                             @NotNull VirtualMachineOperations vmo,
                             @NotNull VirtualMachine virtualMachine,
                             @NotNull VirtualMachineImage vmImage,
                             @NotNull String mediaLocation,
                             @NotNull String username,
                             @NotNull String password,
                             @NotNull byte[] certificate)
        throws Exception {
    VirtualMachineCreateParameters vmcp = new VirtualMachineCreateParameters(virtualMachine.getName());
    if (!virtualMachine.getAvailabilitySet().isEmpty()) {
        vmcp.setAvailabilitySetName(virtualMachine.getAvailabilitySet());
    }
    if (vmImage.getType().equals(USER_IMAGE)) {
        vmcp.setVMImageName(vmImage.getName());
        vmcp.setMediaLocation(new URI(mediaLocation));
    } else if (vmImage.getType().equals(PLATFORM_IMAGE)) {
        OSVirtualHardDisk osVHD = new OSVirtualHardDisk();
        osVHD.setSourceImageName(vmImage.getName());
        osVHD.setMediaLink(new URI(mediaLocation));
        vmcp.setOSVirtualHardDisk(osVHD);
    }
    vmcp.setRoleSize(virtualMachine.getSize());
    vmcp.getConfigurationSets().add(getProvisioningConfigurationSet(client, virtualMachine, vmImage,
            username, password, certificate));
    if (virtualMachine.getEndpoints().size() > 0 || !virtualMachine.getSubnet().isEmpty()) {
        vmcp.getConfigurationSets().add(getNetworkConfigurationSet(virtualMachine));
    }
    OperationStatusResponse osr = vmo.create(virtualMachine.getServiceName(), virtualMachine.getDeploymentName(), vmcp);
    validateOperationStatus(osr);
}
/**
 * Creates a brand-new production deployment containing a single persistent-VM
 * role, and waits for the operation to be accepted (status validated).
 * <p>
 * Mirrors {@link #createVM}: user images are referenced by name with a target
 * media location; platform images get an OS virtual hard disk created from the
 * source image. A network configuration set is attached only when the VM
 * declares endpoints or a subnet.
 */
private static void createVMDeployment(@NotNull ComputeManagementClient client,
                                       @NotNull VirtualMachineOperations vmo,
                                       @NotNull VirtualMachine virtualMachine,
                                       @NotNull VirtualMachineImage vmImage,
                                       @NotNull String mediaLocation,
                                       @NotNull String virtualNetwork,
                                       @NotNull String username,
                                       @NotNull String password,
                                       @NotNull byte[] certificate)
        throws Exception {
    VirtualMachineCreateDeploymentParameters vmcdp = new VirtualMachineCreateDeploymentParameters();
    vmcdp.setName(virtualMachine.getName());
    vmcdp.setLabel(virtualMachine.getName());
    vmcdp.setDeploymentSlot(DeploymentSlot.Production);
    if (!virtualNetwork.isEmpty()) {
        vmcdp.setVirtualNetworkName(virtualNetwork);
    }
    Role role = new Role();
    role.setRoleName(virtualMachine.getName());
    if (!virtualMachine.getAvailabilitySet().isEmpty()) {
        role.setAvailabilitySetName(virtualMachine.getAvailabilitySet());
    }
    // Fix: compare against the shared image-type constants instead of the
    // hard-coded "User"/"Platform" literals this method previously used —
    // the sibling createVM performs the identical branch with USER_IMAGE /
    // PLATFORM_IMAGE, and drifting literals risk silently skipping both branches.
    if (vmImage.getType().equals(USER_IMAGE)) {
        role.setVMImageName(vmImage.getName());
        role.setMediaLocation(new URI(mediaLocation));
    } else if (vmImage.getType().equals(PLATFORM_IMAGE)) {
        OSVirtualHardDisk osVHD = new OSVirtualHardDisk();
        osVHD.setSourceImageName(vmImage.getName());
        osVHD.setMediaLink(new URI(mediaLocation));
        role.setOSVirtualHardDisk(osVHD);
    }
    role.setRoleSize(virtualMachine.getSize());
    role.setRoleType(PERSISTENT_VM_ROLE);
    role.getConfigurationSets().add(getProvisioningConfigurationSet(client, virtualMachine, vmImage,
            username, password, certificate));
    if (virtualMachine.getEndpoints().size() > 0 || !virtualMachine.getSubnet().isEmpty()) {
        role.getConfigurationSets().add(getNetworkConfigurationSet(virtualMachine));
    }
    vmcdp.getRoles().add(role);
    OperationStatusResponse osr = vmo.createDeployment(virtualMachine.getServiceName(), vmcdp);
    validateOperationStatus(osr);
}
/**
 * Builds the OS provisioning configuration set for a new VM.
 * <p>
 * Windows images get admin credentials and a computer name; Linux images get a
 * user name, optionally a password (with SSH password auth enabled), optionally
 * an SSH public-key certificate (uploaded as a service certificate and wired
 * into authorized_keys), and a host name.
 *
 * @throws AzureCmdException if uploading the SSH service certificate fails.
 */
@NotNull
private static ConfigurationSet getProvisioningConfigurationSet(@NotNull ComputeManagementClient client,
                                                                @NotNull VirtualMachine virtualMachine,
                                                                @NotNull VirtualMachineImage vmImage,
                                                                @NotNull String username,
                                                                @NotNull String password,
                                                                @NotNull byte[] certificate)
        throws AzureCmdException {
    ConfigurationSet provConfSet = new ConfigurationSet();
    if (vmImage.getOperatingSystemType().equals(WINDOWS_OS_TYPE)) {
        provConfSet.setConfigurationSetType(WINDOWS_PROVISIONING_CONFIGURATION);
        provConfSet.setAdminUserName(username);
        provConfSet.setAdminPassword(password);
        provConfSet.setComputerName(buildMachineName(virtualMachine));
    } else if (vmImage.getOperatingSystemType().equals(LINUX_OS_TYPE)) {
        provConfSet.setConfigurationSetType(LINUX_PROVISIONING_CONFIGURATION);
        provConfSet.setUserName(username);
        if (!password.isEmpty()) {
            provConfSet.setUserPassword(password);
            provConfSet.setDisableSshPasswordAuthentication(false);
        }
        if (certificate.length > 0) {
            try {
                // Upload the public key as a service certificate, then point SSH at it.
                String fingerprint = createServiceCertificate(virtualMachine.getServiceName(),
                        certificate,
                        "").execute(client);
                SshSettings sshSettings = new SshSettings();
                String keyLocation = String.format("/home/%s/.ssh/authorized_keys", username);
                sshSettings.getPublicKeys().add(new SshSettingPublicKey(fingerprint, keyLocation));
                provConfSet.setSshSettings(sshSettings);
            } catch (Throwable throwable) {
                // Normalize everything to AzureCmdException, unwrapping ExecutionException.
                if (throwable instanceof AzureCmdException) {
                    throw (AzureCmdException) throwable;
                } else if (throwable instanceof ExecutionException) {
                    throw new AzureCmdException(throwable.getCause().getMessage(), throwable.getCause());
                }
                throw new AzureCmdException(throwable.getMessage(), throwable);
            }
        }
        provConfSet.setHostName(buildMachineName(virtualMachine));
    }
    return provConfSet;
}

/**
 * Builds the "&lt;service&gt;-&lt;vm&gt;-01" machine name used both as the Windows
 * computer name and the Linux host name.
 * <p>
 * Fix: the prefixes are truncated to at most five characters. The previous code
 * called {@code substring(0, 5)} unconditionally, which threw
 * {@link StringIndexOutOfBoundsException} whenever the service or VM name was
 * shorter than five characters.
 */
@NotNull
private static String buildMachineName(@NotNull VirtualMachine virtualMachine) {
    String serviceName = virtualMachine.getServiceName();
    String vmName = virtualMachine.getName();
    return String.format("%s-%s-%02d",
            serviceName.substring(0, Math.min(5, serviceName.length())),
            vmName.substring(0, Math.min(5, vmName.length())),
            1);
}
/**
 * Builds the network configuration set for a new VM: one input endpoint per
 * declared endpoint (mapping private/public ports) plus the subnet, if any.
 */
@NotNull
private static ConfigurationSet getNetworkConfigurationSet(@NotNull VirtualMachine virtualMachine) {
    ConfigurationSet networkConfig = new ConfigurationSet();
    networkConfig.setConfigurationSetType(NETWORK_CONFIGURATION);
    ArrayList<InputEndpoint> inputEndpoints = networkConfig.getInputEndpoints();
    for (Endpoint endpoint : virtualMachine.getEndpoints()) {
        InputEndpoint mapped = new InputEndpoint();
        mapped.setName(endpoint.getName());
        mapped.setProtocol(endpoint.getProtocol());
        mapped.setLocalPort(endpoint.getPrivatePort());
        mapped.setPort(endpoint.getPublicPort());
        inputEndpoints.add(mapped);
    }
    String subnet = virtualMachine.getSubnet();
    if (!subnet.isEmpty()) {
        networkConfig.getSubnetNames().add(subnet);
    }
    return networkConfig;
}
/**
 * Resolves the runtime status of {@code role} by locating its role instance in
 * the deployment and translating that instance's status literal.
 *
 * @return the translated status, or {@code Unknown} when no matching instance
 * (or no instance status) is available.
 */
@NotNull
private static VirtualMachine.Status getVMStatus(@NotNull DeploymentGetResponse deployment, @NotNull Role role) {
    if (deployment.getRoleInstances() == null) {
        return VirtualMachine.Status.Unknown;
    }
    for (RoleInstance instance : deployment.getRoleInstances()) {
        if (instance.getRoleName() != null && instance.getRoleName().equals(role.getRoleName())) {
            // First instance whose name matches wins.
            if (instance.getInstanceStatus() != null) {
                return getRoleStatus(instance.getInstanceStatus());
            }
            return VirtualMachine.Status.Unknown;
        }
    }
    return VirtualMachine.Status.Unknown;
}
/**
 * Translates a service-side role-instance status literal into the plugin's
 * {@code VirtualMachine.Status} enum. Unrecognized literals map to {@code Unknown}.
 */
@NotNull
private static VirtualMachine.Status getRoleStatus(@NotNull String instanceStatus) {
    if (instanceStatus.equals(StatusLiterals.UNKNOWN)) {
        return VirtualMachine.Status.Unknown;
    }
    if (instanceStatus.equals(StatusLiterals.READY_ROLE)) {
        return VirtualMachine.Status.Ready;
    }
    if (instanceStatus.equals(StatusLiterals.STOPPED_VM)) {
        return VirtualMachine.Status.Stopped;
    }
    if (instanceStatus.equals(StatusLiterals.STOPPED_DEALLOCATED)) {
        return VirtualMachine.Status.StoppedDeallocated;
    }
    if (instanceStatus.equals(StatusLiterals.BUSY_ROLE)) {
        return VirtualMachine.Status.Busy;
    }
    if (instanceStatus.equals(StatusLiterals.CREATING_VM)
            || instanceStatus.equals(StatusLiterals.CREATING_ROLE)) {
        return VirtualMachine.Status.Creating;
    }
    if (instanceStatus.equals(StatusLiterals.STARTING_VM)
            || instanceStatus.equals(StatusLiterals.STARTING_ROLE)) {
        return VirtualMachine.Status.Starting;
    }
    if (instanceStatus.equals(StatusLiterals.STOPPING_VM)
            || instanceStatus.equals(StatusLiterals.STOPPING_ROLE)) {
        return VirtualMachine.Status.Stopping;
    }
    if (instanceStatus.equals(StatusLiterals.DELETING_VM)) {
        return VirtualMachine.Status.Deleting;
    }
    if (instanceStatus.equals(StatusLiterals.RESTARTING_ROLE)) {
        return VirtualMachine.Status.Restarting;
    }
    if (instanceStatus.equals(StatusLiterals.CYCLING_ROLE)) {
        return VirtualMachine.Status.Cycling;
    }
    if (instanceStatus.equals(StatusLiterals.FAILED_STARTING_VM)
            || instanceStatus.equals(StatusLiterals.FAILED_STARTING_ROLE)) {
        return VirtualMachine.Status.FailedStarting;
    }
    if (instanceStatus.equals(StatusLiterals.UNRESPONSIVE_ROLE)) {
        return VirtualMachine.Status.Unresponsive;
    }
    if (instanceStatus.equals(StatusLiterals.PREPARING)) {
        return VirtualMachine.Status.Preparing;
    }
    return VirtualMachine.Status.Unknown;
}
/**
 * Renders a byte array as an uppercase/lowercase hex string (per HEX_ARRAY),
 * two characters per byte, high nibble first.
 */
@NotNull
private static String bytesToHex(@NotNull byte[] bytes) {
    StringBuilder hex = new StringBuilder(bytes.length * 2);
    for (byte b : bytes) {
        int value = b & 0xFF;
        hex.append(HEX_ARRAY[value >>> 4]).append(HEX_ARRAY[value & 0x0F]);
    }
    return hex.toString();
}
@Nullable
private static Configuration getConfigurationFromAuthToken(@NotNull String subscriptionId)
throws SAXException, ParserConfigurationException, XPathExpressionException, IOException {
// NOTE: This implementation has to be considered as somewhat hacky. It relies on certain
// internal implementation details of the Azure SDK for Java. For example we supply null
// values for the key store location and password and specify a key store type value
// though it will not be used. We also supply a no-op "credential provider". Ideally we want
// the SDK to directly support the scenario we need.
String azureServiceManagementUri = DefaultLoader.getPluginComponent().getSettings().getAzureServiceManagementUri();
ClassLoader old = Thread.currentThread().getContextClassLoader();
Thread.currentThread().setContextClassLoader(AzureManagerImpl.class.getClassLoader());
try {
// create a default configuration object
Configuration configuration = ManagementConfiguration.configure(
URI.create(azureServiceManagementUri),
subscriptionId, null, null, KeyStoreType.pkcs12);
if (configuration != null) {
// replace the credential provider with a custom one that does nothing
configuration.setProperty(
ManagementConfiguration.SUBSCRIPTION_CLOUD_CREDENTIALS,
new EmptyCloudCredentials(subscriptionId));
// remove the SSL connection factory in case one was added; this is needed
// in the case when the user switches from subscription based auth to A/D
// sign-in because in that scenario the CertificateCloudCredentials class
// would have added an SSL connection factory object to the configuration
// object which would then be used when making the SSL call to the Azure
// service management API. This tells us that the configuration object is
// reused across calls to ManagementConfiguration.configure. The SSL connection
// factory object so configured will attempt to use certificate based auth
// which will fail since we don't have a certificate handy when using A/D auth.
configuration.getProperties().remove(ApacheConfigurationProperties.PROPERTY_SSL_CONNECTION_SOCKET_FACTORY);
}
return configuration;
} finally {
Thread.currentThread().setContextClassLoader(old);
}
}
@Nullable
private static Configuration getConfigurationFromCertificate(@NotNull String subscriptionId,
@NotNull String managementCertificate,
@NotNull String serviceManagementUrl)
throws KeyStoreException, CertificateException, NoSuchAlgorithmException, IOException,
ParserConfigurationException, XPathExpressionException, SAXException, AzureCmdException {
String keyStorePath = File.createTempFile("azk", null).getPath();
initKeyStore(
managementCertificate,
OpenSSLHelper.PASSWORD,
keyStorePath,
OpenSSLHelper.PASSWORD);
ClassLoader old = Thread.currentThread().getContextClassLoader();
Thread.currentThread().setContextClassLoader(AzureManagerImpl.class.getClassLoader());
try {
return ManagementConfiguration.configure(URI.create(serviceManagementUrl), subscriptionId, keyStorePath,
OpenSSLHelper.PASSWORD, KeyStoreType.pkcs12);
} finally {
Thread.currentThread().setContextClassLoader(old);
}
}
private static void initKeyStore(@NotNull String base64Certificate, @NotNull String certificatePwd,
@NotNull String keyStorePath, @NotNull String keyStorePwd)
throws KeyStoreException, IOException, NoSuchAlgorithmException, CertificateException {
FileOutputStream keyStoreOutputStream = new FileOutputStream(keyStorePath);
try {
KeyStore store = KeyStore.getInstance("PKCS12");
store.load(null, null);
final byte[] decode = Base64.decode(base64Certificate);
InputStream sslInputStream = new ByteArrayInputStream(decode);
store.load(sslInputStream, certificatePwd.toCharArray());
// we need to a create a physical key store as well here
store.store(keyStoreOutputStream, keyStorePwd.toCharArray());
} finally {
keyStoreOutputStream.close();
}
}
} | {
"content_hash": "920c986de8190aed9a6a14d88bcc9097",
"timestamp": "",
"source": "github",
"line_count": 1895,
"max_line_length": 189,
"avg_line_length": 45.82058047493404,
"alnum_prop": 0.6074974087297017,
"repo_name": "leotilli/msopentech-tools-for-intellij",
"id": "c11da616b98473be16ac7ef8eeb60b992fac4491",
"size": "87457",
"binary": false,
"copies": "3",
"ref": "refs/heads/dev",
"path": "src/common/src/com/microsoftopentechnologies/tooling/msservices/helpers/azure/sdk/AzureSDKHelper.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "5380"
},
{
"name": "CSS",
"bytes": "178"
},
{
"name": "HTML",
"bytes": "17509"
},
{
"name": "Haskell",
"bytes": "680"
},
{
"name": "Java",
"bytes": "2069954"
},
{
"name": "JavaScript",
"bytes": "254"
}
],
"symlink_target": ""
} |
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallVector.h"
#include "mlir-hlo/Dialect/lhlo/IR/lhlo_ops.h"
#include "mlir/Dialect/Arith/IR/Arith.h"
#include "mlir/Dialect/Func/IR/FuncOps.h"
#include "mlir/Dialect/Linalg/IR/Linalg.h"
#include "mlir/Dialect/MemRef/IR/MemRef.h"
#include "mlir/Dialect/SCF/IR/SCF.h"
#include "mlir/IR/BuiltinTypes.h"
#include "mlir/Pass/Pass.h"
#include "mlir/Transforms/DialectConversion.h"
namespace mlir {
namespace lmhlo {
#define GEN_PASS_DEF_LHLOLEGALIZETOPARALLELLOOPSPASS
#include "mlir-hlo/Dialect/lhlo/transforms/lmhlo_passes.h.inc"
namespace {
// Clones the body of `lhloBlock` — which operates on scalar buffers and
// writes a single output buffer — so that it can be driven by the SSA
// `operands` instead. A fresh scalar buffer is allocated per block argument,
// each operand is stored into its buffer, the ops are cloned against those
// buffers, and the value read back from the last (output) buffer is returned.
//
// Example. For `operands` of types (f32, i32) and a block with LHLO ops and
// signature:
//   ^bb(%lhs: memref<f32>, %rhs: memref<i32>, %res: memref<i1>):
//     <LHLO_ops>
// this emits the required alloc/store ops and returns the computed `i1`
// result.
Value applySingleResultLhloCode(Location loc, ValueRange operands,
                                Block* lhloBlock, OpBuilder* b) {
  // One scalar buffer per block argument (including the output buffer).
  SmallVector<Value, 2> bufferArgs;
  for (Type argType : lhloBlock->getArgumentTypes()) {
    bufferArgs.push_back(
        b->create<memref::AllocOp>(loc, argType.cast<MemRefType>()));
  }
  // Seed the input buffers with the given operand values.
  for (size_t i = 0, e = operands.size(); i < e; ++i) {
    b->create<memref::StoreOp>(loc, operands[i], bufferArgs[i]);
  }
  // Clone the ops from `lhloBlock`, remapping block arguments to buffers.
  BlockAndValueMapping bvm;
  bvm.map(lhloBlock->getArguments(), bufferArgs);
  for (Operation& nestedOp : lhloBlock->without_terminator()) {
    Operation* cloned = b->clone(nestedOp, bvm);
    bvm.map(nestedOp.getResults(), cloned->getResults());
  }
  // The result lives in the buffer bound to the block's last argument.
  return b->create<memref::LoadOp>(loc, bufferArgs.back());
}
// Turns a block with LHLO ops and signature
//   ^bb(%lhs: memref<f32>, %rhs: memref<f32>, %res: memref<f32>):
// into the reduction operator of an scf.reduce: scalar arguments and the
// scf.reduce result are bridged through allocated buffers so the LHLO ops
// can be reused unchanged (see applySingleResultLhloCode).
void convertToReductionOperator(Location loc, scf::ReduceOp reduceOp,
                                Block* lhloBlock, OpBuilder* b) {
  Block& reductionBlock = reduceOp.getReductionOperator().front();
  // Restore the builder's insertion point when done.
  OpBuilder::InsertionGuard guard(*b);
  b->setInsertionPointToStart(&reductionBlock);
  Value reduced = applySingleResultLhloCode(
      loc, reductionBlock.getArguments(), lhloBlock, b);
  b->create<scf::ReduceReturnOp>(loc, reduced);
}
// Returns the extent of dimension `dimIndex` of `shapedValue` as a Value:
// a compile-time arith.constant when the dimension is static, otherwise a
// memref.dim op that reads the size at runtime.
Value getStaticOrDynamicDim(mlir::Location loc, Value shapedValue,
                            size_t dimIndex, int64_t dim, OpBuilder* b) {
  if (dim == ShapedType::kDynamicSize) {
    // Dynamic extent: query it at runtime.
    return b->create<memref::DimOp>(loc, shapedValue, dimIndex);
  }
  // Static extent: materialize it as an index constant.
  return b->create<arith::ConstantIndexOp>(loc, dim);
}
// Result of mapping output/window induction variables back to indices into
// the operand (see mapWindowIvsToInput).
struct MappedIvs {
  // False if the mapped indices are in the padding area, true otherwise.
  Value inBounds;
  // Mapped indices.
  SmallVector<Value, 2> ivs;
};
// Maps the output induction variables `ivs` and the window induction
// variables `windowIvs` of windowed op `op` to indices into `operand`,
// applying the op's window strides and low padding:
//   index_i = ivs_i * stride_i + windowIvs_i - pad_low_i
// Also computes `inBounds`, which is true iff every mapped index lies inside
// the operand (i.e. not in the padding area).
template <typename OpTy>
MappedIvs mapWindowIvsToInput(OpTy op, Value operand, ValueRange ivs,
                              ValueRange windowIvs, OpBuilder* b) {
  MappedIvs mappedIvs;

  if (!op.getWindowStrides().has_value()) {
    // NOTE(review): emitOpError does not abort; execution still reaches
    // .value() below on an empty optional — confirm callers guarantee the
    // attribute is present.
    op.emitOpError("No window strides specified.");
  }
  auto windowStrides = op.getWindowStrides().value();

  if (!op.getPadding().has_value()) {
    // NOTE(review): same caveat as for window strides above.
    op.emitOpError("No padding specified.");
  }
  auto padding = op.getPadding().value();

  auto loc = op.getLoc();
  auto operandShape = operand.getType().template cast<MemRefType>().getShape();

  // `in_bounds` is false when the mapped indices are in the padding area.
  // Start from true and AND in one unsigned bounds check per dimension.
  mappedIvs.inBounds = b->create<mlir::arith::ConstantOp>(
      loc, b->getI1Type(), b->getIntegerAttr(b->getI1Type(), 1));
  for (unsigned i = 0, e = ivs.size(); i < e; ++i) {
    auto stride = windowStrides.template getValues<llvm::APInt>()[i];
    // Only the low edge padding shifts the index; high padding merely
    // affects the bounds check.
    auto padLow = padding.template getValues<llvm::APInt>()[{i, 0}];

    Value strideVal =
        b->create<arith::ConstantIndexOp>(loc, stride.getSExtValue());
    Value padLowVal =
        b->create<arith::ConstantIndexOp>(loc, padLow.getSExtValue());

    // index = iv * stride + window_iv - pad_low
    Value center = b->create<arith::MulIOp>(loc, ivs[i], strideVal);
    Value offset = b->create<arith::SubIOp>(loc, windowIvs[i], padLowVal);
    Value index = b->create<arith::AddIOp>(loc, center, offset);
    Value upperBound =
        getStaticOrDynamicDim(loc, operand, i, operandShape[i], b);
    // We must check whether 0 <= index_i < shape_i, as otherwise we are in
    // the pad and then we have to use the neutral element for reduction.
    // Equivalently, it can be computed as the unsigned comparison index_i <
    // shape_i, since a negative value wraps to a large positive value.
    mappedIvs.inBounds = b->create<mlir::arith::AndIOp>(
        loc, mappedIvs.inBounds,
        b->create<arith::CmpIOp>(loc, arith::CmpIPredicate::ult, index,
                                 upperBound));
    mappedIvs.ivs.push_back(index);
  }
  return mappedIvs;
}
// Builds an scf.parallel loop that iterates over every element of
// `shapedValue`, with unit steps and upper bounds resolved per dimension
// (constants for static extents, memref.dim for dynamic ones).
scf::ParallelOp makeLoopOverShape(Location loc, Value shapedValue,
                                  OpBuilder* b) {
  Value zero = b->create<arith::ConstantIndexOp>(loc, 0);
  Value one = b->create<arith::ConstantIndexOp>(loc, 1);

  ArrayRef<int64_t> shape = shapedValue.getType().cast<ShapedType>().getShape();
  SmallVector<Value, 2> lowerBounds, upperBounds, steps;
  for (size_t i = 0, e = shape.size(); i < e; ++i) {
    lowerBounds.push_back(zero);
    upperBounds.push_back(
        getStaticOrDynamicDim(loc, shapedValue, i, shape[i], b));
    steps.push_back(one);
  }
  return b->create<scf::ParallelOp>(loc, lowerBounds, upperBounds, steps);
}
// Converts `lmhlo.ReduceOp` into two scf::ParallelOp and a scf::ReduceOp.
// The outer `ParallelOp` refers to the parallel loops if there are
// any. The inner `ParallelOp` refers to the reduction loops and `ReduceOp`
// contains the reduction operator.
//
// Example:
//
//  "lmhlo.reduce"(%buffer, %init_buf, %result) ({
//    ^bb0(%lhs: memref<f32>, %rhs: memref<f32>, %res: memref<f32>):
//      <LHLO ops>
//  } ) {dimensions = dense<[1]> : tensor<1xi64>}
//    : (memref<100x10x5xf32>, memref<f32>, memref<100x5xf32>) -> ()
//
// is roughly converted into:
//
//  %init = load %init_buf[] : memref<f32>
//  scf.parallel (%i, %k) = (%c0, %c0) to (%c100, %c5) step (%c1, %c1) {
//    %result = scf.parallel (%j) = (%c0) to (%c10) step (%c1) init (%init) {
//      %elem_to_reduce = load %buffer[%i, %j, %k] : memref<100x10x5xf32>
//      scf.reduce(%elem_to_reduce) {
//        ^bb0(%elem: f32, %acc: f32):
//          elem_buf = alloc() : memref<f32>
//          store %elem, elem_buf[] : memref<f32>
//          acc_buf = alloc() : memref<f32>
//          store %acc, acc_buf[] : memref<f32>
//          <LHLO_ops>
//          %acc_result = load acc_buf[] : memref<f32>
//          scf.reduce.return %acc_result : f32
//      } : f32
//      scf.yield
//    } : f32
//    scf.yield
//  }
class ReduceOpConverter : public OpConversionPattern<lmhlo::ReduceOp> {
 public:
  using OpConversionPattern<lmhlo::ReduceOp>::OpConversionPattern;

  LogicalResult matchAndRewrite(
      lmhlo::ReduceOp reduceOp, OpAdaptor /*adaptor*/,
      ConversionPatternRewriter& rewriter) const final {
    // TODO(b/183977252) : Handle variadic ReduceOp/ReduceWindowOp
    if (reduceOp.getOut().size() != 1) return failure();

    // First build the loop nest with an empty scf.reduce, then splice the
    // LHLO reduction body into the scf.reduce region.
    scf::ReduceOp scfReduceOp =
        createReduceOpInNestedParallelLoops(reduceOp, &rewriter);
    convertToReductionOperator(reduceOp.getLoc(), scfReduceOp,
                               &reduceOp.getBody().front(), &rewriter);
    // lmhlo.reduce writes through its output buffer and has no SSA results,
    // so there is nothing to replace uses with.
    rewriter.replaceOp(reduceOp, llvm::None);
    return success();
  }

 private:
  // Creates nested `scf.parallel` ops with `scf.reduce`. The outer ParallelOp
  // refers to the parallel dimensions of `reduce_op` if any and the inner
  // ParallelOp refers to the reduction dimensions. The scf.reduce op is
  // returned.
  //
  // If the reduction argument is a memref<100x10x5xf32> and the
  // reduction is performed along dimension 1 then this method will generate
  //
  //  %init = load %init_buf[] : memref<f32>
  //  scf.parallel (%i, %k) = (%c0, %c0) to (%c100, %c5) step (%c1, %c1) {
  //    %result = scf.parallel (%j) = (%c0) to (%c10) step (%c1) init (%init) {
  //      %elem_to_reduce = load %buffer[%i, %j, %k] : memref<100x10x5xf32>
  //      scf.reduce(%elem_to_reduce) {
  //        <THE BLOCK PTR TO BE RETURNED>
  //      } : f32
  //      scf.yield
  //    } : f32
  //    scf.yield
  //  }
  scf::ReduceOp createReduceOpInNestedParallelLoops(
      lmhlo::ReduceOp reduceOp, ConversionPatternRewriter* rewriter) const {
    auto loc = reduceOp.getLoc();
    DenseSet<int> reducingDims;
    for (const auto& rdim : reduceOp.getDimensions().getValues<APInt>()) {
      reducingDims.insert(rdim.getSExtValue());
    }

    Value operand = reduceOp.getInputs().front();
    Value out = reduceOp.getOut().front();
    SmallVector<Value, 2> parallelLower, parallelUpper, parallelStep;
    SmallVector<Value, 2> reduceLower, reduceUpper, reduceStep;
    auto operandShape = operand.getType().cast<MemRefType>().getShape();
    // Partition the operand dimensions into bounds for the parallel loop and
    // bounds for the reduction loop.
    for (const auto& dim : llvm::enumerate(operandShape)) {
      const bool isReducingDim = reducingDims.count(dim.index());

      Value ub = getStaticOrDynamicDim(loc, operand, dim.index(), dim.value(),
                                       rewriter);
      Value lb = rewriter->create<arith::ConstantIndexOp>(loc, 0);
      Value step = rewriter->create<arith::ConstantIndexOp>(loc, 1);
      (isReducingDim ? reduceLower : parallelLower).push_back(lb);
      (isReducingDim ? reduceUpper : parallelUpper).push_back(ub);
      (isReducingDim ? reduceStep : parallelStep).push_back(step);
    }
    // Load initial value from memref<element_type>.
    SmallVector<Value, 1> initValue = {rewriter->create<memref::LoadOp>(
        loc, *reduceOp.getInitValues().begin())};
    // Outer ParallelOp is not needed if it is a reduction across all dims.
    scf::ParallelOp outer;
    if (!parallelLower.empty()) {
      outer = rewriter->create<scf::ParallelOp>(loc, parallelLower,
                                                parallelUpper, parallelStep);
      rewriter->setInsertionPointToStart(outer.getBody());
    }
    scf::ParallelOp inner = rewriter->create<scf::ParallelOp>(
        loc, reduceLower, reduceUpper, reduceStep, ValueRange(initValue));
    Value reductionResult = *inner.getResults().begin();

    // Store the reduction result: indexed by the outer loop's ivs when it
    // exists, otherwise at constant index 0.
    SmallVector<Value, 1> outIndices;
    if (outer != nullptr) {
      outIndices.reserve(outer.getNumLoops());
      for (Value iv : outer.getInductionVars()) {
        outIndices.push_back(iv);
      }
    } else {
      outIndices.push_back(rewriter->create<arith::ConstantIndexOp>(loc, 0));
    }

    rewriter->create<memref::StoreOp>(loc, reductionResult, out, outIndices);

    // Load the element to reduce: interleave inner (reduction) and outer
    // (parallel) ivs back into operand order.
    SmallVector<Value, 2> indices;
    indices.reserve(operandShape.size());

    if (outer) {
      auto innerIvsIt = inner.getInductionVars().begin();
      auto outerIvsIt = outer.getInductionVars().begin();
      for (unsigned i = 0, e = operandShape.size(); i < e; ++i) {
        indices.push_back(reducingDims.count(i) ? *innerIvsIt++
                                                : *outerIvsIt++);
      }
    } else {
      indices = inner.getInductionVars();
    }

    // The load and the scf.reduce live inside the inner loop body.
    rewriter->setInsertionPointToStart(inner.getBody());
    Value elem = rewriter->create<mlir::memref::LoadOp>(
        loc, reduceOp.getInputs().front(), indices);
    return rewriter->create<scf::ReduceOp>(loc, elem);
  }
};
// Pseudocode:
// for each index O in output
//   accumulator = neutral_value
//   in_bounds = true
//   for each index W in window
//     for each dimension i from 0 to rank - 1
//       index = O[i] * stride[i] + W[i] - pad_low[i]
//       in_bounds = inbounds && (index `ult` shape[i])
//       I[i] = index
//     if (in_bounds)
//       value = input[I]
//     else
//       value = neutral_value
//     accumulator = reduction_operator(accumulator, value)
// output[O] = accumulator
//
// Converts `lmhlo.ReduceWindowOp` into two scf::ParallelOp and a
// scf::ReduceOp.
// The outer `ParallelOp` refers to the parallel loops that traverse the
// output buffer. The inner `ParallelOp` refers to the reduction loops that
// traverse reduction windows and `ReduceOp` contains the reduction operator.
//
// Example:
//
// func @reduce_window(%arg: memref<112x112xf32>,
//              %init: memref<f32>,
//              %result: memref<56x56xf32>) {
//   "lmhlo.reduce_window"(%arg, %init, %result) ({
//     ^bb0(%lhs: memref<f32>, %rhs: memref<f32>, %res: memref<f32>):
//       "lmhlo.maximum"(%lhs, %rhs, %res)
//         : (memref<f32>, memref<f32>, memref<f32>) -> ()
//       "lmhlo.terminator"() : () -> ()
//     }) {
//       padding = dense<[[0, 1], [0, 1]]> : tensor<2x2xi64>,
//       window_dimensions = dense<[3, 3]> : tensor<2xi64>,
//       window_strides = dense<[2, 2]> : tensor<2xi64>
//     } : (memref<112x112xf32>, memref<f32>, memref<56x56xf32>) -> ()
//   return
// }
//
// is roughly converted into:
//
//    %neutral_elem = load %init_buf[] : memref<f32>
//    scf.parallel (%i, %j) = (%c0, %c0) to (%c56, %c56) step (%c1, %c1) {
//      %result = scf.parallel (%iw, %jw) = (%c0, %c0)
//                  to (%c3, %c3) step (%c1, %c1) neutral_elem (%0) -> f32 {
//        %in_bounds = <COMPUTE IF INDEX IS IN OPERAND'S pad>
//        %elem = load %operand[%computed_i, %computed_j]
//        %elem_or_neutral = select %in_bounds, %elem, %neutral_elem : f32
//        scf.reduce(%elem_to_reduce)  : f32 {
//          ^bb0(%arg7: f32, %arg8: f32):
//            <LHLO ops>
//        }
//        scf.yield
//      }
//      store %result, %output_buffer[%i, %j] : memref<56x56xf32>
//      scf.yield
//    }
//    return
//  }
class ReduceWindowOpConverter
    : public OpConversionPattern<lmhlo::ReduceWindowOp> {
 public:
  using OpConversionPattern<lmhlo::ReduceWindowOp>::OpConversionPattern;

  LogicalResult matchAndRewrite(
      lmhlo::ReduceWindowOp reduceWindowOp, OpAdaptor /*adaptor*/,
      ConversionPatternRewriter& rewriter) const final {
    // TODO(b/183977252) : Handle variadic ReduceOp/ReduceWindowOp
    if (reduceWindowOp.getOut().size() != 1) return failure();

    // Build the loop nest first, then splice the LHLO reduction body into
    // the scf.reduce region.
    scf::ParallelOp outputLoop, windowLoop;
    std::tie(outputLoop, windowLoop) =
        createParallelLoopsToTraverseOutputAndWindow(reduceWindowOp, &rewriter);

    scf::ReduceOp reduceOp = createReduceOpInNestedParallelLoops(
        reduceWindowOp, outputLoop, windowLoop, &rewriter);

    convertToReductionOperator(reduceWindowOp.getLoc(), reduceOp,
                               &reduceWindowOp.getBody().front(), &rewriter);
    // lmhlo.reduce_window writes through its output buffer; no SSA results.
    rewriter.replaceOp(reduceWindowOp, llvm::None);
    return success();
  }

 private:
  // Emits the outer loop over the output shape and the nested loop over the
  // window extents, plus the store of the window loop's reduction result
  // into the output buffer. Returns {output loop, window loop}.
  std::pair<scf::ParallelOp, scf::ParallelOp>
  createParallelLoopsToTraverseOutputAndWindow(
      lmhlo::ReduceWindowOp reduceWindowOp,
      ConversionPatternRewriter* rewriter) const {
    auto loc = reduceWindowOp.getLoc();
    Value initValue = rewriter->create<memref::LoadOp>(
        loc, reduceWindowOp.getInitValues()[0]);

    Value zero = rewriter->create<arith::ConstantIndexOp>(loc, 0);
    Value one = rewriter->create<arith::ConstantIndexOp>(loc, 1);

    // Create an outer parallel loop that spans the output of ReduceWindowOp.
    Value output = reduceWindowOp.getOut()[0];
    auto outputLoop = makeLoopOverShape(loc, output, rewriter);

    // Create a nested loop that traverses the window.
    SmallVector<Value, 2> windowLower, windowUpper, windowStep;
    rewriter->setInsertionPointToStart(outputLoop.getBody());
    for (const auto& windowDim : reduceWindowOp.getWindowDimensions()) {
      windowStep.push_back(one);
      windowLower.push_back(zero);
      windowUpper.push_back(rewriter->create<arith::ConstantIndexOp>(
          loc, windowDim.getSExtValue()));
    }
    auto windowLoop = rewriter->create<scf::ParallelOp>(
        loc, windowLower, windowUpper, windowStep, ValueRange(initValue));

    Value reductionResult = *windowLoop.getResults().begin();
    auto outputIvs = outputLoop.getInductionVars();
    rewriter->create<memref::StoreOp>(loc, reductionResult, output, outputIvs);
    return std::make_pair(outputLoop, windowLoop);
  }

  // Inside the window loop: maps the ivs back to operand indices, emits an
  // scf.if that loads the operand element when in bounds and yields the
  // neutral (init) value when in the padding, then feeds the selected value
  // into an scf.reduce whose body is filled in later.
  scf::ReduceOp createReduceOpInNestedParallelLoops(
      lmhlo::ReduceWindowOp reduceWindowOp, scf::ParallelOp outputLoop,
      scf::ParallelOp windowLoop, ConversionPatternRewriter* rewriter) const {
    rewriter->setInsertionPointToStart(windowLoop.getBody());
    auto loc = reduceWindowOp.getLoc();

    if (reduceWindowOp.getBaseDilations().has_value() ||
        reduceWindowOp.getWindowDilations().has_value()) {
      reduceWindowOp.emitRemark(
          "Lowering to parallel loops does not support `base_dilations` or "
          "`window_dilations` attributes yet. The attributes will be ignored.");
    }

    Value input = reduceWindowOp.getInputs()[0];
    auto inputType = input.getType().cast<MemRefType>();

    // Compute ivs in 'arg' buffer and whether these ivs are in pad area or not.
    MappedIvs mappedIvs = mapWindowIvsToInput(
        reduceWindowOp, input, outputLoop.getInductionVars(),
        windowLoop.getInductionVars(), rewriter);

    auto elemOrInit = rewriter->create<scf::IfOp>(
        loc, inputType.getElementType(), mappedIvs.inBounds,
        /*withElseRegion=*/true);

    // Then-branch: in bounds, load the actual operand element.
    OpBuilder thenBuilder =
        elemOrInit.getThenBodyBuilder(rewriter->getListener());
    Value elem =
        thenBuilder.create<mlir::memref::LoadOp>(loc, input, mappedIvs.ivs);
    thenBuilder.create<scf::YieldOp>(loc, elem);

    // Else-branch: in the pad, yield the loop's neutral init value.
    OpBuilder elseBuilder =
        elemOrInit.getElseBodyBuilder(rewriter->getListener());
    elseBuilder.create<scf::YieldOp>(loc, *windowLoop.getInitVals().begin());

    return rewriter->create<scf::ReduceOp>(loc,
                                           *elemOrInit.getResults().begin());
  }
};
// See the operation semantics in
// https://www.tensorflow.org/xla/operation_semantics#selectandscatter
//
// Pseudocode:
//  scf.parallel(coordinates O in the output):
//    output[O] = init
//  scf.parallel(coordinates S in the source):
//    selected_ivs = 0
//    selected_val = 0
//    initialized_flag = false
//    scf.for (first dim W_1 in the window)
//         iter_args (selected_ivs, selected_val, initialized_flag):
//      ...
//        scf.for (last dim W_N in the window):
//             iter_args (selected_ivs, selected_val, initialized_flag):
//          I = S * stride + W - pad_low
//          if I within bounds of operand:
//            if (initialized_flag):
//              pred = select(selected_value, operand(I))):
//              if (pred)
//                selected_value = operand(I)
//                selected_index = I
//            else
//              selected_value = operand(I)
//              selected_index = I
//              initialized_flag = true
//    output(selected_index) = scatter(output(selected_index), source(S))
class SelectAndScatterOpConverter
    : public OpConversionPattern<lmhlo::SelectAndScatterOp> {
 public:
  using OpConversionPattern<lmhlo::SelectAndScatterOp>::OpConversionPattern;

  LogicalResult matchAndRewrite(
      lmhlo::SelectAndScatterOp sAndSOp, OpAdaptor /*adaptor*/,
      ConversionPatternRewriter& rewriter) const final {
    auto loc = sAndSOp.getLoc();
    // Fill the whole output with the init value first; scatter updates are
    // applied on top of it below.
    initializeOutput(sAndSOp, &rewriter);
    scf::ParallelOp loopOverSrc =
        makeLoopOverShape(loc, sAndSOp.getSource(), &rewriter);
    rewriter.setInsertionPointToStart(loopOverSrc.getBody());

    // Compute indices of the selected element in the window.
    auto selectedIvs = selectIvs(sAndSOp, loopOverSrc, &rewriter);

    // Load `source[selected_ivs]`.
    auto srcElem = rewriter.create<memref::LoadOp>(
        loc, sAndSOp.getSource(), loopOverSrc.getInductionVars());

    // Compute `out[selected_ivs]` = scatter(out[selected_ivs], src_element)`.
    // GenericAtomicRMW makes the read-modify-write safe under the parallel
    // source loop, since several source elements may select the same output
    // position.
    auto rmw = rewriter.create<memref::GenericAtomicRMWOp>(
        loc, sAndSOp.getOut(), selectedIvs);
    OpBuilder rmwBuilder = OpBuilder::atBlockEnd(rmw.getBody());
    auto accResult =
        applySingleResultLhloCode(loc, {srcElem, rmw.getCurrentValue()},
                                  &sAndSOp.getScatter().front(), &rmwBuilder);
    rmwBuilder.create<memref::AtomicYieldOp>(loc, accResult);

    // lmhlo.select_and_scatter writes through its output buffer.
    rewriter.replaceOp(sAndSOp, llvm::None);
    return success();
  }

 private:
  // Stores the init value into every element of the output buffer.
  void initializeOutput(lmhlo::SelectAndScatterOp sAndSOp, OpBuilder* b) const {
    auto loc = sAndSOp.getLoc();
    Value initValue = b->create<memref::LoadOp>(loc, sAndSOp.getInitValue());

    scf::ParallelOp loopOverOutput =
        makeLoopOverShape(loc, sAndSOp.getOut(), b);
    OpBuilder::InsertionGuard guard(*b);
    b->setInsertionPointToStart(loopOverOutput.getBody());
    b->create<memref::StoreOp>(loc, initValue, sAndSOp.getOut(),
                               loopOverOutput.getInductionVars());
  }

  struct WindowLoops {
    // Indices (one per rank) of the element selected by the whole window
    // traversal; results of the outermost window loop.
    SmallVector<Value, 2> selectedIvs;
    // Induction variables of the window loops, outermost first.
    SmallVector<Value, 2> windowIvs;
    // The innermost of the nested window scf.for loops.
    scf::ForOp innerLoop;
  };

  // Emits one sequential scf.for per window dimension, nested inside each
  // other, all carrying the iter_args [iv_1..iv_N, selected_value,
  // is_initialized]. The builder's insertion point is restored to just after
  // the outermost window loop on return.
  WindowLoops insertWindowLoops(lmhlo::SelectAndScatterOp sAndSOp,
                                scf::ParallelOp loopOverSrc,
                                OpBuilder* b) const {
    auto loc = sAndSOp.getLoc();
    Value zero = b->create<arith::ConstantIndexOp>(loc, 0);
    Value one = b->create<arith::ConstantIndexOp>(loc, 1);

    auto elementType =
        sAndSOp.getOut().getType().cast<MemRefType>().getElementType();
    auto rank = loopOverSrc.getNumLoops();

    // `iter_args` = [iv_1, ..., iv_N, selected_value, is_initialized]
    SmallVector<Value, 4> iterArgs(rank, zero);
    iterArgs.push_back(b->create<mlir::arith::ConstantOp>(
        loc, elementType, b->getFloatAttr(elementType, 0)));
    iterArgs.push_back(b->create<mlir::arith::ConstantOp>(
        loc, b->getI1Type(), b->getIntegerAttr(b->getI1Type(), 0)));

    // Create a nested loop that traverses the window.
    OpBuilder::InsertPoint ip;
    WindowLoops result;
    for (const auto& windowDim :
         sAndSOp.getWindowDimensions()->getValues<APInt>()) {
      Value upper =
          b->create<arith::ConstantIndexOp>(loc, windowDim.getSExtValue());
      result.innerLoop = b->create<scf::ForOp>(loc, zero, upper, one, iterArgs);
      if (b->getInsertionBlock() == loopOverSrc.getBody()) {
        // Outermost window loop: remember the insertion point (restored at
        // the end) and expose its iv results as the selected indices.
        ip = b->saveInsertionPoint();
        result.selectedIvs = result.innerLoop.getResults().take_front(rank);
      } else {
        // Non-outermost loop: forward the nested loop's results to the
        // enclosing loop via scf.yield.
        b->create<scf::YieldOp>(loc, result.innerLoop.getResults());
      }
      b->setInsertionPointToStart(result.innerLoop.getBody());
      iterArgs = ValueRange{result.innerLoop.getRegionIterArgs()};
      result.windowIvs.push_back(result.innerLoop.getInductionVar());
    }
    b->restoreInsertionPoint(ip);
    return result;
  }

  // Adapter to store iteration arguments of sequential loops that perform
  // select in a window.
  class IterArgs {
   public:
    explicit IterArgs(ValueRange ivsValFlag) : ivsValFlag(ivsValFlag) {}
    IterArgs(ValueRange ivs, Value value, Value flag) {
      ivsValFlag = ivs;
      ivsValFlag.push_back(value);
      ivsValFlag.push_back(flag);
    }

    ArrayRef<Value> toVector() const { return ivsValFlag; }

    // Indices of the currently selected value.
    ArrayRef<Value> ivs() const { return toVector().drop_back(2); }
    // Currently selected value w.r.t. select() function.
    Value value() const { return ivsValFlag.end()[-2]; }
    // i1 flag if value() and ivs() were initialized.
    Value isInit() const { return ivsValFlag.back(); }

   private:
    // Vector that stores iv_1, ..., iv_N, value, init.
    SmallVector<Value, 4> ivsValFlag;
  };

  // Builds the window loop nest for one source position and fills the
  // innermost body with the in-bounds check plus select-or-initialize logic.
  // Returns the indices of the element selected across the whole window.
  SmallVector<Value, 2> selectIvs(lmhlo::SelectAndScatterOp sAndSOp,
                                  scf::ParallelOp loopOverSrc,
                                  OpBuilder* b) const {
    auto loc = sAndSOp.getLoc();

    WindowLoops windowLoops = insertWindowLoops(sAndSOp, loopOverSrc, b);
    auto innerLoopB = OpBuilder::atBlockEnd(windowLoops.innerLoop.getBody());

    // Compute ivs in 'arg' buffer and whether these ivs are in the pad area.
    MappedIvs mappedIvs = mapWindowIvsToInput(
        sAndSOp, sAndSOp.getOperand(), loopOverSrc.getInductionVars(),
        windowLoops.windowIvs, &innerLoopB);

    IterArgs ivsValFlag(windowLoops.innerLoop.getRegionIterArgs());

    auto ifInBounds = innerLoopB.create<scf::IfOp>(
        loc, windowLoops.innerLoop.getResultTypes(), mappedIvs.inBounds,
        /*withElseRegion=*/true);

    // Case when we are inside boundaries of 'arg' and not in the pad area.
    {
      OpBuilder inBoundsThenB = ifInBounds.getThenBodyBuilder(b->getListener());
      auto selectOrInitResults = selectOrInitialize(
          sAndSOp, mappedIvs.ivs, &ivsValFlag, &inBoundsThenB);
      inBoundsThenB.create<scf::YieldOp>(loc, selectOrInitResults);
    }

    // Case when we are in the pad.
    {
      OpBuilder inBoundsElseB = ifInBounds.getElseBodyBuilder(b->getListener());
      inBoundsElseB.create<scf::YieldOp>(loc, ivsValFlag.toVector());
    }

    innerLoopB.create<scf::YieldOp>(loc, ifInBounds.getResults());
    return windowLoops.selectedIvs;
  }

  // Emits the in-bounds branch: if the iter args were already initialized,
  // run the LHLO select block to decide whether the current operand element
  // replaces the selection; otherwise unconditionally select it.
  SmallVector<Value, 4> selectOrInitialize(lmhlo::SelectAndScatterOp sAndSOp,
                                           ArrayRef<Value> operandIvs,
                                           IterArgs* ivsValFlag,
                                           OpBuilder* b) const {
    auto loc = sAndSOp.getLoc();
    Value trueI1 = b->create<mlir::arith::ConstantOp>(
        loc, b->getI1Type(), b->getIntegerAttr(b->getI1Type(), 1));

    const TypeRange iterArgTypes{ValueRange{ivsValFlag->toVector()}};
    Value operandElem =
        b->create<memref::LoadOp>(loc, sAndSOp.getOperand(), operandIvs);
    auto ifInit = b->create<scf::IfOp>(loc, iterArgTypes, ivsValFlag->isInit(),
                                       /*withElseRegion=*/true);
    // Init == true, i.e. iter args are already initialized with a selected
    // element in boundaries of the operand. Select function has to be computed
    // here.
    {
      OpBuilder ifInitThenB = ifInit.getThenBodyBuilder(b->getListener());

      auto& lhloSelect = sAndSOp.getSelect().front();
      Value pred = applySingleResultLhloCode(
          loc, {operandElem, ivsValFlag->value()}, &lhloSelect, &ifInitThenB);

      auto ifPred = ifInitThenB.create<scf::IfOp>(loc, iterArgTypes, pred,
                                                  /*withElseRegion=*/true);
      // Pred == true, therefore pack newly selected ivs, val and init flag back
      // to iter_args and return.
      {
        OpBuilder ifPredThenB = ifPred.getThenBodyBuilder(b->getListener());
        ifPredThenB.create<scf::YieldOp>(
            loc, IterArgs{operandIvs, operandElem, trueI1}.toVector());
      }

      // Pred == false, therefore return old iter_args.
      {
        OpBuilder ifPredElseB = ifPred.getElseBodyBuilder(b->getListener());
        ifPredElseB.create<scf::YieldOp>(loc, ivsValFlag->toVector());
      }

      ifInitThenB.create<scf::YieldOp>(loc, ifPred.getResults());
    }
    // Init == false, i.e. only pad was visited before and this is the first
    // element in the boundaries of the operand.
    {
      OpBuilder ifInitElseB = ifInit.getElseBodyBuilder(b->getListener());

      ifInitElseB.create<scf::YieldOp>(
          loc, IterArgs{operandIvs, operandElem, trueI1}.toVector());
    }
    return ifInit.getResults();
  }
};
// Pass driver: lowers lmhlo.reduce, lmhlo.reduce_window and
// lmhlo.select_and_scatter within a function to scf.parallel/scf.reduce
// loop nests via the converters above.
struct LhloLegalizeToParallelLoopsPass
    : public impl::LhloLegalizeToParallelLoopsPassBase<
          LhloLegalizeToParallelLoopsPass> {
  void getDependentDialects(DialectRegistry& registry) const override {
    // The lowering creates ops from these dialects, so they must be loaded.
    registry.insert<arith::ArithDialect, func::FuncDialect,
                    memref::MemRefDialect, scf::SCFDialect>();
  }

  void runOnOperation() override {
    auto func = getOperation();
    MLIRContext* ctx = &getContext();

    RewritePatternSet patterns(ctx);
    patterns.add<ReduceOpConverter, ReduceWindowOpConverter,
                 SelectAndScatterOpConverter>(func.getContext());

    ConversionTarget target(*ctx);
    target.addLegalDialect<arith::ArithDialect, linalg::LinalgDialect,
                           memref::MemRefDialect, func::FuncDialect,
                           scf::SCFDialect, LmhloDialect>();
    // The three source ops must be rewritten away entirely.
    target.addIllegalOp<lmhlo::ReduceOp, lmhlo::ReduceWindowOp,
                        lmhlo::SelectAndScatterOp>();

    if (failed(applyPartialConversion(func, target, std::move(patterns)))) {
      signalPassFailure();
    }
  }
};
} // namespace
// Public factory for the LHLO -> parallel-loops lowering pass, so clients
// can register it without depending on the pass class in this file.
std::unique_ptr<OperationPass<func::FuncOp>>
createLegalizeLhloToParallelLoopsPass() {
  return std::make_unique<LhloLegalizeToParallelLoopsPass>();
}
} // namespace lmhlo
} // namespace mlir
| {
"content_hash": "731d0f9d0b4b80895b3a1580e8846a86",
"timestamp": "",
"source": "github",
"line_count": 727,
"max_line_length": 80,
"avg_line_length": 40.22971114167813,
"alnum_prop": 0.6499128115704175,
"repo_name": "tensorflow/tensorflow-pywrap_tf_optimizer",
"id": "bc6c354ce5a95b149c1da10a34e3777f2fc74086",
"size": "29915",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tensorflow/compiler/xla/mlir_hlo/lib/Dialect/lhlo/transforms/lhlo_legalize_to_parallel_loops.cc",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "36962"
},
{
"name": "C",
"bytes": "1360509"
},
{
"name": "C#",
"bytes": "13584"
},
{
"name": "C++",
"bytes": "124617937"
},
{
"name": "CMake",
"bytes": "183407"
},
{
"name": "Cython",
"bytes": "5003"
},
{
"name": "Dockerfile",
"bytes": "416070"
},
{
"name": "Go",
"bytes": "2104698"
},
{
"name": "HTML",
"bytes": "4686483"
},
{
"name": "Java",
"bytes": "1074471"
},
{
"name": "Jupyter Notebook",
"bytes": "789401"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "MLIR",
"bytes": "11175525"
},
{
"name": "Makefile",
"bytes": "2760"
},
{
"name": "Objective-C",
"bytes": "169288"
},
{
"name": "Objective-C++",
"bytes": "294187"
},
{
"name": "Pawn",
"bytes": "5552"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "Python",
"bytes": "42599764"
},
{
"name": "Roff",
"bytes": "5034"
},
{
"name": "Ruby",
"bytes": "9199"
},
{
"name": "Shell",
"bytes": "619753"
},
{
"name": "Smarty",
"bytes": "89545"
},
{
"name": "SourcePawn",
"bytes": "14607"
},
{
"name": "Starlark",
"bytes": "7521293"
},
{
"name": "Swift",
"bytes": "78435"
},
{
"name": "Vim Snippet",
"bytes": "58"
}
],
"symlink_target": ""
} |
// GraphTraits specialization that lets LLVM's generic graph machinery
// (df_iterator, GraphWriter, etc.) walk a souper::Inst expression DAG:
// a node's children are its operands (Inst::Ops).
template<> struct llvm::GraphTraits<souper::Inst*> {
  using NodeRef = souper::Inst*;

  // The traversal entry point is the instruction itself.
  static NodeRef getEntryNode(souper::Inst* instr) { return instr; }

  // Children of a node are its operand instructions.
  using ChildIteratorType = std::vector<NodeRef>::iterator;
  static ChildIteratorType child_begin(NodeRef N) {
    return N->Ops.begin();
  }
  static ChildIteratorType child_end(NodeRef N) {
    return N->Ops.end();
  }

  // Whole-graph node enumeration = depth-first walk from the entry node.
  using nodes_iterator = llvm::df_iterator<NodeRef>;
  static nodes_iterator nodes_begin(souper::Inst* I) {
    return nodes_iterator::begin(getEntryNode(I));
  }
  static nodes_iterator nodes_end(souper::Inst* I) {
    return nodes_iterator::end(getEntryNode(I));
  }
};
/// DOTGraphTraits specialization used by llvm::WriteGraph/ViewGraph to render
/// a souper::Inst expression DAG as Graphviz output.
template<> struct llvm::DOTGraphTraits<souper::Inst*> : public llvm::DefaultDOTGraphTraits {
  DOTGraphTraits(bool isSimple = false) : DefaultDOTGraphTraits(isSimple) {}

  /// Title shown for the whole graph.
  static std::string getGraphName(souper::Inst* instr) { return "Souper IR graph"; }

  /// Human-readable label for one node, derived from its instruction kind.
  std::string getNodeLabel(souper::Inst* node, souper::Inst* root) {
    if (node->K == souper::Inst::Kind::ReservedConst)
      return "ReservedConst";
    if (node->K == souper::Inst::Kind::ReservedInst)
      return "ReservedInst";
    if (node->K == souper::Inst::Kind::Var)
      return "Var " + node->Name;
    if (node->K == souper::Inst::Kind::Const)
      return node->Val.toString(10, false);
    return std::string(souper::Inst::getKindName(node->K));
  }

  /// Render the root of the expression in bold, all other nodes normally.
  static std::string getNodeAttributes(const souper::Inst* node, const souper::Inst* root) {
    return node == root ? "style=bold" : "";
  }

  /// Draw operands below their users (the expression grows bottom-up).
  static bool renderGraphFromBottomUp() { return true; }

  /// Stable per-node identifier: the node's address, quoted for DOT.
  static std::string getNodeIdentifierLabel(souper::Inst* node, souper::Inst* root) {
    return "\"" + std::to_string(reinterpret_cast<intptr_t>(node)) + "\"";
  }
};
#endif
| {
"content_hash": "5aa087bea749b02fffc58eda3963f1b9",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 93,
"avg_line_length": 29.360655737704917,
"alnum_prop": 0.6756002233389168,
"repo_name": "rsas/souper",
"id": "7c3d69304843370374b65e81dd00706a46595a01",
"size": "2684",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "include/souper/Inst/InstGraph.h",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "8964"
},
{
"name": "C++",
"bytes": "434835"
},
{
"name": "CMake",
"bytes": "12294"
},
{
"name": "Dockerfile",
"bytes": "2432"
},
{
"name": "Go",
"bytes": "14490"
},
{
"name": "LLVM",
"bytes": "118727"
},
{
"name": "Perl",
"bytes": "35326"
},
{
"name": "Python",
"bytes": "14647"
},
{
"name": "Shell",
"bytes": "8848"
}
],
"symlink_target": ""
} |
package vg.civcraft.mc.namelayer.listeners;
import java.util.UUID;
import org.bukkit.Bukkit;
import org.bukkit.event.EventHandler;
import org.bukkit.event.EventPriority;
import org.bukkit.event.Listener;
import vg.civcraft.mc.mercury.MercuryAPI;
import vg.civcraft.mc.mercury.events.AsyncPluginBroadcastMessageEvent;
import vg.civcraft.mc.namelayer.GroupManager;
import vg.civcraft.mc.namelayer.GroupManager.PlayerType;
import vg.civcraft.mc.namelayer.NameLayerPlugin;
import vg.civcraft.mc.namelayer.command.commands.InvitePlayer;
import vg.civcraft.mc.namelayer.NameAPI;
import vg.civcraft.mc.namelayer.events.GroupAddInvitation;
import vg.civcraft.mc.namelayer.events.GroupInvalidationEvent;
import vg.civcraft.mc.namelayer.events.GroupRemoveInvitation;
import vg.civcraft.mc.namelayer.group.BlackList;
import vg.civcraft.mc.namelayer.group.Group;
import vg.civcraft.mc.namelayer.permission.GroupPermission;
import vg.civcraft.mc.namelayer.permission.PermissionType;
/**
 * Replays NameLayer group mutations that other servers broadcast over the
 * Mercury "namelayer" plugin message channel, keeping this server's group
 * state in sync.
 *
 * <p>Messages are pipe separated: field 0 is the action name, field 1 is
 * usually the group name (a group id for the invitation actions, a player
 * UUID for the auto-accept actions) and the remaining fields are action
 * specific. Every mutating call below passes {@code false} as its final
 * argument so the change is applied locally only and not re-broadcast
 * (which would cause a message loop).
 */
public class MercuryMessageListener implements Listener{

	private GroupManager gm = NameAPI.getGroupManager();

	public MercuryMessageListener() {
		MercuryAPI.registerPluginMessageChannel("namelayer");
	}

	@EventHandler(priority = EventPriority.HIGHEST)
	public void onMercuryMessage(AsyncPluginBroadcastMessageEvent event){
		if (!event.getChannel().equalsIgnoreCase("namelayer"))
			return;
		String[] message = event.getMessage().split("\\|");
		String reason = message[0];
		String groupname = message[1];
		if (reason.equals("recache")){
			GroupInvalidationEvent e = new GroupInvalidationEvent(reason, groupname);
			Bukkit.getPluginManager().callEvent(e);
			if (GroupManager.getGroup(groupname) != null) {
				GroupManager.invalidateCache(groupname);
			}
		}
		else if (reason.equals("create")){
			// BUG FIX: Boolean.getBoolean(s) reads the JVM system property named
			// by s; Boolean.parseBoolean(s) is what parses the "true"/"false"
			// payload field.
			Group group = new Group(groupname, UUID.fromString(message[2]), Boolean.parseBoolean(message[3]),
					message[4], Integer.parseInt(message[5]));
			// 'new' cannot return null, so the previous null check was dead code.
			gm.createGroup(group, false);
		}
		else if (reason.equals("delete")){
			GroupInvalidationEvent e = new GroupInvalidationEvent(reason, groupname);
			Bukkit.getPluginManager().callEvent(e);
			Group group = GroupManager.getGroup(groupname);
			if (group != null) {
				gm.deleteGroup(group.getName(), false);
			}
		}
		else if (reason.equals("discipline")){
			Group group = GroupManager.getGroup(groupname);
			if (group != null && message[2] != null) {
				// BUG FIX: parseBoolean (payload), not getBoolean (system property).
				group.setDisciplined(Boolean.parseBoolean(message[2]));
			}
		}
		else if (reason.equals("merge")){
			GroupInvalidationEvent e = new GroupInvalidationEvent(reason, groupname, message[2]);
			Bukkit.getPluginManager().callEvent(e);
			Group group = GroupManager.getGroup(groupname);
			Group toMerge = GroupManager.getGroup(message[2]);
			if ((group != null) && (toMerge != null)){
				gm.mergeGroup(group, toMerge, false);
			}
		}
		else if (reason.equals("donemerge")){
			GroupInvalidationEvent e = new GroupInvalidationEvent(reason, groupname, message[2]);
			Bukkit.getPluginManager().callEvent(e);
			Group group = GroupManager.getGroup(groupname);
			Group toMerge = GroupManager.getGroup(message[2]);
			if ((group != null) && (toMerge != null)){
				gm.doneMergeGroup(group, toMerge); // always false
			}
		}
		else if (reason.equals("transfer")){
			// NOTE(review): unlike the other handlers, this passes the new
			// owner's UUID string (message[2]) instead of the group name to
			// GroupInvalidationEvent - confirm this is intentional.
			GroupInvalidationEvent e = new GroupInvalidationEvent(reason, message[2]);
			Bukkit.getPluginManager().callEvent(e);
			UUID newowner = UUID.fromString(message[2]);
			Group group = GroupManager.getGroup(groupname);
			if (group != null) {
				gm.transferGroup(group, newowner, false);
			}
		}
		else if (reason.equals("addInvitation")){
			// Field 1 carries the group id here, not the group name.
			Group playerGroup = GroupManager.getGroup(Integer.parseInt(groupname));
			// BUG FIX: guard before use - the old code called
			// playerGroup.getName() before its null check and could NPE.
			if (playerGroup != null) {
				PlayerType pType = PlayerType.getPlayerType(message[2]);
				UUID invitedPlayerUUID = UUID.fromString(message[3]);
				UUID inviterUUID = null;
				if(message.length >= 5){
					inviterUUID = UUID.fromString(message[4]);
				}
				GroupAddInvitation e = new GroupAddInvitation(playerGroup.getName(), pType, invitedPlayerUUID, inviterUUID);
				Bukkit.getPluginManager().callEvent(e);
				InvitePlayer.sendInvitation(playerGroup, pType, invitedPlayerUUID, inviterUUID, false);
			}
		}
		else if (reason.equals("removeInvitation")){
			// Field 1 carries the group id here, not the group name.
			Group playerGroup = GroupManager.getGroup(Integer.parseInt(groupname));
			// BUG FIX: guard before use - the old code called
			// playerGroup.getName() before its null check and could NPE.
			if(playerGroup != null){
				UUID invitedPlayerUUID = UUID.fromString(message[2]);
				GroupRemoveInvitation e = new GroupRemoveInvitation(playerGroup.getName(), invitedPlayerUUID);
				Bukkit.getPluginManager().callEvent(e);
				playerGroup.removeInvite(invitedPlayerUUID, false);
				PlayerListener.removeNotification(invitedPlayerUUID, playerGroup);
			}
		}
		else if (reason.equals("defaultGroup")) {
			Group group = GroupManager.getGroup(groupname);
			UUID uuid = UUID.fromString(message[2]);
			if (group != null && uuid != null){
				NameLayerPlugin.getDefaultGroupHandler().setDefaultGroup(uuid, group, false);
			}
		}
		else if (reason.equals("addMember")){
			Group group = GroupManager.getGroup(groupname);
			UUID uuid = UUID.fromString(message[2]);
			PlayerType type = PlayerType.getPlayerType(message[3]);
			if (group != null && uuid != null){
				group.addMember(uuid, type, false);
			}
		}
		else if (reason.equals("removeMember")){
			Group group = GroupManager.getGroup(groupname);
			UUID uuid = UUID.fromString(message[2]);
			if (group != null && uuid != null){
				group.removeMember(uuid, false);
			}
		}
		else if (reason.equals("setOwner")){
			Group group = GroupManager.getGroup(groupname);
			UUID uuid = UUID.fromString(message[2]);
			if (group != null && uuid != null){
				group.setOwner(uuid, false);
			}
		}
		else if (reason.equals("setPass")){
			Group group = GroupManager.getGroup(groupname);
			String pass = message[2];
			if (group != null && pass != null){
				group.setPassword(pass, false);
			}
		}
		else if (reason.equals("link")){
			Group supgroup = GroupManager.getGroup(groupname);
			Group subgroup = GroupManager.getGroup(message[2]);
			if (supgroup != null && subgroup != null){
				Group.link(supgroup, subgroup, false);
			}
		}
		else if (reason.equals("unlink")){
			Group supgroup = GroupManager.getGroup(groupname);
			Group subgroup = GroupManager.getGroup(message[2]);
			if (supgroup != null && subgroup != null){
				Group.unlink(supgroup, subgroup, false);
			}
		}
		else if (reason.equals("permadd")){
			Group group = GroupManager.getGroup(groupname);
			PlayerType ptype = PlayerType.valueOf(message[2]);
			PermissionType permt = PermissionType.getPermission(message[3]);
			if (group != null){
				GroupPermission perms = gm.getPermissionforGroup(group);
				perms.addPermission(ptype, permt, false);
			}
		}
		else if (reason.equals("permrem")){
			Group group = GroupManager.getGroup(groupname);
			PlayerType ptype = PlayerType.valueOf(message[2]);
			PermissionType permt = PermissionType.getPermission(message[3]);
			if (group != null){
				GroupPermission perms = gm.getPermissionforGroup(group);
				perms.removePermission(ptype, permt, false);
			}
		}
		else if (reason.equals("blAdd")){
			BlackList bl = NameLayerPlugin.getBlackList();
			UUID uuid = UUID.fromString(message[2]);
			if (bl != null && uuid != null){
				bl.addBlacklistMember(groupname, uuid, false);
			}
		}
		else if (reason.equals("blRem")){
			BlackList bl = NameLayerPlugin.getBlackList();
			UUID uuid = UUID.fromString(message[2]);
			if (bl != null && uuid != null){
				bl.removeBlacklistMember(groupname, uuid, false);
			}
		}
		else if (reason.equals("disciplined")){
			Group group = GroupManager.getGroup(groupname);
			// BUG FIX: parseBoolean (payload), not getBoolean (system property).
			boolean disp = Boolean.parseBoolean(message[2]);
			if (group != null){
				group.setDisciplined(disp, false);
			}
		}
		else if (reason.equals("removeAutoAccept")) {
			// Field 1 is the player's UUID for the auto-accept actions.
			UUID uuid = UUID.fromString(message[1]);
			NameLayerPlugin.getAutoAcceptHandler().setAutoAccept(uuid, false, false);
		}
		else if (reason.equals("addAutoAccept")) {
			UUID uuid = UUID.fromString(message[1]);
			NameLayerPlugin.getAutoAcceptHandler().setAutoAccept(uuid, true, false);
		}
	}
}
| {
"content_hash": "4ec04df956b497ac20e0ce2f36ac67d8",
"timestamp": "",
"source": "github",
"line_count": 218,
"max_line_length": 111,
"avg_line_length": 37.169724770642205,
"alnum_prop": 0.7149203998519067,
"repo_name": "Civcraft/NameLayer",
"id": "6099bbf9867660f2609e6227ff2903a55891772a",
"size": "8103",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "namelayer-spigot/src/main/java/vg/civcraft/mc/namelayer/listeners/MercuryMessageListener.java",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Java",
"bytes": "417853"
}
],
"symlink_target": ""
} |
/**
* \version $Id: alustusvirhe.cc 2677 2013-02-20 13:03:17Z voutilai $
* \file alustusvirhe.cc
* \brief Alustusvirhe-luokan toteutus. ($Revision: 2677 $)
* \author ©2010 Eero Salonen <eero.j.salonen@tut.fi>
*/
#include "../alustusvirhe.hh"
#include <string>
#include "hyodykkeet.hh"
// Standardikirjastot
#include <map>
using std::map;
// Using-lauseet otsikkotiedostossa käytteen otetuille asioille.
using std::basic_ostream;
using std::string;
using Julkinen::Alustusvirhe;
// File-local table mapping each Alustusvirhe error code to its user-facing
// (Finnish) message; VIRHE_TUNNISTAMATON serves as the fallback entry.
namespace
{
    typedef
	map<Alustusvirhe::Virhekoodi, char const*>
	Virheviestit;
    typedef
	Virheviestit::value_type
	Virheviesti;

    // Code -> message pairs used to initialize VIRHEVIESTIT below.
    Virheviestit::value_type const VIRHEVIESTIARVOT[] = {
	Virheviesti(
	    Alustusvirhe::VIRHE_PALASSA_ON_JO_ESINE,
	    "Koordinaatissa on jo esine."
	    ),
	Virheviesti(
	    Alustusvirhe::VIRHEELLINEN_SIJAINTI,
	    "Annettu koordinaatti on pelialueen ulkopuolella."
	    ),
	Virheviesti(
	    Alustusvirhe::VIRHE_ANNETTUA_PELAAJAA_EI_LOYTYNYT,
	    "Esineelle annettua pelaajaa ei loytynyt pelaajien joukosta."
	    ),
	Virheviesti(
	    Alustusvirhe::VIRHE_LIIKAA_PARAMETREJA,
	    "Liikaa parametreja."
	    ),
	Virheviesti(
	    Alustusvirhe::VIRHE_TUNNISTAMATON,
	    "Tunnistamaton virhe."
	    )
    };
    Virheviestit VIRHEVIESTIT(
	VIRHEVIESTIARVOT,
	VIRHEVIESTIARVOT +
	sizeof(VIRHEVIESTIARVOT)/sizeof(Virheviestit::value_type)
	);

    // Dead code below: presumably superseded by the 'muunna' helper used in
    // the Virhekoodi constructor - TODO confirm before deleting.
    /*
    char const*
    muunnaViestiksi(Alustusvirhe::Virhekoodi virhe)
    {
	Virheviestit::const_iterator viesti(VIRHEVIESTIT.find(virhe));
	if (VIRHEVIESTIT.end() == viesti)
	{
	    viesti = VIRHEVIESTIT.find(Alustusvirhe::VIRHE_TUNNISTAMATON);
	}
	return viesti->second;
    }
    */
}
// Constructs an initialization error from a free-form message; the error
// code is left as VIRHE_TUNNISTAMATON ("unrecognized error").
Alustusvirhe::Alustusvirhe(std::string const& virheviesti)
    : Virhe(virheviesti), virhe_(VIRHE_TUNNISTAMATON)
{
}
// Constructs an initialization error from an error code; the message is
// looked up in VIRHEVIESTIT, falling back to the VIRHE_TUNNISTAMATON entry.
// (The 'muunna' helper presumably comes from the Virhe base header - confirm.)
Alustusvirhe::Alustusvirhe(Virhekoodi virhekoodi)
    : Virhe(muunna(virhekoodi, VIRHEVIESTIT, VIRHE_TUNNISTAMATON)),
      virhe_(virhekoodi)
{
}
// Copy constructor: copies both the base-class message and the error code.
Alustusvirhe::Alustusvirhe(Alustusvirhe const& toinen)
    : Virhe(toinen), virhe_(toinen.virhe_)
{
}
// Returns the error code carried by this error object.
Alustusvirhe::Virhekoodi
Alustusvirhe::virhe()
    const
{
    return virhe_;
}
// Copy assignment. Note that the base-class assignment runs unconditionally;
// only the member copy is guarded against self-assignment.
Alustusvirhe&
Alustusvirhe::operator=(Alustusvirhe const& toinen)
{
    Virhe::operator=(toinen);
    if (this != &toinen)
    {
	virhe_ = toinen.virhe_;
    }
    return *this;
}
// Prints "VIRHE: " ("ERROR: ") followed by the base-class message to the
// given stream, and returns the stream for chaining.
basic_ostream<char>&
Alustusvirhe::tulosta(basic_ostream<char>& tuloste)
    const
{
    tuloste << "VIRHE: ";
    Virhe::tulosta(tuloste);
    return tuloste;
}
| {
"content_hash": "e43bb1964da357f94806337aba5b252c",
"timestamp": "",
"source": "github",
"line_count": 125,
"max_line_length": 81,
"avg_line_length": 22.928,
"alnum_prop": 0.6018841591067691,
"repo_name": "SanteriHetekivi/Labyrinttipeli",
"id": "12ed425727f1a72c3f2696a0b90b0f3b238cdd1a",
"size": "2866",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "valmiiden_toteutus/alustusvirhe.cc",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "677"
},
{
"name": "C++",
"bytes": "212928"
},
{
"name": "CSS",
"bytes": "31169"
},
{
"name": "HTML",
"bytes": "2440479"
},
{
"name": "JavaScript",
"bytes": "55113"
},
{
"name": "Makefile",
"bytes": "508"
},
{
"name": "PostScript",
"bytes": "70457"
},
{
"name": "Scilab",
"bytes": "729"
},
{
"name": "TeX",
"bytes": "473347"
}
],
"symlink_target": ""
} |
from Expression import *
class LinearExpression(Expression):
    """
    Abstract base class for linear expression nodes in the AST of a MLP
    (mathematical linear program). Concrete subclasses implement __str__,
    setupEnvironment and generateCode.
    """

    def __init__(self):
        Expression.__init__(self)
class ValuedLinearExpression(LinearExpression):
    """
    AST node for a linear expression consisting of a single value
    (an identifier or a number)
    """

    def __init__(self, value):
        """
        Store the single value wrapped by this expression

        :param value : Identifier | Number
        """
        LinearExpression.__init__(self)
        self.value = value

    def __str__(self):
        """
        to string
        """
        return "ValuedExpr:%s" % (self.value,)

    def setupEnvironment(self, codeSetup):
        """
        Declare the identifiers and sets used by this linear expression
        """
        codeSetup.setupEnvironment(self)

    def generateCode(self, codeGenerator):
        """
        Generate the MiniZinc code for this valued linear expression
        """
        return codeGenerator.generateCode(self)
class LinearExpressionBetweenParenthesis(LinearExpression):
    """
    AST node for a linear expression wrapped in parentheses
    """

    def __init__(self, linearExpression):
        """
        Store the inner linear expression

        :param linearExpression : LinearExpression
        """
        LinearExpression.__init__(self)
        self.linearExpression = linearExpression

    def __str__(self):
        """
        to string
        """
        return "LE: (%s)" % (self.linearExpression,)

    def setupEnvironment(self, codeSetup):
        """
        Declare the identifiers and sets used by this linear expression
        """
        codeSetup.setupEnvironment(self)

    def generateCode(self, codeGenerator):
        """
        Generate the MiniZinc code for this linear expression
        """
        return codeGenerator.generateCode(self)
class LinearExpressionWithArithmeticOperation(LinearExpression):
    """
    AST node for an arithmetic operation between two expressions
    """

    # Supported operator symbols
    PLUS = "+"
    MINUS = "-"
    TIMES = "*"
    DIV = "/"

    def __init__(self, op, expression1, expression2):
        """
        Store the operator and the two operand expressions

        :param op          : (PLUS, MINUS, TIMES, DIV)
        :param expression1 : LinearExpression | NumericExpression
        :param expression2 : LinearExpression | NumericExpression
        """
        LinearExpression.__init__(self)
        self.op = op
        self.expression1 = expression1
        self.expression2 = expression2

    def __str__(self):
        """
        to string
        """
        return "OpLE:%s %s %s" % (self.expression1, self.op, self.expression2)

    def setupEnvironment(self, codeSetup):
        """
        Declare the identifiers and sets used by this linear expression
        """
        codeSetup.setupEnvironment(self)

    def generateCode(self, codeGenerator):
        """
        Generate the MiniZinc code for this arithmetic linear expression
        """
        return codeGenerator.generateCode(self)
class MinusLinearExpression(LinearExpression):
    """
    AST node for the negation of a linear expression
    """

    def __init__(self, linearExpression):
        """
        Store the expression being negated

        :param linearExpression: LinearExpression
        """
        LinearExpression.__init__(self)
        self.linearExpression = linearExpression

    def __str__(self):
        """
        to string
        """
        return "MinusLE:-(%s)" % (self.linearExpression,)

    def setupEnvironment(self, codeSetup):
        """
        Declare the identifiers and sets used by this linear expression
        """
        codeSetup.setupEnvironment(self)

    def generateCode(self, codeGenerator):
        """
        Generate the MiniZinc code for this minus linear expression
        """
        return codeGenerator.generateCode(self)
class IteratedLinearExpression(LinearExpression):
    """
    AST node for an iterated (sum) linear expression
    """

    def __init__(self, linearExpression, indexingExpression, numericExpression = None):
        """
        Store the components of the iterated linear expression

        :param linearExpression   : LinearExpression
        :param indexingExpression : IndexingExpression
        :param numericExpression  : NumericExpression
        """
        LinearExpression.__init__(self)
        self.linearExpression = linearExpression
        self.indexingExpression = indexingExpression
        self.numericExpression = numericExpression

    def __str__(self):
        """
        to string
        """
        parts = ["sum(%s)" % (self.indexingExpression,)]
        if self.numericExpression:
            parts.append("^(%s)" % (self.numericExpression,))
        parts.append("(%s)" % (self.linearExpression,))
        return "ItLE:" + "".join(parts)

    def setupEnvironment(self, codeSetup):
        """
        Declare the identifiers and sets used by this linear expression
        """
        codeSetup.setupEnvironment(self)

    def generateCode(self, codeGenerator):
        """
        Generate the MiniZinc code for this iterated linear expression
        """
        return codeGenerator.generateCode(self)
class ConditionalLinearExpression(LinearExpression):
    """
    AST node for a conditional (IF/THEN/ELSEIF/ELSE/ENDIF) linear expression
    """

    def __init__(self, logicalExpression, linearExpression1 = None, linearExpression2 = None, elseIfExpression = None):
        """
        Set the conditional linear expression

        :param logicalExpression : LogicalExpression (the IF condition)
        :param linearExpression1 : LinearExpression (the THEN branch)
        :param linearExpression2 : LinearExpression (the ELSE branch)
        :param elseIfExpression  : ElseIfExpressionList
        """
        LinearExpression.__init__(self)
        self.logicalExpression = logicalExpression
        self.linearExpression1 = linearExpression1
        self.linearExpression2 = linearExpression2
        # BUG FIX: was 'self.elseIfEpression = elseIfEpression', which raised
        # NameError (the parameter is named elseIfExpression) and never set the
        # attribute read by __str__, getDependencies and addElseIfExpression.
        self.elseIfExpression = elseIfExpression

    def __str__(self):
        """
        to string
        """
        res = "ConditionalLinearExpression: " + " IF "+str(self.logicalExpression)

        if self.linearExpression1:
            res += " THEN " + str(self.linearExpression1)

        if self.elseIfExpression:
            res += str(self.elseIfExpression)

        if self.linearExpression2 != None:
            res += " ELSE " + str(self.linearExpression2)

        res += " ENDIF "
        return res

    def addElseIfExpression(self, elseIfExpression):
        # Attach (or replace) the ELSEIF chain after construction
        self.elseIfExpression = elseIfExpression

    def addElseExpression(self, elseExpression):
        # Attach (or replace) the ELSE branch after construction
        self.linearExpression2 = elseExpression

    def getDependencies(self, codeGenerator):
        # Union of the dependencies of every present branch, deduplicated
        dep = self.logicalExpression.getDependencies(codeGenerator) + self.linearExpression1.getDependencies(codeGenerator)

        if self.elseIfExpression != None:
            dep += self.elseIfExpression.getDependencies(codeGenerator)

        if self.linearExpression2 != None:
            dep += self.linearExpression2.getDependencies(codeGenerator)

        return list(set(dep))

    def setupEnvironment(self, codeSetup):
        """
        Setup the MiniZinc code for the identifiers and sets used in this conditional linear expression
        """
        codeSetup.setupEnvironment(self)

    def prepare(self, codePrepare):
        """
        Prepare the MiniZinc code for the identifiers and sets used in this conditional linear expression
        """
        codePrepare.prepare(self)

    def generateCode(self, codeGenerator):
        """
        Generate the MiniZinc code for this conditional linear expression
        """
        return codeGenerator.generateCode(self)
| {
"content_hash": "4ac4fc1c439e095661f8a2b093d4af5a",
"timestamp": "",
"source": "github",
"line_count": 280,
"max_line_length": 123,
"avg_line_length": 29.5,
"alnum_prop": 0.6222760290556901,
"repo_name": "rafaellc28/Latex2MiniZinc",
"id": "a9100cfff21f249f59863ca5236d3b9be0b79ceb",
"size": "8260",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "latex2minizinc/LinearExpression.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "2918583"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.