code stringlengths 3 1.05M | repo_name stringlengths 4 116 | path stringlengths 4 991 | language stringclasses 9 values | license stringclasses 15 values | size int32 3 1.05M |
|---|---|---|---|---|---|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using u8 = System.Byte;
using u16 = System.UInt16;
using s32 = System.Int32;
using u32 = System.UInt32;
using gps_time_t = System.UInt64;
namespace MissionPlanner.Utilities
{
public class rtcm3 : ICorrections
{
const byte RTCM3PREAMB = 0xD3;
int step = 0;
byte[] buffer = new u8[1024 * 4];
u32 payloadlen = 0;
int msglencount = 0;
rtcmpreamble pre;
/// <summary>
/// Feeds one byte of an RTCM3 stream into the frame parser state machine.
/// Returns the RTCM message number when a complete frame with a valid
/// CRC-24Q has been assembled, otherwise -1.
/// </summary>
public int Read(byte data)
{
switch (step)
{
default:
case 0:
// hunt for the 0xD3 preamble byte that starts every RTCM3 frame
if (data == RTCM3PREAMB)
{
step = 1;
buffer[0] = data;
}
break;
case 1:
// second header byte: reserved bits + high bits of the payload length
buffer[1] = data;
step++;
break;
case 2:
buffer[2] = data;
step++;
// 3-byte header complete - decode the 10-bit payload length from it
pre = new rtcmpreamble();
pre.Read(buffer);
payloadlen = pre.length;
msglencount = 0;
// reset on oversize packet
if (payloadlen > buffer.Length)
step = 0;
break;
case 3:
// accumulate payload bytes after the 3-byte header
if (msglencount < (payloadlen))
{
buffer[msglencount + 3] = data;
msglencount++;
}
else
{
// payload done; the current byte is already the first CRC byte,
// so fall straight through to case 4 to store it
step++;
goto case 4;
}
break;
case 4:
// first of the three trailing CRC-24Q bytes
buffer[payloadlen + 3] = data;
step++;
break;
case 5:
buffer[payloadlen + 3 + 1] = data;
step++;
break;
case 6:
// last CRC byte: verify the checksum over header + payload.
// NOTE: payloadlen is mutated here to include the 3 header bytes;
// the `length` property below relies on that.
buffer[payloadlen + 3 + 2] = data;
payloadlen = payloadlen + 3;
u32 crc = crc24.crc24q(buffer, payloadlen, 0);
u32 crcpacket = getbitu(buffer, payloadlen * 8, 24);
if (crc == crcpacket)
{
rtcmheader head = new rtcmheader();
head.Read(buffer);
step = 0;
return head.messageno;
}
step = 0;
break;
}
return -1;
}
/// <summary>
/// Extracts an unsigned, MSB-first bit field from a byte buffer.
/// </summary>
/// <param name="buff">source buffer</param>
/// <param name="pos">bit offset from the start of the buffer</param>
/// <param name="len">number of bits to read</param>
/// <returns>the bit field as an unsigned integer</returns>
static uint getbitu(u8[] buff, u32 pos, u32 len)
{
    uint result = 0;
    for (u32 bit = pos; bit < pos + len; bit++)
    {
        uint current = (uint)((buff[bit / 8] >> (int)(7 - bit % 8)) & 1u);
        result = (result << 1) | current;
    }
    return result;
}
/// <summary>
/// Writes an unsigned value into a byte buffer as an MSB-first bit field.
/// </summary>
/// <param name="buff">destination buffer, modified in place</param>
/// <param name="pos">bit offset from the start of the buffer</param>
/// <param name="len">field width in bits; out-of-range widths (0 or >32) are ignored</param>
/// <param name="data">value to store; only the low <paramref name="len"/> bits are used</param>
static void setbitu(u8[] buff, u32 pos, u32 len, u32 data)
{
    // Validate the width BEFORE computing the mask: the original computed
    // 1u << (len - 1) first, which for len == 0 shifts by -1 (masked to 31
    // in C#) before the guard ran. Also, `len <= 0` on an unsigned value
    // can only ever mean len == 0, so say that explicitly.
    if (len == 0 || len > 32)
        return;
    u32 mask = 1u << (int)(len - 1);
    for (u32 i = pos; i < pos + len; i++, mask >>= 1)
    {
        if ((data & mask) != 0)
            buff[i / 8] |= (byte)(1u << (int)(7 - i % 8));   // set bit
        else
            buff[i / 8] &= (byte)(~(1u << (int)(7 - i % 8))); // clear bit
    }
}
/// <summary>
/// RTCM3 framing header: the 0xD3 preamble byte, 6 reserved bits and a
/// 10-bit payload length.
/// </summary>
public class rtcmpreamble
{
    public u8 preamble = RTCM3PREAMB; // always 0xD3 on the wire
    public u8 resv1;                  // reserved bits
    public u16 length;                // payload length in bytes (0-1023)

    /// <summary>Parses the 3-byte frame header from the start of <paramref name="buffer"/>.</summary>
    public void Read(byte[] buffer)
    {
        uint bitpos = 0;
        preamble = (byte)getbitu(buffer, bitpos, 8);
        bitpos += 8;
        resv1 = (byte)getbitu(buffer, bitpos, 6);
        bitpos += 6;
        length = (u16)getbitu(buffer, bitpos, 10);
        bitpos += 10;
    }

    /// <summary>Writes the 3-byte frame header into <paramref name="buffer"/> and returns it.</summary>
    public byte[] Write(byte[] buffer)
    {
        uint bitpos = 0;
        setbitu(buffer, bitpos, 8, RTCM3PREAMB);
        bitpos += 8;
        setbitu(buffer, bitpos, 6, resv1);
        bitpos += 6;
        setbitu(buffer, bitpos, 10, length);
        bitpos += 10;
        return buffer;
    }
}
/// <summary>
/// Common header fields of an RTCM3 observation message (message number,
/// reference station id, epoch and flags), located immediately after the
/// 24-bit frame header.
/// </summary>
public class rtcmheader
{
    public u16 messageno;    // RTCM message type number
    public u16 refstationid; // reference station id
    public u8 sync;          // synchronous gnss flag
    public u32 epoch;        // gps epoch time
    public u8 nsat;          // number of satellites
    public u8 smoothind;     // smoothing indicator
    public u8 smoothint;     // smoothing interval

    /// <summary>Parses the message header fields that follow the 3-byte frame preamble.</summary>
    public void Read(byte[] buffer)
    {
        u32 bitpos = 24; // skip the 24-bit frame header
        messageno = (u16)getbitu(buffer, bitpos, 12);
        bitpos += 12;
        refstationid = (u16)getbitu(buffer, bitpos, 12);
        bitpos += 12;
        epoch = (u32)getbitu(buffer, bitpos, 30);
        bitpos += 30;
        sync = (u8)getbitu(buffer, bitpos, 1);
        bitpos += 1;
        nsat = (u8)getbitu(buffer, bitpos, 5);
        bitpos += 5;
        smoothind = (u8)getbitu(buffer, bitpos, 1);
        bitpos += 1;
        smoothint = (u8)getbitu(buffer, bitpos, 3);
        bitpos += 3;
    }

    /// <summary>Serialises the message header fields after the 3-byte frame preamble and returns the buffer.</summary>
    public byte[] Write(byte[] buffer)
    {
        u32 bitpos = 24; // skip the 24-bit frame header
        setbitu(buffer, bitpos, 12, messageno);
        bitpos += 12;
        setbitu(buffer, bitpos, 12, refstationid);
        bitpos += 12;
        setbitu(buffer, bitpos, 30, epoch);
        bitpos += 30;
        setbitu(buffer, bitpos, 1, sync);
        bitpos += 1;
        setbitu(buffer, bitpos, 5, nsat);
        bitpos += 5;
        setbitu(buffer, bitpos, 1, smoothind);
        bitpos += 1;
        setbitu(buffer, bitpos, 3, smoothint);
        bitpos += 3;
        return buffer;
    }
}
/// <summary>
/// Qualcomm CRC-24Q checksum used by RTCM3 frames
/// (polynomial 0x1864CFB, not reflected, no final XOR).
/// </summary>
public class crc24
{
    // Precomputed table: one entry per possible input byte value.
    static u32[] crc24qtab = new u32[] {
0x000000, 0x864CFB, 0x8AD50D, 0x0C99F6, 0x93E6E1, 0x15AA1A, 0x1933EC, 0x9F7F17,
0xA18139, 0x27CDC2, 0x2B5434, 0xAD18CF, 0x3267D8, 0xB42B23, 0xB8B2D5, 0x3EFE2E,
0xC54E89, 0x430272, 0x4F9B84, 0xC9D77F, 0x56A868, 0xD0E493, 0xDC7D65, 0x5A319E,
0x64CFB0, 0xE2834B, 0xEE1ABD, 0x685646, 0xF72951, 0x7165AA, 0x7DFC5C, 0xFBB0A7,
0x0CD1E9, 0x8A9D12, 0x8604E4, 0x00481F, 0x9F3708, 0x197BF3, 0x15E205, 0x93AEFE,
0xAD50D0, 0x2B1C2B, 0x2785DD, 0xA1C926, 0x3EB631, 0xB8FACA, 0xB4633C, 0x322FC7,
0xC99F60, 0x4FD39B, 0x434A6D, 0xC50696, 0x5A7981, 0xDC357A, 0xD0AC8C, 0x56E077,
0x681E59, 0xEE52A2, 0xE2CB54, 0x6487AF, 0xFBF8B8, 0x7DB443, 0x712DB5, 0xF7614E,
0x19A3D2, 0x9FEF29, 0x9376DF, 0x153A24, 0x8A4533, 0x0C09C8, 0x00903E, 0x86DCC5,
0xB822EB, 0x3E6E10, 0x32F7E6, 0xB4BB1D, 0x2BC40A, 0xAD88F1, 0xA11107, 0x275DFC,
0xDCED5B, 0x5AA1A0, 0x563856, 0xD074AD, 0x4F0BBA, 0xC94741, 0xC5DEB7, 0x43924C,
0x7D6C62, 0xFB2099, 0xF7B96F, 0x71F594, 0xEE8A83, 0x68C678, 0x645F8E, 0xE21375,
0x15723B, 0x933EC0, 0x9FA736, 0x19EBCD, 0x8694DA, 0x00D821, 0x0C41D7, 0x8A0D2C,
0xB4F302, 0x32BFF9, 0x3E260F, 0xB86AF4, 0x2715E3, 0xA15918, 0xADC0EE, 0x2B8C15,
0xD03CB2, 0x567049, 0x5AE9BF, 0xDCA544, 0x43DA53, 0xC596A8, 0xC90F5E, 0x4F43A5,
0x71BD8B, 0xF7F170, 0xFB6886, 0x7D247D, 0xE25B6A, 0x641791, 0x688E67, 0xEEC29C,
0x3347A4, 0xB50B5F, 0xB992A9, 0x3FDE52, 0xA0A145, 0x26EDBE, 0x2A7448, 0xAC38B3,
0x92C69D, 0x148A66, 0x181390, 0x9E5F6B, 0x01207C, 0x876C87, 0x8BF571, 0x0DB98A,
0xF6092D, 0x7045D6, 0x7CDC20, 0xFA90DB, 0x65EFCC, 0xE3A337, 0xEF3AC1, 0x69763A,
0x578814, 0xD1C4EF, 0xDD5D19, 0x5B11E2, 0xC46EF5, 0x42220E, 0x4EBBF8, 0xC8F703,
0x3F964D, 0xB9DAB6, 0xB54340, 0x330FBB, 0xAC70AC, 0x2A3C57, 0x26A5A1, 0xA0E95A,
0x9E1774, 0x185B8F, 0x14C279, 0x928E82, 0x0DF195, 0x8BBD6E, 0x872498, 0x016863,
0xFAD8C4, 0x7C943F, 0x700DC9, 0xF64132, 0x693E25, 0xEF72DE, 0xE3EB28, 0x65A7D3,
0x5B59FD, 0xDD1506, 0xD18CF0, 0x57C00B, 0xC8BF1C, 0x4EF3E7, 0x426A11, 0xC426EA,
0x2AE476, 0xACA88D, 0xA0317B, 0x267D80, 0xB90297, 0x3F4E6C, 0x33D79A, 0xB59B61,
0x8B654F, 0x0D29B4, 0x01B042, 0x87FCB9, 0x1883AE, 0x9ECF55, 0x9256A3, 0x141A58,
0xEFAAFF, 0x69E604, 0x657FF2, 0xE33309, 0x7C4C1E, 0xFA00E5, 0xF69913, 0x70D5E8,
0x4E2BC6, 0xC8673D, 0xC4FECB, 0x42B230, 0xDDCD27, 0x5B81DC, 0x57182A, 0xD154D1,
0x26359F, 0xA07964, 0xACE092, 0x2AAC69, 0xB5D37E, 0x339F85, 0x3F0673, 0xB94A88,
0x87B4A6, 0x01F85D, 0x0D61AB, 0x8B2D50, 0x145247, 0x921EBC, 0x9E874A, 0x18CBB1,
0xE37B16, 0x6537ED, 0x69AE1B, 0xEFE2E0, 0x709DF7, 0xF6D10C, 0xFA48FA, 0x7C0401,
0x42FA2F, 0xC4B6D4, 0xC82F22, 0x4E63D9, 0xD11CCE, 0x575035, 0x5BC9C3, 0xDD8538
    };

    /// <summary>
    /// Computes the CRC-24Q value over the first <paramref name="len"/>
    /// bytes of <paramref name="buf"/>, one table lookup per byte.
    /// </summary>
    /// <param name="buf">data to checksum</param>
    /// <param name="len">number of bytes to process</param>
    /// <param name="crc">initial CRC value (0 for a fresh computation)</param>
    /// <returns>the 24-bit CRC, in the low 24 bits of the result</returns>
    public static u32 crc24q(u8[] buf, u32 len, u32 crc)
    {
        u32 result = crc;
        for (u32 i = 0; i < len; i++)
        {
            u32 index = (result >> 16) ^ buf[i];
            result = ((result << 8) & 0xFFFFFF) ^ crc24qtab[index];
        }
        return result;
    }
}
/// <summary>
/// Length in bytes of the last completed frame. Relies on Read() having
/// already folded the 3 header bytes into payloadlen (done in case 6), so
/// this is payload + header + the 3 trailing CRC bytes.
/// </summary>
public s32 length
{
get { return (s32)(payloadlen + 2 + 1); }
}
/// <summary>
/// Raw internal frame buffer (header + payload + CRC of the most recently
/// parsed frame). Exposed directly, not a copy, so callers must not hold
/// onto it across further Read() calls.
/// </summary>
public u8[] packet
{
get { return buffer; }
}
}
} | uwafsl/MissionPlanner | Utilities/rtcm3.cs | C# | gpl-3.0 | 10,196 |
// Set iterators produces entries in the order they were inserted.
// Insert a pseudo-random-looking but deterministic sequence of values by
// stepping i -> i * 7 % 1117 starting from 7; the walk returns to 1 after
// visiting 556 distinct values.
var set = Set();
var i;
for (i = 7; i !== 1; i = i * 7 % 1117)
set.add(i);
assertEq(set.size, 557);
// Re-run the same walk and check the iterator yields values in exactly
// the insertion order.
i = 7;
for (var v of set) {
assertEq(v, i);
i = i * 7 % 1117;
}
assertEq(i, 1);
| SlateScience/MozillaJS | js/src/jit-test/tests/collections/Set-iterator-order.js | JavaScript | mpl-2.0 | 261 |
/**
* This Source Code Form is subject to the terms of the Mozilla Public License,
* v. 2.0. If a copy of the MPL was not distributed with this file, You can
* obtain one at http://mozilla.org/MPL/2.0/. OpenMRS is also distributed under
* the terms of the Healthcare Disclaimer located at http://openmrs.org/license.
*
* Copyright (C) OpenMRS Inc. OpenMRS is a registered trademark and the OpenMRS
* graphic logo is a trademark of OpenMRS Inc.
*/
package org.openmrs.annotation;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* Place it on classes which you want to be beans created conditionally based on
* OpenMRS version and/or started modules.
*
* @since 1.10, 1.9.8, 1.8.5, 1.7.5
*/
@Target( { ElementType.TYPE })
@Retention(RetentionPolicy.RUNTIME)
@Documented
public @interface OpenmrsProfile {
	
	/**
	 * OpenMRS version expression the annotated bean requires; the empty
	 * default means "any version". (Exact expression syntax is evaluated
	 * elsewhere — presumably a version or version range; confirm against
	 * the handler that reads this annotation.)
	 */
	public String openmrsVersion() default "";
	
	/**
	 * Modules that must be started for the annotated bean to be created;
	 * the empty default means no module requirement.
	 */
	public String[] modules() default {};
}
| jembi/openmrs-core | api/src/main/java/org/openmrs/annotation/OpenmrsProfile.java | Java | mpl-2.0 | 1,065 |
package tc.oc.commons.core.plugin;
import java.util.Set;
import tc.oc.commons.core.commands.CommandRegistry;
import tc.oc.commons.core.commands.Commands;
import tc.oc.commons.core.commands.NestedCommands;
import tc.oc.minecraft.api.event.Activatable;
import tc.oc.commons.core.inject.Facet;
/**
* Something that needs to be enabled and disabled (along with a plugin).
*
* Each plugin has a private set of facets, configured through a {@link PluginFacetBinder}.
* To get the instances, @Inject a {@link Set< PluginFacet >}.
*
* Facets are automatically enabled and disabled at the same time as the
* plugin they are bound to.
*
* If a facet implements the {@link tc.oc.minecraft.api.event.Listener} interfaces,
* it will also be registered to receive events.
*
* If it implements {@link Commands} or {@link NestedCommands}, it will be registered
* through a {@link CommandRegistry}.
*
 * Specific plugins may do other automatic things with their own facets, but we
* don't yet have a framework for extending facets across all plugins.
*/
public interface PluginFacet extends Facet, Activatable {
    // Marker interface: declares no members of its own; all behaviour comes
    // from Facet (injection lifecycle) and Activatable (enable/disable).
}
| cswhite2000/ProjectAres | Util/core/src/main/java/tc/oc/commons/core/plugin/PluginFacet.java | Java | agpl-3.0 | 1,116 |
// Browserify bundle prelude: a minimal CommonJS loader that resolves and
// caches the numbered modules defined below, then requires the entry (1).
(function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);throw new Error("Cannot find module '"+o+"'")}var f=n[o]={exports:{}};t[o][0].call(f.exports,function(e){var n=t[o][1][e];return s(n?n:e)},f,f.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){
// Module 1 (entry point): re-export the demuxer and decoder, and load the
// ogg plugin module for its registration side effect.
exports.FLACDemuxer = require('./src/demuxer');
exports.FLACDecoder = require('./src/decoder');
require('./src/ogg');
},{"./src/decoder":2,"./src/demuxer":3,"./src/ogg":4}],2:[function(require,module,exports){
/*
* FLAC.js - Free Lossless Audio Codec decoder in JavaScript
* Original C version from FFmpeg (c) 2003 Alex Beregszaszi
* JavaScript port by Devon Govett and Jens Nockert of Official.fm Labs
*
* Licensed under the same terms as the original. The original
* license follows.
*
* FLAC.js is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FLAC.js is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
*/
var AV = (window.AV);
var FLACDecoder = AV.Decoder.extend(function() {
AV.Decoder.register('flac', this);
// Receives the STREAMINFO "cookie" emitted by the demuxer and allocates one
// Int32 decode buffer per channel, sized to the stream's maximum block size.
this.prototype.setCookie = function(cookie) {
this.cookie = cookie;
// initialize arrays
this.decoded = [];
for (var i = 0; i < this.format.channelsPerFrame; i++) {
this.decoded[i] = new Int32Array(cookie.maxBlockSize);
}
};
const BLOCK_SIZES = new Int16Array([
0, 192, 576 << 0, 576 << 1, 576 << 2, 576 << 3, 0, 0,
256 << 0, 256 << 1, 256 << 2, 256 << 3, 256 << 4, 256 << 5, 256 << 6, 256 << 7
]);
const SAMPLE_RATES = new Int32Array([
0, 88200, 176400, 192000,
8000, 16000, 22050, 24000, 32000, 44100, 48000, 96000,
0, 0, 0, 0
]);
const SAMPLE_SIZES = new Int8Array([
0, 8, 12, 0, 16, 20, 24, 0
]);
const MAX_CHANNELS = 8,
CHMODE_INDEPENDENT = 0,
CHMODE_LEFT_SIDE = 8,
CHMODE_RIGHT_SIDE = 9,
CHMODE_MID_SIDE = 10;
// Decodes one FLAC frame from the bitstream: parses the frame header,
// decodes each channel's subframe, then undoes inter-channel decorrelation
// and returns an interleaved Int16/Int32 PCM buffer.
this.prototype.readChunk = function() {
var stream = this.bitstream;
if (!stream.available(32))
return;
// frame sync code
if ((stream.read(15) & 0x7FFF) !== 0x7FFC)
throw new Error('Invalid sync code');
var isVarSize = stream.read(1), // variable block size stream code
bsCode = stream.read(4), // block size
srCode = stream.read(4), // sample rate code
chMode = stream.read(4), // channel mode
bpsCode = stream.read(3); // bits per sample
stream.advance(1); // reserved bit
// channels: codes below MAX_CHANNELS mean independently coded channels;
// 8-10 are the two-channel stereo decorrelation modes.
this.chMode = chMode;
var channels;
if (chMode < MAX_CHANNELS) {
channels = chMode + 1;
this.chMode = CHMODE_INDEPENDENT;
} else if (chMode <= CHMODE_MID_SIDE) {
channels = 2;
} else {
throw new Error('Invalid channel mode');
}
if (channels !== this.format.channelsPerFrame)
throw new Error('Switching channel layout mid-stream not supported.');
// bits per sample
if (bpsCode === 3 || bpsCode === 7)
throw new Error('Invalid sample size code');
this.bps = SAMPLE_SIZES[bpsCode];
if (this.bps !== this.format.bitsPerChannel)
throw new Error('Switching bits per sample mid-stream not supported.');
// choose output width: >16 bps is left-shifted into 32-bit samples
var sampleShift, is32;
if (this.bps > 16) {
sampleShift = 32 - this.bps;
is32 = true;
} else {
sampleShift = 16 - this.bps;
is32 = false;
}
// sample number or frame number (UTF-8-like variable length encoding)
// see http://www.hydrogenaudio.org/forums/index.php?s=ea7085ffe6d57132c36e6105c0d434c9&showtopic=88390&pid=754269&st=0&#entry754269
var ones = 0;
while (stream.read(1) === 1)
ones++;
var frame_or_sample_num = stream.read(7 - ones);
for (; ones > 1; ones--) {
stream.advance(2); // == 2
frame_or_sample_num = (frame_or_sample_num << 6) | stream.read(6);
}
// block size: codes 6 and 7 read an explicit 8/16-bit value, the rest
// come from the BLOCK_SIZES table
if (bsCode === 0)
throw new Error('Reserved blocksize code');
else if (bsCode === 6)
this.blockSize = stream.read(8) + 1;
else if (bsCode === 7)
this.blockSize = stream.read(16) + 1;
else
this.blockSize = BLOCK_SIZES[bsCode];
// sample rate: table lookup or explicit value depending on the code
var sampleRate;
if (srCode < 12)
sampleRate = SAMPLE_RATES[srCode];
else if (srCode === 12)
sampleRate = stream.read(8) * 1000;
else if (srCode === 13)
sampleRate = stream.read(16);
else if (srCode === 14)
sampleRate = stream.read(16) * 10;
else
throw new Error('Invalid sample rate code');
stream.advance(8); // skip CRC check
// subframes: one per channel
for (var i = 0; i < channels; i++)
this.decodeSubframe(i);
stream.align();
stream.advance(16); // skip CRC frame footer
// interleave the decoded channels into the output buffer, undoing the
// stereo decorrelation mode used by the encoder
var output = new ArrayBuffer(this.blockSize * channels * this.bps / 8),
buf = is32 ? new Int32Array(output) : new Int16Array(output),
blockSize = this.blockSize,
decoded = this.decoded,
j = 0;
switch (this.chMode) {
case CHMODE_INDEPENDENT:
for (var k = 0; k < blockSize; k++) {
for (var i = 0; i < channels; i++) {
buf[j++] = decoded[i][k] << sampleShift;
}
}
break;
case CHMODE_LEFT_SIDE:
// channel 1 holds the side signal (left - right)
for (var i = 0; i < blockSize; i++) {
var left = decoded[0][i],
right = decoded[1][i];
buf[j++] = left << sampleShift;
buf[j++] = (left - right) << sampleShift;
}
break;
case CHMODE_RIGHT_SIDE:
// channel 0 holds the side signal; reconstruct left as side + right
for (var i = 0; i < blockSize; i++) {
var left = decoded[0][i],
right = decoded[1][i];
buf[j++] = (left + right) << sampleShift;
buf[j++] = right << sampleShift;
}
break;
case CHMODE_MID_SIDE:
// mid/side coding: reconstruct left/right from mid and side signals
for (var i = 0; i < blockSize; i++) {
var left = decoded[0][i],
right = decoded[1][i];
left -= right >> 1;
buf[j++] = (left + right) << sampleShift;
buf[j++] = left << sampleShift;
}
break;
}
return buf;
};
// Decodes one channel's subframe: reads the subframe header (type + wasted
// bits), then dispatches to constant, verbatim, fixed-predictor or LPC
// decoding. Results land in this.decoded[channel].
this.prototype.decodeSubframe = function(channel) {
var wasted = 0,
stream = this.bitstream,
blockSize = this.blockSize,
decoded = this.decoded;
// side channels carry one extra bit of precision in decorrelated modes
this.curr_bps = this.bps;
if (channel === 0) {
if (this.chMode === CHMODE_RIGHT_SIDE)
this.curr_bps++;
} else {
if (this.chMode === CHMODE_LEFT_SIDE || this.chMode === CHMODE_MID_SIDE)
this.curr_bps++;
}
if (stream.read(1))
throw new Error("Invalid subframe padding");
var type = stream.read(6);
// optional unary-coded count of "wasted" (always-zero) low bits
if (stream.read(1)) {
wasted = 1;
while (!stream.read(1))
wasted++;
this.curr_bps -= wasted;
}
if (this.curr_bps > 32)
throw new Error("decorrelated bit depth > 32 (" + this.curr_bps + ")");
if (type === 0) {
// constant subframe: one value repeated for the whole block
var tmp = stream.read(this.curr_bps, true);
for (var i = 0; i < blockSize; i++)
decoded[channel][i] = tmp;
} else if (type === 1) {
// verbatim subframe: raw samples, no prediction
var bps = this.curr_bps;
for (var i = 0; i < blockSize; i++)
decoded[channel][i] = stream.read(bps, true);
} else if ((type >= 8) && (type <= 12)) {
// fixed predictor of order 0-4
this.decode_subframe_fixed(channel, type & ~0x8);
} else if (type >= 32) {
// LPC predictor of order 1-32
this.decode_subframe_lpc(channel, (type & ~0x20) + 1);
} else {
throw new Error("Invalid coding type");
}
// restore the wasted low bits by shifting back up
if (wasted) {
for (var i = 0; i < blockSize; i++)
decoded[channel][i] <<= wasted;
}
};
// Decodes a fixed-predictor subframe: reads the warm-up samples and Rice
// residuals, then applies the order-N fixed polynomial predictor using a
// running difference accumulator (abcd holds the 1st..4th differences).
this.prototype.decode_subframe_fixed = function(channel, predictor_order) {
var decoded = this.decoded[channel],
stream = this.bitstream,
bps = this.curr_bps;
// warm up samples
for (var i = 0; i < predictor_order; i++)
decoded[i] = stream.read(bps, true);
this.decode_residuals(channel, predictor_order);
// seed the difference accumulators from the warm-up samples
var a = 0, b = 0, c = 0, d = 0;
if (predictor_order > 0)
a = decoded[predictor_order - 1];
if (predictor_order > 1)
b = a - decoded[predictor_order - 2];
if (predictor_order > 2)
c = b - decoded[predictor_order - 2] + decoded[predictor_order - 3];
if (predictor_order > 3)
d = c - decoded[predictor_order - 2] + 2 * decoded[predictor_order - 3] - decoded[predictor_order - 4];
switch (predictor_order) {
case 0:
// order 0: residuals are the samples themselves
break;
case 1:
case 2:
case 3:
case 4:
// integrate the residuals `predictor_order` times: add the residual to
// the highest-order difference, then cascade the partial sums down
var abcd = new Int32Array([a, b, c, d]),
blockSize = this.blockSize;
for (var i = predictor_order; i < blockSize; i++) {
abcd[predictor_order - 1] += decoded[i];
for (var j = predictor_order - 2; j >= 0; j--) {
abcd[j] += abcd[j + 1];
}
decoded[i] = abcd[0];
}
break;
default:
throw new Error("Invalid Predictor Order " + predictor_order);
}
};
// Decodes an LPC subframe: reads warm-up samples, quantised coefficients
// and residuals, then runs the linear predictor (two samples per loop
// iteration to share coefficient loads).
this.prototype.decode_subframe_lpc = function(channel, predictor_order) {
var stream = this.bitstream,
decoded = this.decoded[channel],
bps = this.curr_bps,
blockSize = this.blockSize;
// warm up samples
for (var i = 0; i < predictor_order; i++) {
decoded[i] = stream.read(bps, true);
}
// coefficient precision (bits) and quantisation shift
var coeff_prec = stream.read(4) + 1;
if (coeff_prec === 16)
throw new Error("Invalid coefficient precision");
var qlevel = stream.read(5, true);
if (qlevel < 0)
throw new Error("Negative qlevel, maybe buggy stream");
var coeffs = new Int32Array(32);
for (var i = 0; i < predictor_order; i++) {
coeffs[i] = stream.read(coeff_prec, true);
}
this.decode_residuals(channel, predictor_order);
// the accumulators below are 32-bit; wider samples would overflow
if (this.bps > 16)
throw new Error("no 64-bit integers in JS, could probably use doubles though");
// main predictor loop, unrolled by two: s0/s1 accumulate the predictions
// for samples i and i+1, reusing each loaded coefficient for both
for (var i = predictor_order; i < blockSize - 1; i += 2) {
var d = decoded[i - predictor_order],
s0 = 0, s1 = 0, c;
for (var j = predictor_order - 1; j > 0; j--) {
c = coeffs[j];
s0 += c * d;
d = decoded[i - j];
s1 += c * d;
}
c = coeffs[0];
s0 += c * d;
d = decoded[i] += (s0 >> qlevel);
s1 += c * d;
decoded[i + 1] += (s1 >> qlevel);
}
// odd trailing sample, if the block length parity requires it
if (i < blockSize) {
var sum = 0;
for (var j = 0; j < predictor_order; j++)
sum += coeffs[j] * decoded[i - j - 1];
decoded[i] += (sum >> qlevel);
}
};
const INT_MAX = 32767;
// Decodes the Rice-coded residual partitions for one subframe and stores
// them in this.decoded[channel] starting after the warm-up samples.
this.prototype.decode_residuals = function(channel, predictor_order) {
var stream = this.bitstream,
method_type = stream.read(2); // 0 = 4-bit Rice params, 1 = 5-bit
if (method_type > 1)
throw new Error('Illegal residual coding method ' + method_type);
// the block is split into 2^rice_order partitions of `samples` each
var rice_order = stream.read(4),
samples = (this.blockSize >>> rice_order);
if (predictor_order > samples)
throw new Error('Invalid predictor order ' + predictor_order + ' > ' + samples);
var decoded = this.decoded[channel],
sample = predictor_order,
i = predictor_order; // first partition excludes the warm-up samples
for (var partition = 0; partition < (1 << rice_order); partition++) {
var tmp = stream.read(method_type === 0 ? 4 : 5);
if (tmp === (method_type === 0 ? 15 : 31)) {
// escape code: residuals are stored verbatim with `tmp` bits each
tmp = stream.read(5);
for (; i < samples; i++)
decoded[sample++] = stream.read(tmp, true);
} else {
// Rice/Golomb coded residuals with parameter tmp
for (; i < samples; i++)
decoded[sample++] = this.golomb(tmp, INT_MAX, 0);
}
i = 0;
}
};
const MIN_CACHE_BITS = 25;
// Reads one Rice/Golomb-coded signed value with parameter k from the
// bitstream. The first branch is a fast path that decodes the unary prefix
// and k-bit remainder from a single 32-bit peek when enough bits are
// cached; the slow path reads the unary prefix bit by bit. The final line
// undoes the zig-zag mapping of unsigned codes to signed values.
// NOTE(review): the fast-path bit arithmetic mirrors FFmpeg's get_ur_golomb;
// kept byte-identical because its behaviour depends on exact shift order.
this.prototype.golomb = function(k, limit, esc_len) {
var data = this.bitstream,
offset = data.bitPosition,
buf = data.peek(32 - offset) << offset,
v = 0;
var log = 31 - clz(buf | 1); // log2(buf)
if (log - k >= 32 - MIN_CACHE_BITS && 32 - log < limit) {
// fast path: prefix + remainder fit in the peeked word
buf >>>= log - k;
buf += (30 - log) << k;
data.advance(32 + k - log);
v = buf;
} else {
// slow path: count the unary prefix one bit at a time
for (var i = 0; data.read(1) === 0; i++)
buf = data.peek(32 - offset) << offset;
if (i < limit - 1) {
if (k)
buf = data.read(k);
else
buf = 0;
v = buf + (i << k);
} else if (i === limit - 1) {
// escape: value stored directly in esc_len bits
buf = data.read(esc_len);
v = buf + 1;
} else {
v = -1;
}
}
// zig-zag decode: map unsigned code back to signed residual
return (v >> 1) ^ -(v & 1);
};
// Should be in the damned standard library...
function clz(input) {
var output = 0,
curbyte = 0;
while(true) { // emulate goto in JS using the break statement :D
curbyte = input >>> 24;
if (curbyte) break;
output += 8;
curbyte = input >>> 16;
if (curbyte & 0xff) break;
output += 8;
curbyte = input >>> 8;
if (curbyte & 0xff) break;
output += 8;
curbyte = input;
if (curbyte & 0xff) break;
output += 8;
return output;
}
if (!(curbyte & 0xf0))
output += 4;
else
curbyte >>>= 4;
if (curbyte & 0x8)
return output;
if (curbyte & 0x4)
return output + 1;
if (curbyte & 0x2)
return output + 2;
if (curbyte & 0x1)
return output + 3;
// shouldn't get here
return output + 4;
}
});
module.exports = FLACDecoder;
},{}],3:[function(require,module,exports){
/*
* FLAC.js - Free Lossless Audio Codec decoder in JavaScript
* By Devon Govett and Jens Nockert of Official.fm Labs
*
* FLAC.js is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FLAC.js is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
*/
var AV = (window.AV);
var FLACDemuxer = AV.Demuxer.extend(function() {
AV.Demuxer.register(this);
// Demuxer registration probe: a native FLAC stream starts with 'fLaC'.
this.probe = function(buffer) {
return buffer.peekString(0, 4) === 'fLaC';
}
const STREAMINFO = 0,
PADDING = 1,
APPLICATION = 2,
SEEKTABLE = 3,
VORBIS_COMMENT = 4,
CUESHEET = 5,
PICTURE = 6,
INVALID = 127,
STREAMINFO_SIZE = 34;
// Incremental demuxer: consumes metadata blocks (STREAMINFO, SEEKTABLE,
// VORBIS_COMMENT, PICTURE, ...) from the stream, emitting format/cookie/
// duration/metadata events, then forwards raw frame data once the last
// metadata block has been seen. State is carried across calls because the
// stream may not have enough bytes yet.
this.prototype.readChunk = function() {
var stream = this.stream;
// file magic, checked once
if (!this.readHeader && stream.available(4)) {
if (stream.readString(4) !== 'fLaC')
return this.emit('error', 'Invalid FLAC file.');
this.readHeader = true;
}
while (stream.available(1) && !this.last) {
// each metadata block starts with a 1-bit "last" flag, 7-bit type and
// 24-bit size
if (!this.readBlockHeaders) {
var tmp = stream.readUInt8();
this.last = (tmp & 0x80) === 0x80,
this.type = tmp & 0x7F,
this.size = stream.readUInt24();
}
if (!this.foundStreamInfo && this.type !== STREAMINFO)
return this.emit('error', 'STREAMINFO must be the first block');
// wait until the whole block has arrived
if (!stream.available(this.size))
return;
switch (this.type) {
case STREAMINFO:
if (this.foundStreamInfo)
return this.emit('error', 'STREAMINFO can only occur once.');
if (this.size !== STREAMINFO_SIZE)
return this.emit('error', 'STREAMINFO size is wrong.');
this.foundStreamInfo = true;
var bitstream = new AV.Bitstream(stream);
// block-size/frame-size limits go to the decoder as its "cookie"
var cookie = {
minBlockSize: bitstream.read(16),
maxBlockSize: bitstream.read(16),
minFrameSize: bitstream.read(24),
maxFrameSize: bitstream.read(24)
};
this.format = {
formatID: 'flac',
sampleRate: bitstream.read(20),
channelsPerFrame: bitstream.read(3) + 1,
bitsPerChannel: bitstream.read(5) + 1
};
this.emit('format', this.format);
this.emit('cookie', cookie);
var sampleCount = bitstream.read(36);
this.emit('duration', sampleCount / this.format.sampleRate * 1000 | 0);
stream.advance(16); // skip MD5 hashes
this.readBlockHeaders = false;
break;
/*
I am only looking at the least significant 32 bits of sample number and offset data.
This is more than sufficient for the longest flac file I have (~50 mins 2-channel 16-bit 44.1k which uses about 7.5% of the UInt32 space for the largest offset).
Can certainly be improved by storing sample numbers and offsets as doubles, but would require additional overriding of the searchTimestamp and seek functions (possibly more?).
Also the flac faq suggests it would be possible to find frame lengths and thus create seek points on the fly via decoding, but I assume this would be slow.
I may look into these things though as my project progresses.
*/
case SEEKTABLE:
// 18 bytes per seek point: 64-bit sample number, 64-bit offset,
// 16-bit frame sample count (skipped)
for(var s=0; s<this.size/18; s++)
{
if(stream.peekUInt32(0) == 0xFFFFFFFF && stream.peekUInt32(1) == 0xFFFFFFFF)
{
//placeholder, ignore
stream.advance(18);
} else {
if(stream.readUInt32() > 0)
{
this.emit('error', 'Seek points with sample number >UInt32 not supported');
}
var samplenum = stream.readUInt32();
if(stream.readUInt32() > 0)
{
this.emit('error', 'Seek points with stream offset >UInt32 not supported');
}
var offset = stream.readUInt32();
stream.advance(2);
this.addSeekPoint(offset, samplenum);
}
}
break;
case VORBIS_COMMENT:
// see http://www.xiph.org/vorbis/doc/v-comment.html
// little-endian length-prefixed vendor string plus key=value pairs
this.metadata || (this.metadata = {});
var len = stream.readUInt32(true);
this.metadata.vendor = stream.readString(len);
var length = stream.readUInt32(true);
for (var i = 0; i < length; i++) {
len = stream.readUInt32(true);
var str = stream.readString(len, 'utf8'),
idx = str.indexOf('=');
this.metadata[str.slice(0, idx).toLowerCase()] = str.slice(idx + 1);
}
// TODO: standardize field names across formats
break;
case PICTURE:
var type = stream.readUInt32();
if (type !== 3) { // make sure this is album art (type 3)
stream.advance(this.size - 4);
} else {
var mimeLen = stream.readUInt32(),
mime = stream.readString(mimeLen),
descLen = stream.readUInt32(),
description = stream.readString(descLen),
width = stream.readUInt32(),
height = stream.readUInt32(),
depth = stream.readUInt32(),
colors = stream.readUInt32(),
length = stream.readUInt32(),
picture = stream.readBuffer(length);
this.metadata || (this.metadata = {});
this.metadata.coverArt = picture;
}
// does anyone want the rest of the info?
break;
default:
// unknown/unhandled block type: skip its payload
stream.advance(this.size);
this.readBlockHeaders = false;
}
if (this.last && this.metadata)
this.emit('metadata', this.metadata);
}
// after the last metadata block, everything else is frame data
while (stream.available(1) && this.last) {
var buffer = stream.readSingleBuffer(stream.remainingBytes());
this.emit('data', buffer);
}
}
});
module.exports = FLACDemuxer;
},{}],4:[function(require,module,exports){
// if ogg.js exists, register a plugin
// Module 4: FLAC-in-Ogg support. Detects the Ogg-mapped FLAC magic,
// strips the Ogg mapping header and feeds the embedded native FLAC
// stream to the regular demuxer.
try {
var OggDemuxer = (window.AV.OggDemuxer);
} catch (e) {};
if (!OggDemuxer) return;
OggDemuxer.plugins.push({
magic: "\177FLAC",
init: function() {
// buffer list + stream shared across packets
this.list = new AV.BufferList();
this.stream = new AV.Stream(this.list);
},
readHeaders: function(packet) {
var stream = this.stream;
this.list.append(new AV.Buffer(packet));
stream.advance(5); // magic
if (stream.readUInt8() != 1)
throw new Error('Unsupported FLAC version');
stream.advance(3); // minor version + header packet count
if (stream.peekString(0, 4) != 'fLaC')
throw new Error('Not flac');
// delegate the native FLAC metadata parsing to the registered demuxer
this.flac = AV.Demuxer.find(stream.peekSingleBuffer(0, stream.remainingBytes()));
if (!this.flac)
throw new Error('Flac demuxer not found');
this.flac.prototype.readChunk.call(this);
return true;
},
readPacket: function(packet) {
this.list.append(new AV.Buffer(packet));
this.flac.prototype.readChunk.call(this);
}
});
},{}]},{},[1])
//# sourceMappingURL=flac.js.map | systems-rebooter/music | js/vendor/aurora/flac.js | JavaScript | agpl-3.0 | 25,523 |
import os.path
import time
from django.core.management.base import BaseCommand
from django.conf import settings
import mitxmako.middleware as middleware
from django.core.mail import send_mass_mail
import sys
import datetime
middleware.MakoMiddleware()
def chunks(l, n):
    """Yield successive n-sized chunks from l.

    Works on any sequence that supports slicing (lists, strings, ...);
    the final chunk may be shorter than n.
    """
    # range() instead of xrange(): identical iteration behaviour on
    # Python 2 and also works on Python 3, where xrange() no longer exists.
    for i in range(0, len(l), n):
        yield l[i:i + n]
class Command(BaseCommand):
    # Django management command that mass-mails every user listed in a file,
    # throttled to roughly `rate` messages per second. Python 2 only (uses
    # the print statement).
    help = \
'''Sends an e-mail to all users in a text file.

E.g.

manage.py userlist.txt message logfile.txt rate

userlist.txt -- list of all users
message -- prefix for template with message
logfile.txt -- where to log progress
rate -- messages per second
'''
    # File handle opened in handle(); written by hard_log().
    log_file = None

    def hard_log(self, text):
        # Timestamped, unbuffered progress log so a crash still leaves a
        # record of which users were already mailed.
        self.log_file.write(datetime.datetime.utcnow().isoformat() + ' -- ' + text + '\n')

    def handle(self, *args, **options):
        # Positional args: user list file, template prefix, log file, rate.
        (user_file, message_base, logfilename, ratestr) = args
        users = [u.strip() for u in open(user_file).readlines()]
        # Render body and subject from the mako templates under emails/.
        message = middleware.lookup['main'].get_template('emails/' + message_base + "_body.txt").render()
        subject = middleware.lookup['main'].get_template('emails/' + message_base + "_subject.txt").render().strip()
        rate = int(ratestr)
        self.log_file = open(logfilename, "a+", buffering=0)  # unbuffered append
        i = 0
        for users in chunks(users, rate):
            emails = [(subject, message, settings.DEFAULT_FROM_EMAIL, [u]) for u in users]
            # Log the batch BEFORE sending, so a crash can't double-send.
            self.hard_log(" ".join(users))
            send_mass_mail(emails, fail_silently=False)
            time.sleep(1)  # throttle: one batch of `rate` mails per second
            print datetime.datetime.utcnow().isoformat(), i
            i = i + len(users)
            # Emergency interruptor
            if os.path.exists("/tmp/stopemails.txt"):
                self.log_file.close()
                sys.exit(-1)
        self.log_file.close()
| kalebhartje/schoolboost | common/djangoapps/student/management/commands/massemailtxt.py | Python | agpl-3.0 | 1,862 |
/*
* /MathJax/localization/br/br.js
*
* Copyright (c) 2009-2015 The MathJax Consortium
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Breton ("br") locale data for MathJax core messages. Generated file:
// `plural` implements the Breton plural-category rules (six categories),
// `number` performs no transformation on numerals.
MathJax.Localization.addTranslation( "br", null, {
menuTitle: "brezhoneg",
version: "2.5.0",
isLoaded: true,
domains: {
_: {
version: "2.5.0",
isLoaded: true,
strings: {
MathProcessingError: "Fazi o treta\u00F1 ar formulenn",
MathError: "Fazi er formulenn",
LoadFile: "O karga\u00F1 %1",
Loading: "O karga\u00F1",
LoadFailed: "N'eus ket bet gallet karga\u00F1 %1",
ProcessMath: "Treta\u00F1 ar formulenno\u00F9 : %1%%",
Processing: "O treta\u00F1",
TypesetMath: "Aoza\u00F1 formulenno\u00F9 : %1%%",
Typesetting: "Aoza\u00F1",
MathJaxNotSupported: "Ne c'hall ket ho merdeer ober gant MathJax"
}
}, FontWarnings: {}, "HTML-CSS": {}, HelpDialog: {}, MathML: {}, MathMenu: {}, TeX: {}
},
plural: function( a )
{
if (a % 10 === 1 && !(a % 100 === 11 || a % 100 === 71 || a % 100 === 91)) {
return 1
}
if (a % 10 === 2 && !(a % 100 === 12 || a % 100 === 72 || a % 100 === 92)) {
return 2
}
if ((a % 10 === 3 || a % 10 === 4 || a % 10 === 9) && !(10 <= a % 100 && a % 100 <= 19 || 70 <= a % 100 && a % 100 <= 79 || 90 <= a % 100 && a % 100 <= 99)) {
return 3
}
if (a !== 0 && a % 1000000 === 0) {
return 4
}
return 5
},
number: function( a )
{
return a
}
} );
MathJax.Ajax.loadComplete( "[MathJax]/localization/br/br.js" );
| hannesk001/SPHERE-Framework | Library/MathJax/2.5.0/localization/br/br.js | JavaScript | agpl-3.0 | 2,278 |
/* -*- Mode: IDL; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is the Places Command Controller.
*
* The Initial Developer of the Original Code is Google Inc.
*
* Portions created by the Initial Developer are Copyright (C) 2005
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
* Sungjoon Steve Won <stevewon@gmail.com> (Original Author)
* Asaf Romano <mano@mozilla.com>
* Marco Bonarco <mak77@bonardo.net>
*
* Alternatively, the contents of this file may be used under the terms of
* either the GNU General Public License Version 2 or later (the "GPL"), or
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
// Shorthand aliases for the XPCOM component globals used throughout this file.
let Ci = Components.interfaces;
let Cc = Components.classes;
let Cr = Components.results;
// Annotation names read/written by the transactions below.
const LOAD_IN_SIDEBAR_ANNO = "bookmarkProperties/loadInSidebar";
const DESCRIPTION_ANNO = "bookmarkProperties/description";
const GUID_ANNO = "placesInternal/GUID";
// XPCOM registration identifiers for the transactions service component.
const CLASS_ID = Components.ID("c0844a84-5a12-4808-80a8-809cb002bb4f");
const CONTRACT_ID = "@mozilla.org/browser/placesTransactionsService;1";
Components.utils.import("resource://gre/modules/XPCOMUtils.jsm");
// Lazy getter for PlacesUtils: the module is only imported on first access,
// and the getter then replaces itself with the resolved object (the `delete`
// must happen before the re-assignment for the replacement to stick).
__defineGetter__("PlacesUtils", function() {
  delete this.PlacesUtils
  var tmpScope = {};
  Components.utils.import("resource://gre/modules/utils.js", tmpScope);
  return this.PlacesUtils = tmpScope.PlacesUtils;
});
// The minimum amount of transactions we should tell our observers to begin
// batching (rather than letting them do incremental drawing).
const MIN_TRANSACTIONS_FOR_BATCH = 5;
/**
 * Places transactions service: wraps an nsITransactionManager instance and
 * exposes factory methods that build undoable bookmark/folder/livemark
 * edit transactions (see the prototype below).
 */
function placesTransactionsService() {
  this.mTransactionManager = Cc["@mozilla.org/transactionmanager;1"].
                             createInstance(Ci.nsITransactionManager);
}
/**
 * Service prototype: XPCOM metadata, one factory method per transaction
 * type, and passthroughs to the wrapped nsITransactionManager.
 *
 * NOTE: the original passthroughs used SpiderMonkey-only "expression
 * closures" (`function() expr`), which are not valid standard JavaScript
 * and were removed from Gecko; they are rewritten here as ordinary
 * function bodies with an explicit `return` (behavior is identical).
 */
placesTransactionsService.prototype = {
  classDescription: "Places Transaction Manager",
  classID: CLASS_ID,
  contractID: CONTRACT_ID,
  QueryInterface: XPCOMUtils.generateQI([Ci.nsIPlacesTransactionsService,
                                         Ci.nsITransactionManager]),

  // --- Transaction factories -------------------------------------------

  aggregateTransactions:
  function placesTxn_aggregateTransactions(aName, aTransactions) {
    return new placesAggregateTransactions(aName, aTransactions);
  },

  createFolder:
  function placesTxn_createFolder(aName, aContainer, aIndex,
                                  aAnnotations, aChildItemsTransactions) {
    return new placesCreateFolderTransactions(aName, aContainer, aIndex,
                                              aAnnotations, aChildItemsTransactions);
  },

  createItem:
  function placesTxn_createItem(aURI, aContainer, aIndex, aTitle,
                                aKeyword, aAnnotations, aChildTransactions) {
    return new placesCreateItemTransactions(aURI, aContainer, aIndex, aTitle,
                                            aKeyword, aAnnotations, aChildTransactions);
  },

  createSeparator:
  function placesTxn_createSeparator(aContainer, aIndex) {
    return new placesCreateSeparatorTransactions(aContainer, aIndex);
  },

  createLivemark:
  function placesTxn_createLivemark(aFeedURI, aSiteURI, aName,
                                    aContainer, aIndex, aAnnotations) {
    return new placesCreateLivemarkTransactions(aFeedURI, aSiteURI, aName,
                                                aContainer, aIndex, aAnnotations);
  },

  moveItem:
  function placesTxn_moveItem(aItemId, aNewContainer, aNewIndex) {
    return new placesMoveItemTransactions(aItemId, aNewContainer, aNewIndex);
  },

  /**
   * Builds the appropriate removal transaction for an item.
   * Refuses to remove Places root folders; tag-child items are removed via
   * an untag transaction; livemarks get a dedicated transaction so their
   * (service-managed) children are not individually saved/restored.
   */
  removeItem:
  function placesTxn_removeItem(aItemId) {
    if (aItemId == PlacesUtils.tagsFolderId ||
        aItemId == PlacesUtils.placesRootId ||
        aItemId == PlacesUtils.bookmarksMenuFolderId ||
        aItemId == PlacesUtils.toolbarFolderId)
      throw Cr.NS_ERROR_INVALID_ARG;

    // if the item lives within a tag container, use the tagging transactions
    var parent = PlacesUtils.bookmarks.getFolderIdForItem(aItemId);
    var grandparent = PlacesUtils.bookmarks.getFolderIdForItem(parent);
    if (grandparent == PlacesUtils.tagsFolderId) {
      var uri = PlacesUtils.bookmarks.getBookmarkURI(aItemId);
      return this.untagURI(uri, [parent]);
    }

    // if the item is a livemark container we will not save its children and
    // will use createLivemark to undo.
    if (PlacesUtils.itemIsLivemark(aItemId))
      return new placesRemoveLivemarkTransaction(aItemId);

    return new placesRemoveItemTransaction(aItemId);
  },

  editItemTitle:
  function placesTxn_editItemTitle(aItemId, aNewTitle) {
    return new placesEditItemTitleTransactions(aItemId, aNewTitle);
  },

  editBookmarkURI:
  function placesTxn_editBookmarkURI(aItemId, aNewURI) {
    return new placesEditBookmarkURITransactions(aItemId, aNewURI);
  },

  setItemAnnotation:
  function placesTxn_setItemAnnotation(aItemId, aAnnotationObject) {
    return new placesSetItemAnnotationTransactions(aItemId, aAnnotationObject);
  },

  setPageAnnotation:
  function placesTxn_setPageAnnotation(aURI, aAnnotationObject) {
    return new placesSetPageAnnotationTransactions(aURI, aAnnotationObject);
  },

  // Convenience wrapper: stores the load-in-sidebar flag as an item annotation.
  setLoadInSidebar:
  function placesTxn_setLoadInSidebar(aItemId, aLoadInSidebar) {
    var annoObj = { name: LOAD_IN_SIDEBAR_ANNO,
                    type: Ci.nsIAnnotationService.TYPE_INT32,
                    flags: 0,
                    value: aLoadInSidebar,
                    expires: Ci.nsIAnnotationService.EXPIRE_NEVER };
    return this.setItemAnnotation(aItemId, annoObj);
  },

  // Convenience wrapper: stores the description as an item annotation.
  editItemDescription:
  function placesTxn_editItemDescription(aItemId, aDescription) {
    var annoObj = { name: DESCRIPTION_ANNO,
                    type: Ci.nsIAnnotationService.TYPE_STRING,
                    flags: 0,
                    value: aDescription,
                    expires: Ci.nsIAnnotationService.EXPIRE_NEVER };
    return this.setItemAnnotation(aItemId, annoObj);
  },

  editBookmarkKeyword:
  function placesTxn_editBookmarkKeyword(aItemId, aNewKeyword) {
    return new placesEditBookmarkKeywordTransactions(aItemId, aNewKeyword);
  },

  editBookmarkPostData:
  function placesTxn_editBookmarkPostdata(aItemId, aPostData) {
    return new placesEditBookmarkPostDataTransactions(aItemId, aPostData);
  },

  editLivemarkSiteURI:
  function placesTxn_editLivemarkSiteURI(aLivemarkId, aSiteURI) {
    return new placesEditLivemarkSiteURITransactions(aLivemarkId, aSiteURI);
  },

  editLivemarkFeedURI:
  function placesTxn_editLivemarkFeedURI(aLivemarkId, aFeedURI) {
    return new placesEditLivemarkFeedURITransactions(aLivemarkId, aFeedURI);
  },

  editBookmarkMicrosummary:
  function placesTxn_editBookmarkMicrosummary(aItemId, aNewMicrosummary) {
    return new placesEditBookmarkMicrosummaryTransactions(aItemId, aNewMicrosummary);
  },

  editItemDateAdded:
  function placesTxn_editItemDateAdded(aItemId, aNewDateAdded) {
    return new placesEditItemDateAddedTransaction(aItemId, aNewDateAdded);
  },

  editItemLastModified:
  function placesTxn_editItemLastModified(aItemId, aNewLastModified) {
    return new placesEditItemLastModifiedTransaction(aItemId, aNewLastModified);
  },

  sortFolderByName:
  function placesTxn_sortFolderByName(aFolderId) {
    return new placesSortFolderByNameTransactions(aFolderId);
  },

  tagURI:
  function placesTxn_tagURI(aURI, aTags) {
    return new placesTagURITransaction(aURI, aTags);
  },

  untagURI:
  function placesTxn_untagURI(aURI, aTags) {
    return new placesUntagURITransaction(aURI, aTags);
  },

  // Update commands in the undo group of the active window;
  // commands in inactive windows are updated on focus.
  _updateCommands: function placesTxn__updateCommands() {
    var wm = Cc["@mozilla.org/appshell/window-mediator;1"].
             getService(Ci.nsIWindowMediator);
    var win = wm.getMostRecentWindow(null);
    if (win)
      win.updateCommands("undo");
  },

  // --- nsITransactionManager passthroughs ------------------------------

  beginBatch: function() {
    this.mTransactionManager.beginBatch();

    // A no-op transaction is pushed to the stack, in order to make it safe
    // and easy to implement "Undo" for an unknown number of transactions
    // (including 0) "above" beginBatch and endBatch. Otherwise, implementing
    // Undo that way could lead to dataloss: for example, if no changes were
    // done in the edit-item panel, the last transaction on the undo stack
    // would be the initial createItem transaction, or even worse, the
    // batched editing of some other item.
    // DO NOT MOVE this to the window scope, that would leak (bug 490068)!
    this.doTransaction({ doTransaction: function() { },
                         undoTransaction: function() { },
                         redoTransaction: function() { },
                         isTransient: false,
                         merge: function() { return false; } });
  },

  endBatch: function() {
    return this.mTransactionManager.endBatch();
  },

  doTransaction: function placesTxn_doTransaction(txn) {
    this.mTransactionManager.doTransaction(txn);
    this._updateCommands();
  },

  undoTransaction: function placesTxn_undoTransaction() {
    this.mTransactionManager.undoTransaction();
    this._updateCommands();
  },

  redoTransaction: function placesTxn_redoTransaction() {
    this.mTransactionManager.redoTransaction();
    this._updateCommands();
  },

  clear: function() {
    return this.mTransactionManager.clear();
  },

  get numberOfUndoItems() {
    return this.mTransactionManager.numberOfUndoItems;
  },
  get numberOfRedoItems() {
    return this.mTransactionManager.numberOfRedoItems;
  },
  get maxTransactionCount() {
    return this.mTransactionManager.maxTransactionCount;
  },
  set maxTransactionCount(val) {
    return this.mTransactionManager.maxTransactionCount = val;
  },

  peekUndoStack: function() {
    return this.mTransactionManager.peekUndoStack();
  },
  peekRedoStack: function() {
    return this.mTransactionManager.peekRedoStack();
  },
  getUndoStack: function() {
    return this.mTransactionManager.getUndoStack();
  },
  getRedoStack: function() {
    return this.mTransactionManager.getRedoStack();
  },
  AddListener: function(l) {
    return this.mTransactionManager.AddListener(l);
  },
  RemoveListener: function(l) {
    return this.mTransactionManager.RemoveListener(l);
  }
};
/**
* Method and utility stubs for Places Edit Transactions
*/
/**
 * Base prototype shared by all Places transactions below; provides the
 * common nsITransaction plumbing (redo stub, isTransient, merge).
 */
function placesBaseTransaction() {
}

placesBaseTransaction.prototype = {
  // Expose the raw JS object so parent/aggregate transactions can set
  // properties (e.g. the container id) on their child transactions.
  get wrappedJSObject() {
    return this;
  },

  // nsITransaction
  // Subclasses alias redoTransaction to their own doTransaction in the
  // constructor; reaching this stub means a subclass forgot to do so.
  redoTransaction: function PBT_redoTransaction() {
    throw Cr.NS_ERROR_NOT_IMPLEMENTED;
  },

  get isTransient() {
    return false;
  },

  // Places transactions are never merged with each other.
  merge: function mergeFunc(transaction) {
    return false;
  },

  // nsISupports
  QueryInterface: XPCOMUtils.generateQI([Ci.nsITransaction]),
};
/**
 * Transaction that performs several child transactions as one undoable unit.
 * @param name          title for the aggregate transaction.
 * @param transactions  array of child transactions to run together.
 */
function placesAggregateTransactions(name, transactions) {
  this._transactions = transactions;
  this._name = name;
  // Container id propagated to children in commit(); -1 means "not set".
  this.container = -1;
  this.redoTransaction = this.doTransaction;

  // Check child transactions number. We will batch if we have more than
  // MIN_TRANSACTIONS_FOR_BATCH total number of transactions.
  // The count recurses into nested childTransactions and stops early once
  // the threshold is reached.
  var countTransactions = function(aTransactions, aTxnCount) {
    for (let i = 0;
         i < aTransactions.length && aTxnCount < MIN_TRANSACTIONS_FOR_BATCH;
         i++, aTxnCount++) {
      let txn = aTransactions[i].wrappedJSObject;
      if (txn && txn.childTransactions && txn.childTransactions.length)
        aTxnCount = countTransactions(txn.childTransactions, aTxnCount);
    }
    return aTxnCount;
  }
  var txnCount = countTransactions(transactions, 0);
  this._useBatch = txnCount >= MIN_TRANSACTIONS_FOR_BATCH;
}

placesAggregateTransactions.prototype = {
  __proto__: placesBaseTransaction.prototype,

  doTransaction: function PAT_doTransaction() {
    if (this._useBatch) {
      // Run inside bookmarks batch mode so observers see a single batch
      // instead of redrawing once per child transaction.
      var callback = {
        _self: this,
        runBatched: function() {
          this._self.commit(false);
        }
      };
      PlacesUtils.bookmarks.runInBatchMode(callback, null);
    }
    else
      this.commit(false);
  },

  undoTransaction: function PAT_undoTransaction() {
    if (this._useBatch) {
      var callback = {
        _self: this,
        runBatched: function() {
          this._self.commit(true);
        }
      };
      PlacesUtils.bookmarks.runInBatchMode(callback, null);
    }
    else
      this.commit(true);
  },

  /**
   * Executes (aUndo == false) or reverts (aUndo == true) all children.
   * Undo runs the children in reverse order.
   */
  commit: function PAT_commit(aUndo) {
    // Use a copy of the transactions array, so we won't reverse the original
    // one on undoing.
    var transactions = this._transactions.slice(0);
    if (aUndo)
      transactions.reverse();
    for (var i = 0; i < transactions.length; i++) {
      var txn = transactions[i];
      // Propagate our container id to children that need it.
      if (this.container > -1)
        txn.wrappedJSObject.container = this.container;
      if (aUndo)
        txn.undoTransaction();
      else
        txn.doTransaction();
    }
  }
};
/**
 * Transaction for creating a new bookmark folder.
 * @param aName       folder title.
 * @param aContainer  id of the parent folder.
 * @param aIndex      insertion index, or any non-number to append (-1).
 * @param aAnnotations  optional annotations to set on the new folder.
 * @param aChildItemsTransactions  optional child-creation transactions to
 *        run inside the new folder.
 */
function placesCreateFolderTransactions(aName, aContainer, aIndex,
                                        aAnnotations,
                                        aChildItemsTransactions) {
  this._name = aName;
  this._container = aContainer;
  this._index = typeof(aIndex) == "number" ? aIndex : -1;
  this._annotations = aAnnotations;
  this._id = null;
  this.childTransactions = aChildItemsTransactions || [];
  this.redoTransaction = this.doTransaction;
}

placesCreateFolderTransactions.prototype = {
  __proto__: placesBaseTransaction.prototype,

  // childItemsTransaction support
  get container() { return this._container; },
  set container(val) { return this._container = val; },

  doTransaction: function PCFT_doTransaction() {
    this._id = PlacesUtils.bookmarks.createFolder(this._container,
                                                  this._name, this._index);
    if (this._annotations && this._annotations.length > 0)
      PlacesUtils.setAnnotationsForItem(this._id, this._annotations);
    if (this.childTransactions.length) {
      // Set the new container id into child transactions.
      for (var i = 0; i < this.childTransactions.length; ++i) {
        this.childTransactions[i].wrappedJSObject.container = this._id;
      }
      let aggregateTxn = new placesAggregateTransactions("Create folder childTxn",
                                                         this.childTransactions);
      aggregateTxn.doTransaction();
    }
    // Restore the GUID saved by a previous undo, so redo keeps identity.
    if (this._GUID)
      PlacesUtils.bookmarks.setItemGUID(this._id, this._GUID);
  },

  undoTransaction: function PCFT_undoTransaction() {
    if (this.childTransactions.length) {
      let aggregateTxn = new placesAggregateTransactions("Create folder childTxn",
                                                         this.childTransactions);
      aggregateTxn.undoTransaction();
    }
    // If a GUID exists for this item, preserve it before removing the item.
    if (PlacesUtils.annotations.itemHasAnnotation(this._id, GUID_ANNO))
      this._GUID = PlacesUtils.bookmarks.getItemGUID(this._id);
    // Remove item only after all child transactions have been reverted.
    PlacesUtils.bookmarks.removeItem(this._id);
  }
};
/**
 * Transaction for creating a new bookmark.
 * @param aURI        uri to bookmark.
 * @param aContainer  id of the parent folder.
 * @param aIndex      insertion index, or any non-number to append (-1).
 * @param aTitle      bookmark title.
 * @param aKeyword    optional keyword for the bookmark.
 * @param aAnnotations  optional annotations to set on the new item.
 * @param aChildTransactions  optional transactions to run against the new
 *        item (they receive its id via their `id` property).
 */
function placesCreateItemTransactions(aURI, aContainer, aIndex, aTitle,
                                      aKeyword, aAnnotations,
                                      aChildTransactions) {
  this._uri = aURI;
  this._container = aContainer;
  this._index = typeof(aIndex) == "number" ? aIndex : -1;
  this._title = aTitle;
  this._keyword = aKeyword;
  this._annotations = aAnnotations;
  this.childTransactions = aChildTransactions || [];
  this.redoTransaction = this.doTransaction;
}

placesCreateItemTransactions.prototype = {
  __proto__: placesBaseTransaction.prototype,

  // childItemsTransactions support for the create-folder transaction
  get container() { return this._container; },
  set container(val) { return this._container = val; },

  doTransaction: function PCIT_doTransaction() {
    this._id = PlacesUtils.bookmarks.insertBookmark(this.container, this._uri,
                                                    this._index, this._title);
    if (this._keyword)
      PlacesUtils.bookmarks.setKeywordForBookmark(this._id, this._keyword);
    if (this._annotations && this._annotations.length > 0)
      PlacesUtils.setAnnotationsForItem(this._id, this._annotations);
    if (this.childTransactions.length) {
      // Set the new item id into child transactions.
      for (var i = 0; i < this.childTransactions.length; ++i) {
        this.childTransactions[i].wrappedJSObject.id = this._id;
      }
      let aggregateTxn = new placesAggregateTransactions("Create item childTxn",
                                                         this.childTransactions);
      aggregateTxn.doTransaction();
    }
    // Restore the GUID saved by a previous undo, so redo keeps identity.
    if (this._GUID)
      PlacesUtils.bookmarks.setItemGUID(this._id, this._GUID);
  },

  undoTransaction: function PCIT_undoTransaction() {
    if (this.childTransactions.length) {
      // Undo transactions should always be done in reverse order.
      let aggregateTxn = new placesAggregateTransactions("Create item childTxn",
                                                         this.childTransactions);
      aggregateTxn.undoTransaction();
    }
    // If a GUID exists for this item, preserve it before removing the item.
    if (PlacesUtils.annotations.itemHasAnnotation(this._id, GUID_ANNO))
      this._GUID = PlacesUtils.bookmarks.getItemGUID(this._id);
    // Remove item only after all child transactions have been reverted.
    PlacesUtils.bookmarks.removeItem(this._id);
  }
};
/**
 * Transaction for inserting a bookmark separator.
 * @param aContainer  id of the parent folder.
 * @param aIndex      insertion index, or any non-number to append (-1).
 */
function placesCreateSeparatorTransactions(aContainer, aIndex) {
  this.redoTransaction = this.doTransaction;
  this._id = null;
  this._container = aContainer;
  this._index = (typeof aIndex == "number") ? aIndex : -1;
}

placesCreateSeparatorTransactions.prototype = {
  __proto__: placesBaseTransaction.prototype,

  // childItemsTransaction support: parent transactions may retarget us.
  get container() {
    return this._container;
  },
  set container(val) {
    return this._container = val;
  },

  doTransaction: function PCST_doTransaction() {
    var bookmarks = PlacesUtils.bookmarks;
    this._id = bookmarks.insertSeparator(this.container, this._index);
    // Restore the GUID saved by a previous undo, so redo keeps identity.
    if (this._GUID)
      bookmarks.setItemGUID(this._id, this._GUID);
  },

  undoTransaction: function PCST_undoTransaction() {
    // If a GUID exists for this item, preserve it before removing the item.
    if (PlacesUtils.annotations.itemHasAnnotation(this._id, GUID_ANNO))
      this._GUID = PlacesUtils.bookmarks.getItemGUID(this._id);
    PlacesUtils.bookmarks.removeItem(this._id);
  }
};
/**
 * Transaction for creating a livemark.
 * @param aFeedURI    feed uri of the livemark.
 * @param aSiteURI    site uri of the livemark.
 * @param aName       livemark title.
 * @param aContainer  id of the parent folder.
 * @param aIndex      insertion index, or any non-number to append (-1).
 * @param aAnnotations  optional annotations to set on the new livemark.
 */
function placesCreateLivemarkTransactions(aFeedURI, aSiteURI, aName,
                                          aContainer, aIndex,
                                          aAnnotations) {
  this.redoTransaction = this.doTransaction;
  this._feedURI = aFeedURI;
  this._siteURI = aSiteURI;
  this._name = aName;
  this._container = aContainer;
  this._index = typeof(aIndex) == "number" ? aIndex : -1;
  this._annotations = aAnnotations;
}

placesCreateLivemarkTransactions.prototype = {
  __proto__: placesBaseTransaction.prototype,

  // childItemsTransaction support
  get container() { return this._container; },
  set container(val) { return this._container = val; },

  doTransaction: function PCLT_doTransaction() {
    this._id = PlacesUtils.livemarks.createLivemark(this._container, this._name,
                                                    this._siteURI, this._feedURI,
                                                    this._index);
    if (this._annotations && this._annotations.length > 0)
      PlacesUtils.setAnnotationsForItem(this._id, this._annotations);
    // Restore the GUID saved by a previous undo, so redo keeps identity.
    if (this._GUID)
      PlacesUtils.bookmarks.setItemGUID(this._id, this._GUID);
  },

  undoTransaction: function PCLT_undoTransaction() {
    // If a GUID exists for this item, preserve it before removing the item.
    if (PlacesUtils.annotations.itemHasAnnotation(this._id, GUID_ANNO))
      this._GUID = PlacesUtils.bookmarks.getItemGUID(this._id);
    PlacesUtils.bookmarks.removeItem(this._id);
  }
};
/**
 * Transaction for removing a livemark.
 * The livemark's state (title, parent, feed/site uris, dates and non-service
 * annotations) is captured at construction time so undo can recreate it.
 * @param aFolderId  id of the livemark folder to remove.
 */
function placesRemoveLivemarkTransaction(aFolderId) {
  this.redoTransaction = this.doTransaction;
  this._id = aFolderId;
  this._title = PlacesUtils.bookmarks.getItemTitle(this._id);
  this._container = PlacesUtils.bookmarks.getFolderIdForItem(this._id);
  var annos = PlacesUtils.getAnnotationsForItem(this._id);
  // Exclude livemark service annotations, those will be recreated automatically
  var annosToExclude = ["livemark/feedURI",
                        "livemark/siteURI",
                        "livemark/expiration",
                        "livemark/loadfailed",
                        "livemark/loading"];
  this._annotations = annos.filter(function(aValue, aIndex, aArray) {
    return annosToExclude.indexOf(aValue.name) == -1;
  });
  this._feedURI = PlacesUtils.livemarks.getFeedURI(this._id);
  this._siteURI = PlacesUtils.livemarks.getSiteURI(this._id);
  this._dateAdded = PlacesUtils.bookmarks.getItemDateAdded(this._id);
  this._lastModified = PlacesUtils.bookmarks.getItemLastModified(this._id);
}

placesRemoveLivemarkTransaction.prototype = {
  __proto__: placesBaseTransaction.prototype,

  doTransaction: function PRLT_doTransaction() {
    // Save the current index so undo restores the original position.
    this._index = PlacesUtils.bookmarks.getItemIndex(this._id);
    PlacesUtils.bookmarks.removeItem(this._id);
  },

  undoTransaction: function PRLT_undoTransaction() {
    // Recreate the livemark; note this yields a new item id.
    this._id = PlacesUtils.livemarks.createLivemark(this._container,
                                                    this._title,
                                                    this._siteURI,
                                                    this._feedURI,
                                                    this._index);
    PlacesUtils.bookmarks.setItemDateAdded(this._id, this._dateAdded);
    PlacesUtils.bookmarks.setItemLastModified(this._id, this._lastModified);
    // Restore annotations
    PlacesUtils.setAnnotationsForItem(this._id, this._annotations);
  }
};
/**
 * Transaction for moving an item to a new container and/or index.
 * @param aItemId        id of the item to move.
 * @param aNewContainer  id of the destination folder.
 * @param aNewIndex      destination index.
 */
function placesMoveItemTransactions(aItemId, aNewContainer, aNewIndex) {
  this._id = aItemId;
  this._oldContainer = PlacesUtils.bookmarks.getFolderIdForItem(this._id);
  this._newContainer = aNewContainer;
  this._newIndex = aNewIndex;
  this.redoTransaction = this.doTransaction;
}

placesMoveItemTransactions.prototype = {
  __proto__: placesBaseTransaction.prototype,

  doTransaction: function PMIT_doTransaction() {
    // Capture the index before the move (for undo) and after the move
    // (to detect direction of an in-container move).
    this._oldIndex = PlacesUtils.bookmarks.getItemIndex(this._id);
    PlacesUtils.bookmarks.moveItem(this._id, this._newContainer, this._newIndex);
    this._undoIndex = PlacesUtils.bookmarks.getItemIndex(this._id);
  },

  undoTransaction: function PMIT_undoTransaction() {
    // moving down in the same container takes in count removal of the item
    // so to revert positions we must move to oldIndex + 1
    if (this._newContainer == this._oldContainer &&
        this._oldIndex > this._undoIndex)
      PlacesUtils.bookmarks.moveItem(this._id, this._oldContainer, this._oldIndex + 1);
    else
      PlacesUtils.bookmarks.moveItem(this._id, this._oldContainer, this._oldIndex);
  }
};
/**
 * Transaction for removing a bookmark, folder or separator.
 * For folders, removal is expressed as an aggregate of per-child removal
 * transactions plus the backend's remove-folder transaction; for bookmarks,
 * the uri/keyword are captured so undo can re-insert them.
 * @param aItemId  id of the item to remove.
 */
function placesRemoveItemTransaction(aItemId) {
  this.redoTransaction = this.doTransaction;
  this._id = aItemId;
  this._itemType = PlacesUtils.bookmarks.getItemType(this._id);
  if (this._itemType == Ci.nsINavBookmarksService.TYPE_FOLDER) {
    this.childTransactions = this._getFolderContentsTransactions();
    // Remove this folder itself.
    let txn = PlacesUtils.bookmarks.getRemoveFolderTransaction(this._id);
    this.childTransactions.push(txn);
  }
  else if (this._itemType == Ci.nsINavBookmarksService.TYPE_BOOKMARK) {
    this._uri = PlacesUtils.bookmarks.getBookmarkURI(this._id);
    this._keyword = PlacesUtils.bookmarks.getKeywordForBookmark(this._id);
  }
  // Separators have no title; everything else does.
  if (this._itemType != Ci.nsINavBookmarksService.TYPE_SEPARATOR)
    this._title = PlacesUtils.bookmarks.getItemTitle(this._id);
  this._oldContainer = PlacesUtils.bookmarks.getFolderIdForItem(this._id);
  this._annotations = PlacesUtils.getAnnotationsForItem(this._id);
  this._dateAdded = PlacesUtils.bookmarks.getItemDateAdded(this._id);
  this._lastModified = PlacesUtils.bookmarks.getItemLastModified(this._id);
}

placesRemoveItemTransaction.prototype = {
  __proto__: placesBaseTransaction.prototype,

  doTransaction: function PRIT_doTransaction() {
    // Save the index so undo restores the original position.
    this._oldIndex = PlacesUtils.bookmarks.getItemIndex(this._id);

    if (this._itemType == Ci.nsINavBookmarksService.TYPE_FOLDER) {
      let aggregateTxn = new placesAggregateTransactions("Remove item childTxn",
                                                         this.childTransactions);
      aggregateTxn.doTransaction();
    }
    else {
      PlacesUtils.bookmarks.removeItem(this._id);
      if (this._uri) {
        // if this was the last bookmark (excluding tag-items and livemark
        // children, see getMostRecentBookmarkForURI) for the bookmark's url,
        // remove the url from tag containers as well.
        if (PlacesUtils.getMostRecentBookmarkForURI(this._uri) == -1) {
          this._tags = PlacesUtils.tagging.getTagsForURI(this._uri, {});
          PlacesUtils.tagging.untagURI(this._uri, this._tags);
        }
      }
    }
  },

  undoTransaction: function PRIT_undoTransaction() {
    if (this._itemType == Ci.nsINavBookmarksService.TYPE_BOOKMARK) {
      this._id = PlacesUtils.bookmarks.insertBookmark(this._oldContainer,
                                                      this._uri,
                                                      this._oldIndex,
                                                      this._title);
      // Restore tags removed in doTransaction, if any.
      if (this._tags && this._tags.length > 0)
        PlacesUtils.tagging.tagURI(this._uri, this._tags);
      if (this._keyword)
        PlacesUtils.bookmarks.setKeywordForBookmark(this._id, this._keyword);
    }
    else if (this._itemType == Ci.nsINavBookmarksService.TYPE_FOLDER) {
      let aggregateTxn = new placesAggregateTransactions("Remove item childTxn",
                                                         this.childTransactions);
      aggregateTxn.undoTransaction();
    }
    else // TYPE_SEPARATOR
      this._id = PlacesUtils.bookmarks.insertSeparator(this._oldContainer, this._oldIndex);

    if (this._annotations.length > 0)
      PlacesUtils.setAnnotationsForItem(this._id, this._annotations);

    PlacesUtils.bookmarks.setItemDateAdded(this._id, this._dateAdded);
    PlacesUtils.bookmarks.setItemLastModified(this._id, this._lastModified);
  },

  /**
  * Returns a flat, ordered list of transactions for a depth-first recreation
  * of items within this folder.
  */
  _getFolderContentsTransactions:
  function PRIT__getFolderContentsTransactions() {
    var transactions = [];
    var contents =
      PlacesUtils.getFolderContents(this._id, false, false).root;
    for (var i = 0; i < contents.childCount; ++i) {
      let txn = new placesRemoveItemTransaction(contents.getChild(i).itemId);
      transactions.push(txn);
    }
    contents.containerOpen = false;
    // Reverse transactions to preserve parent-child relationship.
    return transactions.reverse();
  }
};
/**
 * Transaction for changing an item's title.
 * @param id        id of the item whose title is edited.
 * @param newTitle  the title to set.
 */
function placesEditItemTitleTransactions(id, newTitle) {
  this.redoTransaction = this.doTransaction;
  this._id = id;
  this._oldTitle = "";
  this._newTitle = newTitle;
}

placesEditItemTitleTransactions.prototype = {
  __proto__: placesBaseTransaction.prototype,

  doTransaction: function PEITT_doTransaction() {
    var bookmarks = PlacesUtils.bookmarks;
    // Save the current title for undo, then apply the new one.
    this._oldTitle = bookmarks.getItemTitle(this._id);
    bookmarks.setItemTitle(this._id, this._newTitle);
  },

  undoTransaction: function PEITT_undoTransaction() {
    PlacesUtils.bookmarks.setItemTitle(this._id, this._oldTitle);
  }
};
/**
 * Transaction for changing a bookmark's uri; tags follow the bookmark
 * from the old uri to the new one.
 * @param aBookmarkId  id of the bookmark to edit.
 * @param aNewURI      the uri to set.
 */
function placesEditBookmarkURITransactions(aBookmarkId, aNewURI) {
  this._id = aBookmarkId;
  this._newURI = aNewURI;
  this.redoTransaction = this.doTransaction;
}

placesEditBookmarkURITransactions.prototype = {
  __proto__: placesBaseTransaction.prototype,

  doTransaction: function PEBUT_doTransaction() {
    this._oldURI = PlacesUtils.bookmarks.getBookmarkURI(this._id);
    PlacesUtils.bookmarks.changeBookmarkURI(this._id, this._newURI);
    // move tags from old URI to new URI
    this._tags = PlacesUtils.tagging.getTagsForURI(this._oldURI, {});
    if (this._tags.length != 0) {
      // only untag the old URI if this is the only bookmark
      if (PlacesUtils.getBookmarksForURI(this._oldURI, {}).length == 0)
        PlacesUtils.tagging.untagURI(this._oldURI, this._tags);
      PlacesUtils.tagging.tagURI(this._newURI, this._tags);
    }
  },

  undoTransaction: function PEBUT_undoTransaction() {
    PlacesUtils.bookmarks.changeBookmarkURI(this._id, this._oldURI);
    // move tags from new URI to old URI
    if (this._tags.length != 0) {
      // only untag the new URI if this is the only bookmark
      if (PlacesUtils.getBookmarksForURI(this._newURI, {}).length == 0)
        PlacesUtils.tagging.untagURI(this._newURI, this._tags);
      PlacesUtils.tagging.tagURI(this._oldURI, this._tags);
    }
  }
};
/**
 * Transaction for setting an annotation on an item.
 * Note: `id` is a public property because this is often used as a child
 * transaction whose target id is assigned by the parent (see
 * placesCreateItemTransactions.doTransaction).
 * @param aItemId            id of the item to annotate (may be overridden).
 * @param aAnnotationObject  annotation descriptor (name/type/flags/value/expires).
 */
function placesSetItemAnnotationTransactions(aItemId, aAnnotationObject) {
  this.id = aItemId;
  this._anno = aAnnotationObject;
  // create an empty old anno
  this._oldAnno = { name: this._anno.name,
                    type: Ci.nsIAnnotationService.TYPE_STRING,
                    flags: 0,
                    value: null,
                    expires: Ci.nsIAnnotationService.EXPIRE_NEVER };
  this.redoTransaction = this.doTransaction;
}

placesSetItemAnnotationTransactions.prototype = {
  __proto__: placesBaseTransaction.prototype,

  doTransaction: function PSIAT_doTransaction() {
    // Since this can be used as a child transaction this.id will be known
    // only at this point, after the external caller has set it.
    if (PlacesUtils.annotations.itemHasAnnotation(this.id, this._anno.name)) {
      // Save the old annotation if it is set.
      var flags = {}, expires = {}, mimeType = {}, type = {};
      PlacesUtils.annotations.getItemAnnotationInfo(this.id, this._anno.name,
                                                    flags, expires, mimeType,
                                                    type);
      this._oldAnno.flags = flags.value;
      this._oldAnno.expires = expires.value;
      this._oldAnno.mimeType = mimeType.value;
      this._oldAnno.type = type.value;
      this._oldAnno.value = PlacesUtils.annotations
                                       .getItemAnnotation(this.id,
                                                          this._anno.name);
    }

    PlacesUtils.setAnnotationsForItem(this.id, [this._anno]);
  },

  undoTransaction: function PSIAT_undoTransaction() {
    // Restore the previous annotation (or the empty placeholder).
    PlacesUtils.setAnnotationsForItem(this.id, [this._oldAnno]);
  }
};
/**
 * Transaction for setting an annotation on a page (uri).
 * Unlike the item variant, the old annotation is captured at construction
 * time rather than in doTransaction.
 * @param aURI               uri of the page to annotate.
 * @param aAnnotationObject  annotation descriptor (name/type/flags/value/expires).
 */
function placesSetPageAnnotationTransactions(aURI, aAnnotationObject) {
  this._uri = aURI;
  this._anno = aAnnotationObject;
  // create an empty old anno
  this._oldAnno = { name: this._anno.name,
                    type: Ci.nsIAnnotationService.TYPE_STRING,
                    flags: 0,
                    value: null,
                    expires: Ci.nsIAnnotationService.EXPIRE_NEVER };

  if (PlacesUtils.annotations.pageHasAnnotation(this._uri, this._anno.name)) {
    // fill the old anno if it is set
    var flags = {}, expires = {}, mimeType = {}, type = {};
    PlacesUtils.annotations.getPageAnnotationInfo(this._uri, this._anno.name,
                                                  flags, expires, mimeType, type);
    this._oldAnno.flags = flags.value;
    this._oldAnno.expires = expires.value;
    this._oldAnno.mimeType = mimeType.value;
    this._oldAnno.type = type.value;
    this._oldAnno.value = PlacesUtils.annotations
                                     .getPageAnnotation(this._uri, this._anno.name);
  }

  this.redoTransaction = this.doTransaction;
}

placesSetPageAnnotationTransactions.prototype = {
  __proto__: placesBaseTransaction.prototype,

  doTransaction: function PSPAT_doTransaction() {
    PlacesUtils.setAnnotationsForURI(this._uri, [this._anno]);
  },

  undoTransaction: function PSPAT_undoTransaction() {
    // Restore the previous annotation (or the empty placeholder).
    PlacesUtils.setAnnotationsForURI(this._uri, [this._oldAnno]);
  }
};
/**
 * Transaction for changing a bookmark's keyword.
 * @param id          id of the bookmark to edit.
 * @param newKeyword  the keyword to set.
 */
function placesEditBookmarkKeywordTransactions(id, newKeyword) {
  this.redoTransaction = this.doTransaction;
  this.id = id;
  this._oldKeyword = "";
  this._newKeyword = newKeyword;
}

placesEditBookmarkKeywordTransactions.prototype = {
  __proto__: placesBaseTransaction.prototype,

  doTransaction: function PEBKT_doTransaction() {
    var bookmarks = PlacesUtils.bookmarks;
    // Save the current keyword for undo, then apply the new one.
    this._oldKeyword = bookmarks.getKeywordForBookmark(this.id);
    bookmarks.setKeywordForBookmark(this.id, this._newKeyword);
  },

  undoTransaction: function PEBKT_undoTransaction() {
    PlacesUtils.bookmarks.setKeywordForBookmark(this.id, this._oldKeyword);
  }
};
/**
 * Transaction for changing a bookmark's POST data.
 * @param aItemId    id of the bookmark to edit.
 * @param aPostData  the POST data to set.
 */
function placesEditBookmarkPostDataTransactions(aItemId, aPostData) {
  this.redoTransaction = this.doTransaction;
  this.id = aItemId;
  this._oldPostData = null;
  this._newPostData = aPostData;
}

placesEditBookmarkPostDataTransactions.prototype = {
  __proto__: placesBaseTransaction.prototype,

  doTransaction: function PEUPDT_doTransaction() {
    // Save the current POST data for undo, then apply the new value.
    this._oldPostData = PlacesUtils.getPostDataForBookmark(this.id);
    PlacesUtils.setPostDataForBookmark(this.id, this._newPostData);
  },

  undoTransaction: function PEUPDT_undoTransaction() {
    PlacesUtils.setPostDataForBookmark(this.id, this._oldPostData);
  }
};
/**
 * Transaction for changing a livemark's site uri.
 * @param folderId  id of the livemark folder.
 * @param uri       the site uri to set.
 */
function placesEditLivemarkSiteURITransactions(folderId, uri) {
  this.redoTransaction = this.doTransaction;
  this._folderId = folderId;
  this._oldURI = null;
  this._newURI = uri;
}

placesEditLivemarkSiteURITransactions.prototype = {
  __proto__: placesBaseTransaction.prototype,

  doTransaction: function PELSUT_doTransaction() {
    var livemarks = PlacesUtils.livemarks;
    // Save the current site uri for undo, then apply the new one.
    this._oldURI = livemarks.getSiteURI(this._folderId);
    livemarks.setSiteURI(this._folderId, this._newURI);
  },

  undoTransaction: function PELSUT_undoTransaction() {
    PlacesUtils.livemarks.setSiteURI(this._folderId, this._oldURI);
  }
};
/**
 * Transaction for changing a livemark's feed uri.
 * The livemark folder is reloaded after both do and undo so its contents
 * reflect the active feed.
 * @param folderId  id of the livemark folder.
 * @param uri       the feed uri to set.
 */
function placesEditLivemarkFeedURITransactions(folderId, uri) {
  this.redoTransaction = this.doTransaction;
  this._folderId = folderId;
  this._oldURI = null;
  this._newURI = uri;
}

placesEditLivemarkFeedURITransactions.prototype = {
  __proto__: placesBaseTransaction.prototype,

  doTransaction: function PELFUT_doTransaction() {
    var livemarks = PlacesUtils.livemarks;
    // Save the current feed uri for undo, then apply the new one.
    this._oldURI = livemarks.getFeedURI(this._folderId);
    livemarks.setFeedURI(this._folderId, this._newURI);
    livemarks.reloadLivemarkFolder(this._folderId);
  },

  undoTransaction: function PELFUT_undoTransaction() {
    var livemarks = PlacesUtils.livemarks;
    livemarks.setFeedURI(this._folderId, this._oldURI);
    livemarks.reloadLivemarkFolder(this._folderId);
  }
};
/**
 * Transaction for setting or clearing a bookmark's microsummary.
 * A falsy new/old microsummary means "no microsummary", handled via
 * removeMicrosummary.
 * @param aItemId          id of the bookmark to edit.
 * @param newMicrosummary  microsummary to set, or a falsy value to remove it.
 */
function placesEditBookmarkMicrosummaryTransactions(aItemId, newMicrosummary) {
  this.id = aItemId;
  this._mss = Cc["@mozilla.org/microsummary/service;1"].
              getService(Ci.nsIMicrosummaryService);
  this._newMicrosummary = newMicrosummary;
  this._oldMicrosummary = null;
  this.redoTransaction = this.doTransaction;
}

placesEditBookmarkMicrosummaryTransactions.prototype = {
  __proto__: placesBaseTransaction.prototype,

  doTransaction: function PEBMT_doTransaction() {
    // Save the current microsummary for undo.
    this._oldMicrosummary = this._mss.getMicrosummary(this.id);
    if (this._newMicrosummary)
      this._mss.setMicrosummary(this.id, this._newMicrosummary);
    else
      this._mss.removeMicrosummary(this.id);
  },

  undoTransaction: function PEBMT_undoTransaction() {
    if (this._oldMicrosummary)
      this._mss.setMicrosummary(this.id, this._oldMicrosummary);
    else
      this._mss.removeMicrosummary(this.id);
  }
};
/**
 * Transaction for changing an item's date-added timestamp.
 * @param id            id of the item to edit.
 * @param newDateAdded  the date-added value to set.
 */
function placesEditItemDateAddedTransaction(id, newDateAdded) {
  this.redoTransaction = this.doTransaction;
  this.id = id;
  this._oldDateAdded = null;
  this._newDateAdded = newDateAdded;
}

placesEditItemDateAddedTransaction.prototype = {
  __proto__: placesBaseTransaction.prototype,

  // to support folders as well
  get container() {
    return this.id;
  },
  set container(val) {
    return this.id = val;
  },

  doTransaction: function PEIDA_doTransaction() {
    var bookmarks = PlacesUtils.bookmarks;
    // Save the current value for undo, then apply the new one.
    this._oldDateAdded = bookmarks.getItemDateAdded(this.id);
    bookmarks.setItemDateAdded(this.id, this._newDateAdded);
  },

  undoTransaction: function PEIDA_undoTransaction() {
    PlacesUtils.bookmarks.setItemDateAdded(this.id, this._oldDateAdded);
  }
};
/**
 * Transaction for changing an item's last-modified timestamp.
 * @param id               id of the item to edit.
 * @param newLastModified  the last-modified value to set.
 */
function placesEditItemLastModifiedTransaction(id, newLastModified) {
  this.id = id;
  this._newLastModified = newLastModified;
  this._oldLastModified = null;
  this.redoTransaction = this.doTransaction;
}

placesEditItemLastModifiedTransaction.prototype = {
  __proto__: placesBaseTransaction.prototype,

  // to support folders as well
  get container() { return this.id; },
  set container(val) { return this.id = val; },

  doTransaction: function PEILM_doTransaction() {
    // Save the current value for undo, then apply the new one.
    this._oldLastModified = PlacesUtils.bookmarks.getItemLastModified(this.id);
    PlacesUtils.bookmarks.setItemLastModified(this.id, this._newLastModified);
  },

  undoTransaction: function PEILM_undoTransaction() {
    PlacesUtils.bookmarks.setItemLastModified(this.id, this._oldLastModified);
  }
};
/**
 * Transaction for sorting a folder's children by name.
 * @param aFolderId  id of the folder whose children are sorted.
 */
function placesSortFolderByNameTransactions(aFolderId) {
  this._folderId = aFolderId;
  // Map of itemId -> original index, filled in by doTransaction for undo.
  // (The original ended this statement with a stray comma, accidentally
  // chaining the next assignment via the comma operator; fixed to `;`.)
  this._oldOrder = null;
  this.redoTransaction = this.doTransaction;
}
placesSortFolderByNameTransactions.prototype = {
  __proto__: placesBaseTransaction.prototype,

  doTransaction: function PSSFBN_doTransaction() {
    this._oldOrder = [];

    var contents =
      PlacesUtils.getFolderContents(this._folderId, false, false).root;
    var count = contents.childCount;

    // sort between separators
    var newOrder = [];
    var preSep = []; // temporary array for sorting each group of items
    // Containers sort before other items; otherwise compare titles.
    var sortingMethod =
      function (a, b) {
        if (PlacesUtils.nodeIsContainer(a) && !PlacesUtils.nodeIsContainer(b))
          return -1;
        if (!PlacesUtils.nodeIsContainer(a) && PlacesUtils.nodeIsContainer(b))
          return 1;
        return a.title.localeCompare(b.title);
      };

    for (var i = 0; i < count; ++i) {
      var item = contents.getChild(i);
      // Remember the original index of every child for undo.
      this._oldOrder[item.itemId] = i;
      if (PlacesUtils.nodeIsSeparator(item)) {
        // Flush the current group: sort it and keep the separator in place.
        if (preSep.length > 0) {
          preSep.sort(sortingMethod);
          newOrder = newOrder.concat(preSep);
          preSep.splice(0);
        }
        newOrder.push(item);
      }
      else
        preSep.push(item);
    }
    contents.containerOpen = false;

    // Sort the trailing group after the last separator, if any.
    if (preSep.length > 0) {
      preSep.sort(sortingMethod);
      newOrder = newOrder.concat(preSep);
    }

    // set the new indexes
    var callback = {
      runBatched: function() {
        for (var i = 0; i < newOrder.length; ++i) {
          PlacesUtils.bookmarks.setItemIndex(newOrder[i].itemId, i);
        }
      }
    };
    PlacesUtils.bookmarks.runInBatchMode(callback, null);
  },

  undoTransaction: function PSSFBN_undoTransaction() {
    // Restore every child's saved index, batched for observers.
    var callback = {
      _self: this,
      runBatched: function() {
        for (item in this._self._oldOrder)
          PlacesUtils.bookmarks.setItemIndex(item, this._self._oldOrder[item]);
      }
    };
    PlacesUtils.bookmarks.runInBatchMode(callback, null);
  }
};
function placesTagURITransaction(aURI, aTags) {
  // Transaction that tags `aURI` with the given tag names.
  // _unfiledItemId tracks the forced unfiled bookmark (if one was created)
  // so undo can remove it again; -1 means "none created".
  this._uri = aURI;
  this._tags = aTags;
  this._unfiledItemId = -1;
  this.redoTransaction = this.doTransaction;
}
placesTagURITransaction.prototype = {
  __proto__: placesBaseTransaction.prototype,

  doTransaction: function PTU_doTransaction() {
    if (PlacesUtils.getMostRecentBookmarkForURI(this._uri) == -1) {
      // Force an unfiled bookmark first
      this._unfiledItemId =
        PlacesUtils.bookmarks
                   .insertBookmark(PlacesUtils.unfiledBookmarksFolderId,
                                   this._uri,
                                   PlacesUtils.bookmarks.DEFAULT_INDEX,
                                   PlacesUtils.history.getPageTitle(this._uri));
      // Re-apply the GUID saved by a previous undoTransaction, if any,
      // so redo restores the exact same item identity.
      if (this._GUID)
        PlacesUtils.bookmarks.setItemGUID(this._unfiledItemId, this._GUID);
    }
    PlacesUtils.tagging.tagURI(this._uri, this._tags);
  },

  undoTransaction: function PTU_undoTransaction() {
    if (this._unfiledItemId != -1) {
      // If a GUID exists for this item, preserve it before removing the item.
      if (PlacesUtils.annotations.itemHasAnnotation(this._unfiledItemId, GUID_ANNO)) {
        this._GUID = PlacesUtils.bookmarks.getItemGUID(this._unfiledItemId);
      }
      PlacesUtils.bookmarks.removeItem(this._unfiledItemId);
      this._unfiledItemId = -1;
    }
    PlacesUtils.tagging.untagURI(this._uri, this._tags);
  }
};
function placesUntagURITransaction(aURI, aTags) {
  // Transaction that removes tags from `aURI`.
  this._uri = aURI;
  if (aTags) {
    // Within this transaction, we cannot rely on tags given by itemId
    // since the tag containers may be gone after we call untagURI.
    // Thus, we convert each tag given by its itemId to name.
    this._tags = aTags;
    for (var i = 0, len = aTags.length; i < len; ++i) {
      if (typeof(this._tags[i]) === "number")
        this._tags[i] = PlacesUtils.bookmarks.getItemTitle(this._tags[i]);
    }
  }
  else {
    // No tags given: untag all tags currently set on the URI.
    this._tags = PlacesUtils.tagging.getTagsForURI(this._uri, {});
  }
  this.redoTransaction = this.doTransaction;
}
placesUntagURITransaction.prototype = {
__proto__: placesBaseTransaction.prototype,
doTransaction: function PUTU_doTransaction() {
PlacesUtils.tagging.untagURI(this._uri, this._tags);
},
undoTransaction: function PUTU_undoTransaction() {
PlacesUtils.tagging.tagURI(this._uri, this._tags);
}
};
// XPCOM module entry point: exposes the transactions service component.
function NSGetModule(aCompMgr, aFileSpec) {
  return XPCOMUtils.generateModule([placesTransactionsService]);
}
| csinitiative/trisano | webapp/features/support/firefox-36/components/nsPlacesTransactionsService.js | JavaScript | agpl-3.0 | 42,772 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
# Copyright (C) 2010-2014 OpenERP s.a. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import itertools
import logging
from functools import partial
from itertools import repeat
from lxml import etree
from lxml.builder import E
import openerp
from openerp import SUPERUSER_ID, models
from openerp import tools
import openerp.exceptions
from openerp.osv import fields, osv, expression
from openerp.tools.translate import _
from openerp.http import request
_logger = logging.getLogger(__name__)
#----------------------------------------------------------
# Basic res.groups and res.users
#----------------------------------------------------------
class res_groups(osv.osv):
    _name = "res.groups"
    _description = "Access Groups"
    _rec_name = 'full_name'
    _order = 'name'

    def _get_full_name(self, cr, uid, ids, field, arg, context=None):
        """Function field getter: group name prefixed by its application
        category ("Category / Group"), or the bare name if uncategorized."""
        res = {}
        for g in self.browse(cr, uid, ids, context):
            if g.category_id:
                res[g.id] = '%s / %s' % (g.category_id.name, g.name)
            else:
                res[g.id] = g.name
        return res

    def _search_group(self, cr, uid, obj, name, args, context=None):
        """Search support for 'full_name': accepts "Category / Group" strings
        (or lists of them) and builds an equivalent domain on `name` and
        `category_id.name`."""
        operand = args[0][2]
        operator = args[0][1]
        lst = True
        if isinstance(operand, bool):
            domains = [[('name', operator, operand)], [('category_id.name', operator, operand)]]
            # BUGFIX: this used to read
            #   operator in expression.NEGATIVE_TERM_OPERATORS == (not operand)
            # which Python chains as
            #   (operator in NEG_OPS) and (NEG_OPS == (not operand))
            # and is always False (a tuple never equals a bool), so AND was
            # never returned. Parenthesize to match the intended semantics
            # (same form as the `not values` test further down).
            if (operator in expression.NEGATIVE_TERM_OPERATORS) == (not operand):
                return expression.AND(domains)
            else:
                return expression.OR(domains)
        if isinstance(operand, basestring):
            lst = False
            operand = [operand]
        where = []
        for group in operand:
            values = filter(bool, group.split('/'))
            group_name = values.pop().strip()
            category_name = values and '/'.join(values).strip() or group_name
            group_domain = [('name', operator, lst and [group_name] or group_name)]
            category_domain = [('category_id.name', operator, lst and [category_name] or category_name)]
            if operator in expression.NEGATIVE_TERM_OPERATORS and not values:
                # negative search without explicit category: also match
                # groups that have no category at all
                category_domain = expression.OR([category_domain, [('category_id', '=', False)]])
            if (operator in expression.NEGATIVE_TERM_OPERATORS) == (not values):
                sub_where = expression.AND([group_domain, category_domain])
            else:
                sub_where = expression.OR([group_domain, category_domain])
            if operator in expression.NEGATIVE_TERM_OPERATORS:
                where = expression.AND([where, sub_where])
            else:
                where = expression.OR([where, sub_where])
        return where

    _columns = {
        'name': fields.char('Name', required=True, translate=True),
        'users': fields.many2many('res.users', 'res_groups_users_rel', 'gid', 'uid', 'Users'),
        'model_access': fields.one2many('ir.model.access', 'group_id', 'Access Controls'),
        'rule_groups': fields.many2many('ir.rule', 'rule_group_rel',
            'group_id', 'rule_group_id', 'Rules', domain=[('global', '=', False)]),
        'menu_access': fields.many2many('ir.ui.menu', 'ir_ui_menu_group_rel', 'gid', 'menu_id', 'Access Menu'),
        'view_access': fields.many2many('ir.ui.view', 'ir_ui_view_group_rel', 'group_id', 'view_id', 'Views'),
        'comment' : fields.text('Comment', size=250, translate=True),
        'category_id': fields.many2one('ir.module.category', 'Application', select=True),
        'full_name': fields.function(_get_full_name, type='char', string='Group Name', fnct_search=_search_group),
    }

    _sql_constraints = [
        ('name_uniq', 'unique (category_id, name)', 'The name of the group must be unique within an application!')
    ]

    def search(self, cr, uid, args, offset=0, limit=None, order=None, context=None, count=False):
        """Standard search, except that ordering on the computed 'full_name'
        field is done in Python (it has no SQL column to sort on)."""
        # add explicit ordering if search is sorted on full_name
        if order and order.startswith('full_name'):
            ids = super(res_groups, self).search(cr, uid, args, context=context)
            gs = self.browse(cr, uid, ids, context)
            gs.sort(key=lambda g: g.full_name, reverse=order.endswith('DESC'))
            gs = gs[offset:offset+limit] if limit else gs[offset:]
            return map(int, gs)
        return super(res_groups, self).search(cr, uid, args, offset, limit, order, context, count)

    def copy(self, cr, uid, id, default=None, context=None):
        """Duplicate a group, suffixing its name with ' (copy)'."""
        group_name = self.read(cr, uid, [id], ['name'])[0]['name']
        # BUGFIX: copy() is routinely called with default=None, which made
        # default.update(...) crash; normalize to a fresh dict first.
        default = dict(default or {})
        default.update({'name': _('%s (copy)')%group_name})
        return super(res_groups, self).copy(cr, uid, id, default, context)

    def write(self, cr, uid, ids, vals, context=None):
        """Rename guard + cache invalidation: group names may not start with
        '-' (reserved), and changing a group invalidates ACL/has_group caches."""
        if 'name' in vals:
            if vals['name'].startswith('-'):
                raise osv.except_osv(_('Error'),
                    _('The name of the group can not start with "-"'))
        res = super(res_groups, self).write(cr, uid, ids, vals, context=context)
        self.pool['ir.model.access'].call_cache_clearing_methods(cr)
        self.pool['res.users'].has_group.clear_cache(self.pool['res.users'])
        return res
class res_users(osv.osv):
    """ User class. A res.users record models an OpenERP user and is different
        from an employee.

        res.users class now inherits from res.partner. The partner model is
        used to store the data related to the partner: lang, name, address,
        avatar, ... The user model is now dedicated to technical data.
    """
    # NOTE(review): not referenced anywhere in this module chunk — presumably
    # a legacy per-database cache of admin user ids; confirm before removing.
    __admin_ids = {}
    # Per-database cache of verified (uid -> password) pairs, maintained by
    # check()/write()/unlink() below to avoid re-querying on every RPC call.
    _uid_cache = {}
    # Delegation inheritance: every user owns (and proxies fields of) a partner.
    _inherits = {
        'res.partner': 'partner_id',
    }
    _name = "res.users"
    _description = 'Users'
def _set_new_password(self, cr, uid, id, name, value, args, context=None):
    """Inverse of the 'new_password' function field: stores a new password
    on the target user.  A user may not change his own password this way."""
    if value is False:
        # Do not update the password if no value is provided, ignore silently.
        # For example web client submits False values for all empty fields.
        return
    if uid == id:
        # To change their own password users must use the client-specific change password wizard,
        # so that the new password is immediately used for further RPC requests, otherwise the user
        # will face unexpected 'Access Denied' exceptions.
        raise osv.except_osv(_('Operation Canceled'), _('Please use the change password wizard (in User Preferences or User menu) to change your own password.'))
    self.write(cr, uid, id, {'password': value})
def _get_password(self, cr, uid, ids, arg, karg, context=None):
    # Getter of the 'new_password' function field: never expose stored
    # passwords — every requested id reads back as an empty string.
    return {record_id: '' for record_id in ids}
# Field definitions; everything not listed here is delegated to res.partner
# through the _inherits declared above.
_columns = {
    'id': fields.integer('ID'),
    'login_date': fields.date('Latest connection', select=1, copy=False),
    'partner_id': fields.many2one('res.partner', required=True,
        string='Related Partner', ondelete='restrict',
        help='Partner-related data of the user', auto_join=True),
    'login': fields.char('Login', size=64, required=True,
        help="Used to log into the system"),
    'password': fields.char('Password', size=64, invisible=True, copy=False,
        help="Keep empty if you don't want the user to be able to connect on the system."),
    # Write-only pseudo-field: reads '' (_get_password), writes through
    # _set_new_password above.
    'new_password': fields.function(_get_password, type='char', size=64,
        fnct_inv=_set_new_password, string='Set Password',
        help="Specify a value only when creating a user or if you're "\
             "changing the user's password, otherwise leave empty. After "\
             "a change of password, the user has to login again."),
    'signature': fields.html('Signature'),
    'active': fields.boolean('Active'),
    'action_id': fields.many2one('ir.actions.actions', 'Home Action', help="If specified, this action will be opened at log on for this user, in addition to the standard menu."),
    'groups_id': fields.many2many('res.groups', 'res_groups_users_rel', 'uid', 'gid', 'Groups'),
    # Special behavior for this field: res.company.search() will only return the companies
    # available to the current user (should be the user's companies?), when the user_preference
    # context is set.
    'company_id': fields.many2one('res.company', 'Company', required=True,
        help='The company this user is currently working for.', context={'user_preference': True}),
    'company_ids':fields.many2many('res.company','res_company_users_rel','user_id','cid','Companies'),
}

# overridden inherited fields to bypass access rights, in case you have
# access to the user but not its corresponding partner
name = openerp.fields.Char(related='partner_id.name', inherited=True)
email = openerp.fields.Char(related='partner_id.email', inherited=True)
def on_change_login(self, cr, uid, ids, login, context=None):
    """If the login looks like an email address, propose it as the email."""
    if login and tools.single_email_re.match(login):
        return {'value': {'email': login}}
    return {}

def onchange_state(self, cr, uid, ids, state_id, context=None):
    """Forward the state onchange to the underlying partner records."""
    partner_ids = [user.partner_id.id for user in self.browse(cr, uid, ids, context=context)]
    return self.pool.get('res.partner').onchange_state(cr, uid, partner_ids, state_id, context=context)

def onchange_type(self, cr, uid, ids, is_company, context=None):
    """ Wrapper on the user.partner onchange_type, because some calls to the
        partner form view applied to the user may trigger the
        partner.onchange_type method, but applied to the user object.
    """
    partner_ids = [user.partner_id.id for user in self.browse(cr, uid, ids, context=context)]
    return self.pool['res.partner'].onchange_type(cr, uid, partner_ids, is_company, context=context)

def onchange_address(self, cr, uid, ids, use_parent_address, parent_id, context=None):
    """ Wrapper on the user.partner onchange_address, because some calls to the
        partner form view applied to the user may trigger the
        partner.onchange_type method, but applied to the user object.
    """
    partner_ids = [user.partner_id.id for user in self.browse(cr, uid, ids, context=context)]
    return self.pool['res.partner'].onchange_address(cr, uid, partner_ids, use_parent_address, parent_id, context=context)
def _check_company(self, cr, uid, ids, context=None):
    # Constraint: the active company must be one of the allowed companies;
    # an empty company_ids list means "no restriction".
    return all(((this.company_id in this.company_ids) or not this.company_ids) for this in self.browse(cr, uid, ids, context))

_constraints = [
    (_check_company, 'The chosen company is not in the allowed companies for this user', ['company_id', 'company_ids']),
]

_sql_constraints = [
    ('login_key', 'UNIQUE (login)', 'You can not have two users with the same login !')
]
def _get_company(self,cr, uid, context=None, uid2=False):
    """Default company: the company of user `uid2` (defaults to `uid`)."""
    if not uid2:
        uid2 = uid
    # Use read() to compute default company, and pass load=_classic_write to
    # avoid useless name_get() calls. This will avoid prefetching fields
    # while computing default values for new db columns, as the
    # db backend may not be fully initialized yet.
    user_data = self.pool['res.users'].read(cr, uid, uid2, ['company_id'],
                                            context=context, load='_classic_write')
    comp_id = user_data['company_id']
    return comp_id or False

def _get_companies(self, cr, uid, context=None):
    """Default allowed companies: just the creating user's own company."""
    c = self._get_company(cr, uid, context)
    if c:
        return [c]
    return False

def _get_group(self,cr, uid, context=None):
    """Default groups for a new user: Employee and Partner Manager,
    when those XML ids are present in the database."""
    dataobj = self.pool.get('ir.model.data')
    result = []
    try:
        dummy,group_id = dataobj.get_object_reference(cr, SUPERUSER_ID, 'base', 'group_user')
        result.append(group_id)
        dummy,group_id = dataobj.get_object_reference(cr, SUPERUSER_ID, 'base', 'group_partner_manager')
        result.append(group_id)
    except ValueError:
        # if these groups do not exist anymore
        pass
    return result
def _get_default_image(self, cr, uid, context=None):
    # Delegate to res.partner's placeholder-avatar generator (colorized).
    return self.pool['res.partner']._get_default_image(cr, uid, False, colorize=True, context=context)

_defaults = {
    'password': '',
    'active': True,
    'customer': False,
    'company_id': _get_company,
    'company_ids': _get_companies,
    'groups_id': _get_group,
    'image': _get_default_image,
}

# User can write on a few of his own fields (but not his groups for example)
SELF_WRITEABLE_FIELDS = ['password', 'signature', 'action_id', 'company_id', 'email', 'name', 'image', 'image_medium', 'image_small', 'lang', 'tz']
# User can read a few of his own fields
SELF_READABLE_FIELDS = ['signature', 'company_id', 'login', 'email', 'name', 'image', 'image_medium', 'image_small', 'lang', 'tz', 'tz_offset', 'groups_id', 'partner_id', '__last_update']
def read(self, cr, uid, ids, fields=None, context=None, load='_classic_read'):
    """Standard read(), with two twists: users may read their own
    whitelisted fields regardless of access rules, and the stored password
    is masked for readers without write access on res.users."""
    def override_password(o):
        # mask the password in the result, except on the reader's own record
        if 'password' in o and ('id' not in o or o['id'] != uid):
            o['password'] = '********'
        return o

    if fields and (ids == [uid] or ids == uid):
        # a user reading only his own record: if every requested field is in
        # SELF_READABLE_FIELDS (or is a context_* field), escalate to
        # superuser to bypass access rights
        for key in fields:
            if not (key in self.SELF_READABLE_FIELDS or key.startswith('context_')):
                break
        else:
            # safe fields only, so we read as super-user to bypass access rights
            uid = SUPERUSER_ID

    result = super(res_users, self).read(cr, uid, ids, fields=fields, context=context, load=load)
    canwrite = self.pool['ir.model.access'].check(cr, uid, 'res.users', 'write', False)
    if not canwrite:
        if isinstance(ids, (int, long)):
            result = override_password(result)
        else:
            result = map(override_password, result)
    return result
def create(self, cr, uid, vals, context=None):
    """Create the user, then align the related partner's company with the
    user's company (only when the partner is not company-less/global)."""
    user_id = super(res_users, self).create(cr, uid, vals, context=context)
    user = self.browse(cr, uid, user_id, context=context)
    if user.partner_id.company_id:
        user.partner_id.write({'company_id': user.company_id.id})
    return user_id
def write(self, cr, uid, ids, values, context=None):
    """Standard write(), with self-service handling and cache invalidation.

    A user writing only SELF_WRITEABLE_FIELDS (or context_*) on his own
    record is escalated to superuser; a company_id switch is silently
    dropped if the target company is not in the user's allowed companies.
    """
    if not hasattr(ids, '__iter__'):
        ids = [ids]
    if ids == [uid]:
        for key in values.keys():
            if not (key in self.SELF_WRITEABLE_FIELDS or key.startswith('context_')):
                break
        else:
            if 'company_id' in values:
                user = self.browse(cr, SUPERUSER_ID, uid, context=context)
                if not (values['company_id'] in user.company_ids.ids):
                    del values['company_id']
            uid = 1 # safe fields only, so we write as super-user to bypass access rights

    res = super(res_users, self).write(cr, uid, ids, values, context=context)
    if 'company_id' in values:
        for user in self.browse(cr, uid, ids, context=context):
            # if partner is global we keep it that way
            if user.partner_id.company_id and user.partner_id.company_id.id != values['company_id']:
                user.partner_id.write({'company_id': user.company_id.id})
    # clear caches linked to the users
    self.pool['ir.model.access'].call_cache_clearing_methods(cr)
    clear = partial(self.pool['ir.rule'].clear_cache, cr)
    map(clear, ids)
    db = cr.dbname
    if db in self._uid_cache:
        # drop cached credentials: the password may have changed
        for id in ids:
            if id in self._uid_cache[db]:
                del self._uid_cache[db][id]
    self.context_get.clear_cache(self)
    self.has_group.clear_cache(self)
    return res
def unlink(self, cr, uid, ids, context=None):
    """Delete users, refusing to delete the admin (id 1) and evicting the
    deleted users from the credentials cache."""
    if 1 in ids:
        raise osv.except_osv(_('Can not remove root user!'), _('You can not remove the admin user as it is used internally for resources created by Odoo (updates, module installation, ...)'))
    db = cr.dbname
    if db in self._uid_cache:
        for id in ids:
            if id in self._uid_cache[db]:
                del self._uid_cache[db][id]
    return super(res_users, self).unlink(cr, uid, ids, context=context)
def name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100):
    """Name search that first tries an exact match on the login, and falls
    back to the usual search on the display name."""
    if not args:
        args=[]
    if not context:
        context={}
    ids = []
    if name and operator in ['=', 'ilike']:
        ids = self.search(cr, user, [('login','=',name)]+ args, limit=limit, context=context)
    if not ids:
        ids = self.search(cr, user, [('name',operator,name)]+ args, limit=limit, context=context)
    return self.name_get(cr, user, ids, context=context)
def copy(self, cr, uid, id, default=None, context=None):
    """Duplicate a user, suffixing name and login with ' (copy)' unless
    explicit values were passed in `default`."""
    user2copy = self.read(cr, uid, [id], ['login','name'])[0]
    default = dict(default or {})
    if ('name' not in default) and ('partner_id' not in default):
        default['name'] = _("%s (copy)") % user2copy['name']
    if 'login' not in default:
        default['login'] = _("%s (copy)") % user2copy['login']
    return super(res_users, self).copy(cr, uid, id, default, context)
@tools.ormcache(skiparg=2)
def context_get(self, cr, uid, context=None):
    """Build (and cache) the user's session context: lang, tz, and every
    field whose name starts with 'context_' (stored under the stripped key)."""
    user = self.browse(cr, SUPERUSER_ID, uid, context)
    result = {}
    for k in self._fields:
        if k.startswith('context_'):
            context_key = k[8:]   # strip the 'context_' prefix
        elif k in ['lang', 'tz']:
            context_key = k
        else:
            context_key = False
        if context_key:
            res = getattr(user, k) or False
            if isinstance(res, models.BaseModel):
                res = res.id   # store bare ids in the context, not records
            result[context_key] = res or False
    return result

def action_get(self, cr, uid, context=None):
    """Return the id of the user-preferences action (base.action_res_users_my)."""
    dataobj = self.pool['ir.model.data']
    data_id = dataobj._get_id(cr, SUPERUSER_ID, 'base', 'action_res_users_my')
    return dataobj.browse(cr, uid, data_id, context=context).res_id
def check_super(self, passwd):
    """Check `passwd` against the server-wide master ('admin') password."""
    if passwd == tools.config['admin_passwd']:
        return True
    else:
        raise openerp.exceptions.AccessDenied()

def check_credentials(self, cr, uid, password):
    """ Override this method to plug additional authentication methods"""
    # NOTE(review): this base implementation compares the stored password
    # verbatim; hashing modules (e.g. auth_crypt) are expected to override it.
    res = self.search(cr, SUPERUSER_ID, [('id','=',uid),('password','=',password)])
    if not res:
        raise openerp.exceptions.AccessDenied()
def _login(self, db, login, password):
    """Authenticate `login`/`password` on database `db`.

    Returns the user id on success, False otherwise. Uses its own cursor
    so it can run outside any caller transaction.
    """
    if not password:
        return False
    user_id = False
    cr = self.pool.cursor()
    try:
        # autocommit: our single update request will be performed atomically.
        # (In this way, there is no opportunity to have two transactions
        # interleaving their cr.execute()..cr.commit() calls and have one
        # of them rolled back due to a concurrent access.)
        cr.autocommit(True)
        # check if user exists
        res = self.search(cr, SUPERUSER_ID, [('login','=',login)])
        if res:
            user_id = res[0]
            # check credentials
            self.check_credentials(cr, user_id, password)
            # We effectively unconditionally write the res_users line.
            # Even w/ autocommit there's a chance the user row will be locked,
            # in which case we can't delay the login just for the purpose of
            # update the last login date - hence we use FOR UPDATE NOWAIT to
            # try to get the lock - fail-fast
            # Failing to acquire the lock on the res_users row probably means
            # another request is holding it. No big deal, we don't want to
            # prevent/delay login in that case. It will also have been logged
            # as a SQL error, if anyone cares.
            try:
                # NO KEY introduced in PostgreSQL 9.3 http://www.postgresql.org/docs/9.3/static/release-9-3.html#AEN115299
                update_clause = 'NO KEY UPDATE' if cr._cnx.server_version >= 90300 else 'UPDATE'
                cr.execute("SELECT id FROM res_users WHERE id=%%s FOR %s NOWAIT" % update_clause, (user_id,), log_exceptions=False)
                cr.execute("UPDATE res_users SET login_date = now() AT TIME ZONE 'UTC' WHERE id=%s", (user_id,))
                self.invalidate_cache(cr, user_id, ['login_date'], [user_id])
            except Exception:
                _logger.debug("Failed to update last_login for db:%s login:%s", db, login, exc_info=True)
    except openerp.exceptions.AccessDenied:
        _logger.info("Login failed for db:%s login:%s", db, login)
        user_id = False
    finally:
        cr.close()

    return user_id
def authenticate(self, db, login, password, user_agent_env):
    """Verifies and returns the user ID corresponding to the given
    ``login`` and ``password`` combination, or False if there was
    no matching user.

    :param str db: the database on which user is trying to authenticate
    :param str login: username
    :param str password: user password
    :param dict user_agent_env: environment dictionary describing any
        relevant environment attributes
    """
    uid = self._login(db, login, password)
    if uid == openerp.SUPERUSER_ID:
        # Successfully logged in as admin!
        # Attempt to guess the web base url...
        if user_agent_env and user_agent_env.get('base_location'):
            cr = self.pool.cursor()
            try:
                base = user_agent_env['base_location']
                ICP = self.pool['ir.config_parameter']
                # only record the url when it is not explicitly frozen
                if not ICP.get_param(cr, uid, 'web.base.url.freeze'):
                    ICP.set_param(cr, uid, 'web.base.url', base)
                cr.commit()
            except Exception:
                _logger.exception("Failed to update web.base.url configuration parameter")
            finally:
                cr.close()
    return uid
def check(self, db, uid, passwd):
    """Verifies that the given (uid, password) is authorized for the database ``db`` and
       raise an exception if it is not.

    Successful checks are cached in _uid_cache so subsequent RPC calls
    skip the database round-trip.
    """
    if not passwd:
        # empty passwords disallowed for obvious security reasons
        raise openerp.exceptions.AccessDenied()
    if self._uid_cache.get(db, {}).get(uid) == passwd:
        return
    cr = self.pool.cursor()
    try:
        self.check_credentials(cr, uid, passwd)
        # dict.has_key() is deprecated; setdefault creates the per-database
        # cache on first use and is a single lookup.
        self._uid_cache.setdefault(db, {})[uid] = passwd
    finally:
        cr.close()
def change_password(self, cr, uid, old_passwd, new_passwd, context=None):
    """Change current user password. Old password must be provided explicitly
    to prevent hijacking an existing user session, or for cases where the cleartext
    password is not used to authenticate requests.

    :return: True
    :raise: openerp.exceptions.AccessDenied when old password is wrong
    :raise: except_osv when new password is not set or empty
    """
    # verify the old password first (raises AccessDenied on mismatch)
    self.check(cr.dbname, uid, old_passwd)
    if new_passwd:
        return self.write(cr, uid, uid, {'password': new_passwd})
    raise osv.except_osv(_('Warning!'), _("Setting empty passwords is not allowed for security reasons!"))
def preference_save(self, cr, uid, ids, context=None):
    # Client action: reload the session context after preferences change.
    return dict(type='ir.actions.client', tag='reload_context')

def preference_change_password(self, cr, uid, ids, context=None):
    # Client action: open the change-password wizard in a new dialog.
    return dict(type='ir.actions.client', tag='change_password', target='new')
@tools.ormcache(skiparg=2)
def has_group(self, cr, uid, group_ext_id):
    """Checks whether user belongs to given group.

    :param str group_ext_id: external ID (XML ID) of the group.
       Must be provided in fully-qualified form (``module.ext_id``), as there
       is no implicit module to use..
    :return: True if the current user is a member of the group with the
       given external ID (XML ID), else False.
    """
    assert group_ext_id and '.' in group_ext_id, "External ID must be fully qualified"
    module, ext_id = group_ext_id.split('.')
    # direct SQL (instead of ORM search) so the result can be ormcache'd
    # without record-rule interference
    cr.execute("""SELECT 1 FROM res_groups_users_rel WHERE uid=%s AND gid IN
                    (SELECT res_id FROM ir_model_data WHERE module=%s AND name=%s)""",
               (uid, module, ext_id))
    return bool(cr.fetchone())
#----------------------------------------------------------
# Implied groups
#
# Extension of res.groups and res.users with a relation for "implied"
# or "inherited" groups. Once a user belongs to a group, it
# automatically belongs to the implied groups (transitively).
#----------------------------------------------------------
class cset(object):
    """ A cset (constrained set) is a set of elements that may be constrained to
        be a subset of other csets. Elements added to a cset are automatically
        added to its supersets. Cycles in the subset constraints are supported.
    """
    def __init__(self, xs):
        self.supersets = set()
        self.elements = set(xs)

    def subsetof(self, other):
        """Constrain self to be a subset of `other`, pushing current elements up."""
        if other is self:
            return
        self.supersets.add(other)
        other.update(self.elements)

    def update(self, xs):
        """Add elements to self and propagate them to every superset."""
        fresh = set(xs) - self.elements
        if not fresh:
            # nothing new; this is also the base case that stops cycles
            return
        self.elements.update(fresh)
        for superset in self.supersets:
            superset.update(fresh)

    def __iter__(self):
        return iter(self.elements)

# flatten an iterable of iterables into a single iterator
concat = itertools.chain.from_iterable
class groups_implied(osv.osv):
    # Extension of res.groups with "implied" (inherited) groups: belonging
    # to a group transitively grants all of its implied groups.
    _inherit = 'res.groups'

    def _get_trans_implied(self, cr, uid, ids, field, arg, context=None):
        "computes the transitive closure of relation implied_ids"
        memo = {} # use a memo for performance and cycle avoidance

        def computed_set(g):
            # cset of all groups transitively implied by group g
            if g not in memo:
                memo[g] = cset(g.implied_ids)
                for h in g.implied_ids:
                    computed_set(h).subsetof(memo[g])
            return memo[g]

        res = {}
        for g in self.browse(cr, SUPERUSER_ID, ids, context):
            res[g.id] = map(int, computed_set(g))
        return res

    _columns = {
        'implied_ids': fields.many2many('res.groups', 'res_groups_implied_rel', 'gid', 'hid',
            string='Inherits', help='Users of this group automatically inherit those groups'),
        'trans_implied_ids': fields.function(_get_trans_implied,
            type='many2many', relation='res.groups', string='Transitively inherits'),
    }

    def create(self, cr, uid, values, context=None):
        users = values.pop('users', None)
        gid = super(groups_implied, self).create(cr, uid, values, context)
        if users:
            # delegate addition of users to add implied groups
            self.write(cr, uid, [gid], {'users': users}, context)
        return gid

    def write(self, cr, uid, ids, values, context=None):
        res = super(groups_implied, self).write(cr, uid, ids, values, context)
        if values.get('users') or values.get('implied_ids'):
            # add all implied groups (to all users of each group)
            for g in self.browse(cr, uid, ids, context=context):
                gids = map(int, g.trans_implied_ids)
                vals = {'users': [(4, u.id) for u in g.users]}
                super(groups_implied, self).write(cr, uid, gids, vals, context)
        return res
class users_implied(osv.osv):
    # Extension of res.users: whenever a user's groups change, the implied
    # groups are added as well, and the (group-dependent) view cache cleared.
    _inherit = 'res.users'

    def create(self, cr, uid, values, context=None):
        groups = values.pop('groups_id', None)
        user_id = super(users_implied, self).create(cr, uid, values, context)
        if groups:
            # delegate addition of groups to add implied groups
            self.write(cr, uid, [user_id], {'groups_id': groups}, context)
            self.pool['ir.ui.view'].clear_cache()
        return user_id

    def write(self, cr, uid, ids, values, context=None):
        if not isinstance(ids,list):
            ids = [ids]
        res = super(users_implied, self).write(cr, uid, ids, values, context)
        if values.get('groups_id'):
            # add implied groups for all users
            for user in self.browse(cr, uid, ids):
                gs = set(concat(g.trans_implied_ids for g in user.groups_id))
                vals = {'groups_id': [(4, g.id) for g in gs]}
                super(users_implied, self).write(cr, uid, [user.id], vals, context)
            self.pool['ir.ui.view'].clear_cache()
        return res
#----------------------------------------------------------
# Virtual checkbox and selection for res.user form view
#
# Extension of res.groups and res.users for the special groups view in the users
# form. This extension presents groups with selection and boolean widgets:
# - Groups are shown by application, with boolean and/or selection fields.
# Selection fields typically defines a role "Name" for the given application.
# - Uncategorized groups are presented as boolean fields and grouped in a
# section "Others".
#
# The user form view is modified by an inherited view (base.user_groups_view);
# the inherited view replaces the field 'groups_id' by a set of reified group
# fields (boolean or selection fields). The arch of that view is regenerated
# each time groups are changed.
#
# Naming conventions for reified groups fields:
# - boolean field 'in_group_ID' is True iff
# ID is in 'groups_id'
# - selection field 'sel_groups_ID1_..._IDk' is ID iff
# ID is in 'groups_id' and ID is maximal in the set {ID1, ..., IDk}
#----------------------------------------------------------
def name_boolean_group(id):
    """Name of the reified boolean field for a single group id."""
    return 'in_group_%s' % id

def name_selection_groups(ids):
    """Name of the reified selection field for a list of group ids."""
    return 'sel_groups_' + '_'.join(str(gid) for gid in ids)

def is_boolean_group(name):
    """True when `name` is a reified boolean group field."""
    return name.startswith('in_group_')

def is_selection_groups(name):
    """True when `name` is a reified selection group field."""
    return name.startswith('sel_groups_')

def is_reified_group(name):
    """True when `name` is either kind of reified group field."""
    return is_boolean_group(name) or is_selection_groups(name)

def get_boolean_group(name):
    """Group id encoded in a reified boolean field name."""
    return int(name[len('in_group_'):])

def get_selection_groups(name):
    """Group ids encoded in a reified selection field name."""
    return [int(part) for part in name[len('sel_groups_'):].split('_')]
def partition(f, xs):
    "return a pair equivalent to (filter(f, xs), filter(lambda x: not f(x), xs))"
    matched, rest = [], []
    for x in xs:
        bucket = matched if f(x) else rest
        bucket.append(x)
    return matched, rest
def parse_m2m(commands):
    """Return a list of ids corresponding to a many2many value.

    Supported command tuples: (1, id, values) update, (4, id) link,
    (5,) unlink-all, (6, 0, ids) replace; a bare id is taken as-is.
    """
    ids = []
    for command in commands:
        if isinstance(command, (tuple, list)):
            if command[0] in (1, 4):
                # BUGFIX: the record id sits at index 1 — (4, id) tuples have
                # no third element at all, and command[2] of (1, id, values)
                # is the values dict, not the id.
                ids.append(command[1])
            elif command[0] == 5:
                ids = []
            elif command[0] == 6:
                ids = list(command[2])
        else:
            ids.append(command)
    return ids
class groups_view(osv.osv):
_inherit = 'res.groups'
def create(self, cr, uid, values, context=None):
res = super(groups_view, self).create(cr, uid, values, context)
self.update_user_groups_view(cr, uid, context)
return res
def write(self, cr, uid, ids, values, context=None):
res = super(groups_view, self).write(cr, uid, ids, values, context)
self.update_user_groups_view(cr, uid, context)
return res
def unlink(self, cr, uid, ids, context=None):
res = super(groups_view, self).unlink(cr, uid, ids, context)
self.update_user_groups_view(cr, uid, context)
return res
def update_user_groups_view(self, cr, uid, context=None):
# the view with id 'base.user_groups_view' inherits the user form view,
# and introduces the reified group fields
# we have to try-catch this, because at first init the view does not exist
# but we are already creating some basic groups
view = self.pool['ir.model.data'].xmlid_to_object(cr, SUPERUSER_ID, 'base.user_groups_view', context=context)
if view and view.exists() and view._name == 'ir.ui.view':
xml1, xml2 = [], []
xml1.append(E.separator(string=_('Application'), colspan="4"))
for app, kind, gs in self.get_groups_by_application(cr, uid, context):
# hide groups in category 'Hidden' (except to group_no_one)
attrs = {'groups': 'base.group_no_one'} if app and app.xml_id == 'base.module_category_hidden' else {}
if kind == 'selection':
# application name with a selection field
field_name = name_selection_groups(map(int, gs))
xml1.append(E.field(name=field_name, **attrs))
xml1.append(E.newline())
else:
# application separator with boolean fields
app_name = app and app.name or _('Other')
xml2.append(E.separator(string=app_name, colspan="4", **attrs))
for g in gs:
field_name = name_boolean_group(g.id)
xml2.append(E.field(name=field_name, **attrs))
xml = E.field(*(xml1 + xml2), name="groups_id", position="replace")
xml.addprevious(etree.Comment("GENERATED AUTOMATICALLY BY GROUPS"))
xml_content = etree.tostring(xml, pretty_print=True, xml_declaration=True, encoding="utf-8")
view.write({'arch': xml_content})
return True
def get_application_groups(self, cr, uid, domain=None, context=None):
    """ Return the ids of the groups to reify (overridable hook).

    NOTE(review): `context` is accepted but not forwarded to search() —
    presumably harmless here, but confirm against callers.
    """
    return self.search(cr, uid, domain or [])
def get_groups_by_application(self, cr, uid, context=None):
    """ return all groups classified by application (module category), as a list of pairs:
            [(app, kind, [group, ...]), ...],
        where app and group are browse records, and kind is either 'boolean' or 'selection'.
        Applications are given in sequence order.  If kind is 'selection', the groups are
        given in reverse implication order.
    """
    def linearized(gs):
        # Return the groups of `gs` sorted in reverse implication order,
        # or None when implication does not induce a total order on them.
        gs = set(gs)
        # determine sequence order: a group should appear after its implied groups
        order = dict.fromkeys(gs, 0)
        for g in gs:
            for h in gs.intersection(g.trans_implied_ids):
                order[h] -= 1
        # check whether order is total, i.e., sequence orders are distinct
        if len(set(order.itervalues())) == len(gs):
            return sorted(gs, key=lambda g: order[g])
        return None

    # classify all groups by application
    gids = self.get_application_groups(cr, uid, context=context)
    by_app, others = {}, []
    for g in self.browse(cr, uid, gids, context):
        if g.category_id:
            by_app.setdefault(g.category_id, []).append(g)
        else:
            others.append(g)
    # build the result: totally ordered applications become 'selection',
    # the rest (and uncategorized groups) become 'boolean'
    res = []
    apps = sorted(by_app.iterkeys(), key=lambda a: a.sequence or 0)
    for app in apps:
        gs = linearized(by_app[app])
        if gs:
            res.append((app, 'selection', gs))
        else:
            res.append((app, 'boolean', by_app[app]))
    if others:
        res.append((False, 'boolean', others))
    return res
class users_view(osv.osv):
    """ res.users override that translates the reified group fields
    (boolean in_group_XX / selection sel_groups_XX_YY) to and from the
    real many2many field 'groups_id'. """
    _inherit = 'res.users'

    def create(self, cr, uid, values, context=None):
        # turn reified group fields into groups_id commands before creating
        values = self._remove_reified_groups(values)
        return super(users_view, self).create(cr, uid, values, context)

    def write(self, cr, uid, ids, values, context=None):
        # turn reified group fields into groups_id commands before writing
        values = self._remove_reified_groups(values)
        return super(users_view, self).write(cr, uid, ids, values, context)

    def _remove_reified_groups(self, values):
        """ return `values` without reified group fields """
        # add: group ids to link, rem: group ids to unlink
        add, rem = [], []
        values1 = {}
        for key, val in values.iteritems():
            if is_boolean_group(key):
                (add if val else rem).append(get_boolean_group(key))
            elif is_selection_groups(key):
                # deselect all groups of the selection, reselect `val` only
                rem += get_selection_groups(key)
                if val:
                    add.append(val)
            else:
                values1[key] = val
        if 'groups_id' not in values and (add or rem):
            # remove group ids in `rem` and add group ids in `add`
            # ((3, id) unlinks, (4, id) links on a many2many field)
            values1['groups_id'] = zip(repeat(3), rem) + zip(repeat(4), add)
        return values1

    def default_get(self, cr, uid, fields, context=None):
        # split requested fields into reified group fields and real fields
        group_fields, fields = partition(is_reified_group, fields)
        fields1 = (fields + ['groups_id']) if group_fields else fields
        values = super(users_view, self).default_get(cr, uid, fields1, context)
        self._add_reified_groups(group_fields, values)

        # add "default_groups_ref" inside the context to set default value for group_id with xml values
        # NOTE(review): context.get() assumes context is a dict here — a None
        # context would raise; confirm callers always pass one.
        if 'groups_id' in fields and isinstance(context.get("default_groups_ref"), list):
            groups = []
            ir_model_data = self.pool.get('ir.model.data')
            for group_xml_id in context["default_groups_ref"]:
                group_split = group_xml_id.split('.')
                if len(group_split) != 2:
                    raise osv.except_osv(_('Invalid context value'), _('Invalid context default_groups_ref value (model.name_id) : "%s"') % group_xml_id)
                try:
                    temp, group_id = ir_model_data.get_object_reference(cr, uid, group_split[0], group_split[1])
                except ValueError:
                    # unknown xml id: keep a False placeholder
                    group_id = False
                groups += [group_id]
            values['groups_id'] = groups
        return values

    def read(self, cr, uid, ids, fields=None, context=None, load='_classic_read'):
        # determine whether reified groups fields are required, and which ones
        fields1 = fields or self.fields_get(cr, uid, context=context).keys()
        group_fields, other_fields = partition(is_reified_group, fields1)

        # read regular fields (other_fields); add 'groups_id' if necessary
        drop_groups_id = False
        if group_fields and fields:
            if 'groups_id' not in other_fields:
                other_fields.append('groups_id')
                drop_groups_id = True
        else:
            other_fields = fields

        res = super(users_view, self).read(cr, uid, ids, other_fields, context=context, load=load)

        # post-process result to add reified group fields
        if group_fields:
            for values in (res if isinstance(res, list) else [res]):
                self._add_reified_groups(group_fields, values)
                if drop_groups_id:
                    values.pop('groups_id', None)
        return res

    def _add_reified_groups(self, fields, values):
        """ add the given reified group fields into `values` """
        gids = set(parse_m2m(values.get('groups_id') or []))
        for f in fields:
            if is_boolean_group(f):
                values[f] = get_boolean_group(f) in gids
            elif is_selection_groups(f):
                # pick the last (most implied) selected group of the selection
                selected = [gid for gid in get_selection_groups(f) if gid in gids]
                values[f] = selected and selected[-1] or False

    def fields_get(self, cr, uid, allfields=None, context=None, write_access=True):
        res = super(users_view, self).fields_get(cr, uid, allfields, context, write_access)
        # add reified groups fields
        for app, kind, gs in self.pool['res.groups'].get_groups_by_application(cr, uid, context):
            if kind == 'selection':
                # selection group field
                tips = ['%s: %s' % (g.name, g.comment) for g in gs if g.comment]
                res[name_selection_groups(map(int, gs))] = {
                    'type': 'selection',
                    'string': app and app.name or _('Other'),
                    'selection': [(False, '')] + [(g.id, g.name) for g in gs],
                    'help': '\n'.join(tips),
                    'exportable': False,
                    'selectable': False,
                }
            else:
                # boolean group fields
                for g in gs:
                    res[name_boolean_group(g.id)] = {
                        'type': 'boolean',
                        'string': g.name,
                        'help': g.comment,
                        'exportable': False,
                        'selectable': False,
                    }
        return res
#----------------------------------------------------------
# change password wizard
#----------------------------------------------------------
class change_password_wizard(osv.TransientModel):
"""
A wizard to manage the change of users' passwords
"""
_name = "change.password.wizard"
_description = "Change Password Wizard"
_columns = {
'user_ids': fields.one2many('change.password.user', 'wizard_id', string='Users'),
}
def _default_user_ids(self, cr, uid, context=None):
if context is None:
context = {}
user_model = self.pool['res.users']
user_ids = context.get('active_model') == 'res.users' and context.get('active_ids') or []
return [
(0, 0, {'user_id': user.id, 'user_login': user.login})
for user in user_model.browse(cr, uid, user_ids, context=context)
]
_defaults = {
'user_ids': _default_user_ids,
}
def change_password_button(self, cr, uid, ids, context=None):
wizard = self.browse(cr, uid, ids, context=context)[0]
need_reload = any(uid == user.user_id.id for user in wizard.user_ids)
line_ids = [user.id for user in wizard.user_ids]
self.pool.get('change.password.user').change_password_button(cr, uid, line_ids, context=context)
if need_reload:
return {
'type': 'ir.actions.client',
'tag': 'reload'
}
return {'type': 'ir.actions.act_window_close'}
class change_password_user(osv.TransientModel):
"""
A model to configure users in the change password wizard
"""
_name = 'change.password.user'
_description = 'Change Password Wizard User'
_columns = {
'wizard_id': fields.many2one('change.password.wizard', string='Wizard', required=True),
'user_id': fields.many2one('res.users', string='User', required=True),
'user_login': fields.char('User Login', readonly=True),
'new_passwd': fields.char('New Password'),
}
_defaults = {
'new_passwd': '',
}
def change_password_button(self, cr, uid, ids, context=None):
for line in self.browse(cr, uid, ids, context=context):
line.user_id.write({'password': line.new_passwd})
# don't keep temporary passwords in the database longer than necessary
self.write(cr, uid, ids, {'new_passwd': False}, context=context)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| danielharbor/openerp | openerp/addons/base/res/res_users.py | Python | agpl-3.0 | 44,634 |
// Copyright 2017 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package storage
import (
"github.com/prometheus/prometheus/model/labels"
)
type noopQuerier struct{}

// NoopQuerier is a Querier that does nothing.
func NoopQuerier() Querier {
	return noopQuerier{}
}

// Select always returns an empty SeriesSet.
func (noopQuerier) Select(bool, *SelectHints, ...*labels.Matcher) SeriesSet {
	return NoopSeriesSet()
}

// LabelValues returns no values, no warnings and no error.
func (noopQuerier) LabelValues(string, ...*labels.Matcher) ([]string, Warnings, error) {
	return nil, nil, nil
}

// LabelNames returns no names, no warnings and no error.
func (noopQuerier) LabelNames(...*labels.Matcher) ([]string, Warnings, error) {
	return nil, nil, nil
}

// Close is a no-op and never fails.
func (noopQuerier) Close() error {
	return nil
}

type noopChunkQuerier struct{}

// NoopChunkedQuerier is a ChunkQuerier that does nothing.
func NoopChunkedQuerier() ChunkQuerier {
	return noopChunkQuerier{}
}

// Select always returns an empty ChunkSeriesSet.
func (noopChunkQuerier) Select(bool, *SelectHints, ...*labels.Matcher) ChunkSeriesSet {
	return NoopChunkedSeriesSet()
}

// LabelValues returns no values, no warnings and no error.
func (noopChunkQuerier) LabelValues(string, ...*labels.Matcher) ([]string, Warnings, error) {
	return nil, nil, nil
}

// LabelNames returns no names, no warnings and no error.
func (noopChunkQuerier) LabelNames(...*labels.Matcher) ([]string, Warnings, error) {
	return nil, nil, nil
}

// Close is a no-op and never fails.
func (noopChunkQuerier) Close() error {
	return nil
}
type noopSeriesSet struct{}

// NoopSeriesSet is a SeriesSet that does nothing.
func NoopSeriesSet() SeriesSet {
	return noopSeriesSet{}
}

// Next reports no further series; the set is always exhausted.
func (noopSeriesSet) Next() bool { return false }

// At returns nil: the set never yields a series.
func (noopSeriesSet) At() Series { return nil }

// Err reports no error.
func (noopSeriesSet) Err() error { return nil }

// Warnings reports no warnings.
func (noopSeriesSet) Warnings() Warnings { return nil }

type noopChunkedSeriesSet struct{}

// NoopChunkedSeriesSet is a ChunkSeriesSet that does nothing.
func NoopChunkedSeriesSet() ChunkSeriesSet {
	return noopChunkedSeriesSet{}
}

// Next reports no further series; the set is always exhausted.
func (noopChunkedSeriesSet) Next() bool { return false }

// At returns nil: the set never yields a series.
func (noopChunkedSeriesSet) At() ChunkSeries { return nil }

// Err reports no error.
func (noopChunkedSeriesSet) Err() error { return nil }

// Warnings reports no warnings.
func (noopChunkedSeriesSet) Warnings() Warnings { return nil }
| grafana/loki | vendor/github.com/prometheus/prometheus/storage/noop.go | GO | agpl-3.0 | 2,503 |
/*
YUI 3.11.0 (build d549e5c)
Copyright 2013 Yahoo! Inc. All rights reserved.
Licensed under the BSD License.
http://yuilibrary.com/license/
*/
YUI.add('tree-openable', function (Y, NAME) {
/*jshint expr:true, onevar:false */
/**
Extension for `Tree` that adds the concept of open/closed state for nodes.
@module tree
@submodule tree-openable
@main tree-openable
**/
/**
Extension for `Tree` that adds the concept of open/closed state for nodes.
@class Tree.Openable
@constructor
@extensionfor Tree
**/
/**
Fired when a node is closed.
@event close
@param {Tree.Node} node Node being closed.
@param {String} src Source of the event.
@preventable _defCloseFn
**/
var EVT_CLOSE = 'close';
/**
Fired when a node is opened.
@event open
@param {Tree.Node} node Node being opened.
@param {String} src Source of the event.
@preventable _defOpenFn
**/
var EVT_OPEN = 'open';
function Openable() {}
Openable.prototype = {
// -- Lifecycle ------------------------------------------------------------
initializer: function () {
    // Mix the node-level openable API into every node created by this tree.
    this.nodeExtensions = this.nodeExtensions.concat(Y.Tree.Node.Openable);
},
// -- Public Methods -------------------------------------------------------
/**
Closes the specified node if it isn't already closed.
@method closeNode
@param {Tree.Node} node Node to close.
@param {Object} [options] Options.
@param {Boolean} [options.silent=false] If `true`, the `close` event
will be suppressed.
@param {String} [options.src] Source of the change, to be passed along
to the event facade of the resulting event. This can be used to
distinguish between changes triggered by a user and changes
triggered programmatically, for example.
@chainable
**/
closeNode: function (node, options) {
if (node.canHaveChildren && node.isOpen()) {
this._fireTreeEvent(EVT_CLOSE, {
node: node,
src : options && options.src
}, {
defaultFn: this._defCloseFn,
silent : options && options.silent
});
}
return this;
},
/**
Opens the specified node if it isn't already open.
@method openNode
@param {Tree.Node} node Node to open.
@param {Object} [options] Options.
@param {Boolean} [options.silent=false] If `true`, the `open` event
will be suppressed.
@param {String} [options.src] Source of the change, to be passed along
to the event facade of the resulting event. This can be used to
distinguish between changes triggered by a user and changes
triggered programmatically, for example.
@chainable
**/
openNode: function (node, options) {
if (node.canHaveChildren && !node.isOpen()) {
this._fireTreeEvent(EVT_OPEN, {
node: node,
src : options && options.src
}, {
defaultFn: this._defOpenFn,
silent : options && options.silent
});
}
return this;
},
/**
Toggles the open/closed state of the specified node, closing it if it's
currently open or opening it if it's currently closed.
@method toggleOpenNode
@param {Tree.Node} node Node to toggle.
@param {Object} [options] Options.
@param {Boolean} [options.silent=false] If `true`, events will be
suppressed.
@param {String} [options.src] Source of the change, to be passed along
to the event facade of the resulting event. This can be used to
distinguish between changes triggered by a user and changes
triggered programmatically, for example.
@chainable
**/
toggleOpenNode: function (node, options) {
return node.isOpen() ? this.closeNode(node, options) :
this.openNode(node, options);
},
// -- Default Event Handlers -----------------------------------------------
/**
Default handler for the `close` event.
@method _defCloseFn
@param {EventFacade} e
@protected
**/
_defCloseFn: function (e) {
    // Close the node by removing its 'open' state flag.
    delete e.node.state.open;
},
/**
Default handler for the `open` event.
@method _defOpenFn
@param {EventFacade} e
@protected
**/
_defOpenFn: function (e) {
    // Mark the node's state as open.
    e.node.state.open = true;
}
};
Y.Tree.Openable = Openable;
/**
@module tree
@submodule tree-openable
**/
/**
`Tree.Node` extension that adds methods useful for nodes in trees that use the
`Tree.Openable` extension.
@class Tree.Node.Openable
@constructor
@extensionfor Tree.Node
**/
function NodeOpenable() {}
NodeOpenable.prototype = {
/**
Closes this node if it's currently open.
@method close
@param {Object} [options] Options.
@param {Boolean} [options.silent=false] If `true`, the `close` event
will be suppressed.
@param {String} [options.src] Source of the change, to be passed along
to the event facade of the resulting event. This can be used to
distinguish between changes triggered by a user and changes
triggered programmatically, for example.
@chainable
**/
close: function (options) {
    // Delegate to the tree, which fires the 'close' event if appropriate.
    this.tree.closeNode(this, options);
    return this;
},
/**
Returns `true` if this node is currently open.
Note: the root node of a tree is always considered to be open.
@method isOpen
@return {Boolean} `true` if this node is currently open, `false` otherwise.
**/
isOpen: function () {
return !!this.state.open || this.isRoot();
},
/**
Opens this node if it's currently closed.
@method open
@param {Object} [options] Options.
@param {Boolean} [options.silent=false] If `true`, the `open` event
will be suppressed.
@param {String} [options.src] Source of the change, to be passed along
to the event facade of the resulting event. This can be used to
distinguish between changes triggered by a user and changes
triggered programmatically, for example.
@chainable
**/
open: function (options) {
    // Delegate to the tree, which fires the 'open' event if appropriate.
    this.tree.openNode(this, options);
    return this;
},
/**
Toggles the open/closed state of this node, closing it if it's currently
open or opening it if it's currently closed.
@method toggleOpen
@param {Object} [options] Options.
@param {Boolean} [options.silent=false] If `true`, events will be
suppressed.
@param {String} [options.src] Source of the change, to be passed along
to the event facade of the resulting event. This can be used to
distinguish between changes triggered by a user and changes
triggered programmatically, for example.
@chainable
**/
toggleOpen: function (options) {
    // Delegate to the tree so the proper open/close event is fired.
    this.tree.toggleOpenNode(this, options);
    return this;
}
};
Y.Tree.Node.Openable = NodeOpenable;
}, '3.11.0', {"requires": ["tree"]});
| devmix/openjst | server/commons/war/src/main/webapp/ui/lib/yui/build/tree-openable/tree-openable.js | JavaScript | agpl-3.0 | 7,103 |
<?php
// Prevent direct access: this file must be reached through a valid
// SugarCRM/SuiteCRM entry point that defines sugarEntry.
if (!defined('sugarEntry') || !sugarEntry) {
    die('Not A Valid Entry Point');
}
/**
*
* SugarCRM Community Edition is a customer relationship management program developed by
* SugarCRM, Inc. Copyright (C) 2004-2013 SugarCRM Inc.
*
* SuiteCRM is an extension to SugarCRM Community Edition developed by SalesAgility Ltd.
* Copyright (C) 2011 - 2018 SalesAgility Ltd.
*
* This program is free software; you can redistribute it and/or modify it under
* the terms of the GNU Affero General Public License version 3 as published by the
* Free Software Foundation with the addition of the following permission added
* to Section 15 as permitted in Section 7(a): FOR ANY PART OF THE COVERED WORK
* IN WHICH THE COPYRIGHT IS OWNED BY SUGARCRM, SUGARCRM DISCLAIMS THE WARRANTY
* OF NON INFRINGEMENT OF THIRD PARTY RIGHTS.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
* details.
*
* You should have received a copy of the GNU Affero General Public License along with
* this program; if not, see http://www.gnu.org/licenses or write to the Free
* Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301 USA.
*
* You can contact SugarCRM, Inc. headquarters at 10050 North Wolfe Road,
* SW2-130, Cupertino, CA 95014, USA. or at email address contact@sugarcrm.com.
*
* The interactive user interfaces in modified source and object code versions
* of this program must display Appropriate Legal Notices, as required under
* Section 5 of the GNU Affero General Public License version 3.
*
* In accordance with Section 7(b) of the GNU Affero General Public License version 3,
* these Appropriate Legal Notices must retain the display of the "Powered by
* SugarCRM" logo and "Supercharged by SuiteCRM" logo. If the display of the logos is not
* reasonably feasible for technical reasons, the Appropriate Legal Notices must
* display the words "Powered by SugarCRM" and "Supercharged by SuiteCRM".
*/
/*********************************************************************************
* Description: TODO: To be written.
* Portions created by SugarCRM are Copyright (C) SugarCRM, Inc.
* All Rights Reserved.
* Contributor(s): ______________________________________..
********************************************************************************/
require_once('include/DetailView/DetailView.php');
require_once('modules/Campaigns/Charts.php');

global $mod_strings;
global $app_strings;
global $app_list_strings;
global $sugar_version, $sugar_config;
global $theme;

$GLOBALS['log']->info("Campaign detail view");

// Template used for the ROI popup rendered at the bottom of this file.
$xtpl=new XTemplate('modules/Campaigns/PopupCampaignRoi.html');

//_pp($_REQUEST['id']);
// SECURITY NOTE(review): $campaign_id comes straight from $_REQUEST and is
// interpolated into the SQL strings below. It should be quoted/validated
// (e.g. via $campaign->db->quote()) — confirm there is no upstream sanitization.
$campaign_id=$_REQUEST['id'];
$campaign = new Campaign();

// ROI summary for this campaign: count and total amount of 'Closed Won'
// opportunities attributed to it, plus cost/budget/revenue figures.
$opp_query1 = "select camp.name, camp.actual_cost,camp.budget,camp.expected_revenue,count(*) opp_count,SUM(opp.amount) as Revenue, SUM(camp.actual_cost) as Investment,
ROUND((SUM(opp.amount) - SUM(camp.actual_cost))/(SUM(camp.actual_cost)), 2)*100 as ROI";
$opp_query1 .= " from opportunities opp";
$opp_query1 .= " right join campaigns camp on camp.id = opp.campaign_id";
$opp_query1 .= " where opp.sales_stage = 'Closed Won' and camp.id='$campaign_id'";
$opp_query1 .= " group by camp.name";
//$opp_query1 .= " and deleted=0";
$opp_result1=$campaign->db->query($opp_query1);
$opp_data1=$campaign->db->fetchByAssoc($opp_result1);

//get the click-throughs
$query_click = "SELECT count(*) hits ";
$query_click.= " FROM campaign_log ";
$query_click.= " WHERE campaign_id = '$campaign_id' AND activity_type='link' AND related_type='CampaignTrackers' AND archived=0 AND deleted=0";
//if $marketing id is specified, then lets filter the chart by the value
if (!empty($marketing_id)) {
    $query_click.= " AND marketing_id ='$marketing_id'";
}
$query_click.= " GROUP BY activity_type, target_type";
$query_click.= " ORDER BY activity_type, target_type";
// NOTE(review): $result is never read after this query in this file — the
// click-through result currently appears unused here.
$result = $campaign->db->query($query_click);

$xtpl->assign("OPP_COUNT", $opp_data1['opp_count']);
$xtpl->assign("ACTUAL_COST", $opp_data1['actual_cost']);
$xtpl->assign("PLANNED_BUDGET", $opp_data1['budget']);
$xtpl->assign("EXPECTED_REVENUE", $opp_data1['expected_revenue']);

$currency = new Currency();
// NOTE(review): $focus is not defined anywhere in this file, so this branch
// depends on the including scope — confirm; otherwise the default-currency
// branch below is taken.
if (isset($focus->currency_id) && !empty($focus->currency_id)) {
    $currency->retrieve($focus->currency_id);
    if ($currency->deleted != 1) {
        $xtpl->assign("CURRENCY", $currency->iso4217 .' '.$currency->symbol);
    } else {
        $xtpl->assign("CURRENCY", $currency->getDefaultISO4217() .' '.$currency->getDefaultCurrencySymbol());
    }
} else {
    $xtpl->assign("CURRENCY", $currency->getDefaultISO4217() .' '.$currency->getDefaultCurrencySymbol());
}

global $current_user;
// Expose the "edit layout" shortcut to admins when edit-in-place is active.
if (is_admin($current_user) && $_REQUEST['module'] != 'DynamicLayout' && !empty($_SESSION['editinplace'])) {
    $xtpl->assign("ADMIN_EDIT", "<a href='index.php?action=index&module=DynamicLayout&from_action=".$_REQUEST['action'] ."&from_module=".$_REQUEST['module'] ."&record=".$_REQUEST['record']. "'>".SugarThemeRegistry::current()->getImage("EditLayout", "border='0' align='bottom'", null, null, '.gif', $mod_strings['LBL_EDIT_LAYOUT'])."</a>");
}

//$detailView->processListNavigation($xtpl, "CAMPAIGN", $offset, $focus->is_AuditEnabled());
// adding custom fields:
//require_once('modules/DynamicFields/templates/Files/DetailView.php');
/* we need to build the dropdown of related marketing values
$latest_marketing_id = '';
$selected_marketing_id = '';
if(isset($_REQUEST['mkt_id'])) $selected_marketing_id = $_REQUEST['mkt_id'];
$options_str = '<option value="all">--None--</option>';
//query for all email marketing records related to this campaign
$latest_marketing_query = "select id, name, date_modified from email_marketing where campaign_id = '$focus->id' order by date_modified desc";
//build string with value(s) retrieved
$result =$campaign->db->query($latest_marketing_query);
if ($row = $campaign->db->fetchByAssoc($result)){
//first, populated the latest marketing id variable, as this
// variable will be used to build chart and subpanels
$latest_marketing_id = $row['id'];
//fill in first option value
$options_str .= '<option value="'. $row['id'] .'"';
// if the marketing id is same as selected marketing id, set this option to render as "selected"
if (!empty($selected_marketing_id) && $selected_marketing_id == $row['id']) {
$options_str .=' selected>'. $row['name'] .'</option>';
// if the marketing id is empty then set this first option to render as "selected"
}elseif(empty($selected_marketing_id)){
$options_str .=' selected>'. $row['name'] .'</option>';
// if the marketing is not empty, but not same as selected marketing id, then..
//.. do not set this option to render as "selected"
}else{
$options_str .='>'. $row['name'] .'</option>';
}
}
//process rest of records, if they exist
while ($row = $campaign->db->fetchByAssoc($result)){
//add to list of option values
$options_str .= '<option value="'. $row['id'] .'"';
//if the marketing id is same as selected marketing id, then set this option to render as "selected"
if (!empty($selected_marketing_id) && $selected_marketing_id == $row['id']) {
$options_str .=' selected>'. $row['name'] .'</option>';
}else{
$options_str .=' >'. $row['name'] .'</option>';
}
}
//populate the dropdown
$xtpl->assign("MKT_DROP_DOWN",$options_str);
*/
//add chart
// Build a date-safe cache file name for the ROI chart XML.
$seps = array("-", "/");
$dates = array(date($GLOBALS['timedate']->dbDayFormat), $GLOBALS['timedate']->dbDayFormat);
$dateFileNameSafe = str_replace($seps, "_", $dates);
$cache_file_name_roi = $current_user->getUserPrivGuid()."_campaign_response_by_roi_".$dateFileNameSafe[0]."_".$dateFileNameSafe[1].".xml";
$chart= new campaign_charts();
//ob_start();
//if marketing id has been selected, then set "latest_marketing_id" to the selected value
//latest marketing id will be passed in to filter the charts and subpanels
if (!empty($selected_marketing_id)) {
    $latest_marketing_id = $selected_marketing_id;
}
// NOTE(review): both branches below issue the identical call — the
// marketing-id filter currently has no effect on the generated chart.
if (empty($latest_marketing_id) || $latest_marketing_id === 'all') {
    $xtpl->assign("MY_CHART_ROI", $chart->campaign_response_roi_popup($app_list_strings['roi_type_dom'], $app_list_strings['roi_type_dom'], $campaign_id, sugar_cached("xml/") . $cache_file_name_roi, true));
} else {
    $xtpl->assign("MY_CHART_ROI", $chart->campaign_response_roi_popup($app_list_strings['roi_type_dom'], $app_list_strings['roi_type_dom'], $campaign_id, sugar_cached("xml/") .$cache_file_name_roi, true));
}
//$output_html .= ob_get_contents();
//ob_end_clean();
//_ppd($xtpl);
//end chart

$xtpl->parse("main");
$xtpl->out("main");
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Renames notifications.Notification.location to 'facility' in three
    # steps: retarget the FK to organizations.Facility while keeping the
    # old verbose name, rename the field, then drop the explicit
    # verbose_name so the field falls back to the default.

    dependencies = [
        ('organizations', '0002_migrate_locations_to_facilities'),
        ('notifications', '0003_auto_20150912_2049'),
    ]

    operations = [
        migrations.AlterField(
            model_name='notification',
            name='location',
            field=models.ForeignKey(verbose_name='facility', to='organizations.Facility'),
        ),
        migrations.RenameField(
            model_name='notification',
            old_name='location',
            new_name='facility',
        ),
        migrations.AlterField(
            model_name='notification',
            name='facility',
            field=models.ForeignKey(to='organizations.Facility'),
        ),
    ]
| alper/volunteer_planner | notifications/migrations/0004_auto_20151003_2033.py | Python | agpl-3.0 | 847 |
/*
Copyright (c) 2003-2017, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or http://ckeditor.com/license
*/
// Afrikaans ('af') UI strings for the 'blockquote' plugin.
CKEDITOR.plugins.setLang( 'blockquote', 'af', {
	toolbar: 'Sitaatblok'
} );
| ging/vish_orange | lib/plugins/ediphy/app/assets/javascripts/lib/ckeditor/plugins/blockquote/blockquote/lang/af.js | JavaScript | agpl-3.0 | 219 |
class AddAppIdToEmails < ActiveRecord::Migration
  # Adds an integer app_id column to emails (no index or FK is created here).
  def change
    add_column :emails, :app_id, :integer
  end
end
| WebsterFolksLabs/cuttlefish | db/migrate/20130425025753_add_app_id_to_emails.rb | Ruby | agpl-3.0 | 114 |
package org.cbioportal.service.impl;
import java.math.BigDecimal;
import java.util.*;
import org.cbioportal.model.*;
import org.cbioportal.model.meta.GenericAssayMeta;
import org.cbioportal.persistence.MolecularDataRepository;
import org.cbioportal.service.GeneService;
import org.cbioportal.service.GenericAssayService;
import org.cbioportal.service.MolecularProfileService;
import org.cbioportal.service.SampleService;
import org.cbioportal.service.exception.MolecularProfileNotFoundException;
import org.cbioportal.service.util.ExpressionEnrichmentUtil;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.Spy;
import org.mockito.junit.MockitoJUnitRunner;
@RunWith(MockitoJUnitRunner.class)
public class ExpressionEnrichmentServiceImplTest extends BaseServiceImplTest {
    // Class under test; Mockito injects the mocks/spies declared below.
    @InjectMocks
    private ExpressionEnrichmentServiceImpl enrichmentServiceImpl;

    @Mock
    private SampleService sampleService;

    @Mock
    private MolecularProfileService molecularProfileService;

    @Mock
    private MolecularDataRepository molecularDataRepository;

    @Mock
    private GeneService geneService;

    // Real utility object (spied, with the mocks above injected) rather than
    // a plain mock, so its actual enrichment logic runs in the tests.
    @Spy
    @InjectMocks
    private ExpressionEnrichmentUtil expressionEnrichmentUtil;

    @Mock
    private GenericAssayService genericAssayService;

    // Shared fixtures populated in setup() and reused by the test methods.
    CancerStudy cancerStudy = new CancerStudy();
    MolecularProfile geneMolecularProfile = new MolecularProfile();
    MolecularProfileSamples molecularProfileSamples = new MolecularProfileSamples();
    List<Sample> samples = new ArrayList<>();
    Map<String, List<MolecularProfileCaseIdentifier>> molecularProfileCaseSets = new HashMap<>();
    Map<String, List<MolecularProfileCaseIdentifier>> molecularProfilePatientLevelCaseSets = new HashMap<>();
    // patient level only data
    public static final String SAMPLE_ID5 = "sample_id5";
    @Before
    public void setup() throws MolecularProfileNotFoundException {
        // Study and molecular profile fixtures shared by all tests.
        cancerStudy.setReferenceGenome(ReferenceGenome.HOMO_SAPIENS_DEFAULT_GENOME_NAME);
        cancerStudy.setCancerStudyIdentifier(STUDY_ID);
        geneMolecularProfile.setCancerStudyIdentifier(STUDY_ID);
        geneMolecularProfile.setStableId(MOLECULAR_PROFILE_ID);
        geneMolecularProfile.setCancerStudy(cancerStudy);

        // The profile covers internal sample ids 1-4.
        molecularProfileSamples.setMolecularProfileId(MOLECULAR_PROFILE_ID);
        molecularProfileSamples.setCommaSeparatedSampleIds("1,2,3,4");

        // Four samples, one patient each (internal ids 1-4).
        Sample sample1 = new Sample();
        sample1.setStableId(SAMPLE_ID1);
        sample1.setInternalId(1);
        sample1.setCancerStudyIdentifier(STUDY_ID);
        sample1.setPatientId(1);
        samples.add(sample1);
        Sample sample2 = new Sample();
        sample2.setStableId(SAMPLE_ID2);
        sample2.setInternalId(2);
        sample2.setCancerStudyIdentifier(STUDY_ID);
        sample2.setPatientId(2);
        samples.add(sample2);
        Sample sample3 = new Sample();
        sample3.setStableId(SAMPLE_ID3);
        sample3.setInternalId(3);
        sample3.setCancerStudyIdentifier(STUDY_ID);
        sample3.setPatientId(3);
        samples.add(sample3);
        Sample sample4 = new Sample();
        sample4.setStableId(SAMPLE_ID4);
        sample4.setInternalId(4);
        sample4.setCancerStudyIdentifier(STUDY_ID);
        sample4.setPatientId(4);
        samples.add(sample4);

        // Case sets: samples 1-2 are "altered", 3-4 "unaltered"; the
        // patient-level variant adds SAMPLE_ID5 to the unaltered group.
        List<MolecularProfileCaseIdentifier> alteredSampleIdentifieres = new ArrayList<>();
        List<MolecularProfileCaseIdentifier> unalteredSampleIdentifieres = new ArrayList<>();
        List<MolecularProfileCaseIdentifier> unalteredPatientLevelSampleIdentifieres = new ArrayList<>();

        MolecularProfileCaseIdentifier caseIdentifier1 = new MolecularProfileCaseIdentifier();
        caseIdentifier1.setMolecularProfileId(MOLECULAR_PROFILE_ID);
        caseIdentifier1.setCaseId(SAMPLE_ID1);
        alteredSampleIdentifieres.add(caseIdentifier1);

        MolecularProfileCaseIdentifier caseIdentifier2 = new MolecularProfileCaseIdentifier();
        caseIdentifier2.setMolecularProfileId(MOLECULAR_PROFILE_ID);
        caseIdentifier2.setCaseId(SAMPLE_ID2);
        alteredSampleIdentifieres.add(caseIdentifier2);

        MolecularProfileCaseIdentifier caseIdentifier3 = new MolecularProfileCaseIdentifier();
        caseIdentifier3.setMolecularProfileId(MOLECULAR_PROFILE_ID);
        caseIdentifier3.setCaseId(SAMPLE_ID3);
        unalteredSampleIdentifieres.add(caseIdentifier3);
        unalteredPatientLevelSampleIdentifieres.add(caseIdentifier3);

        MolecularProfileCaseIdentifier caseIdentifier4 = new MolecularProfileCaseIdentifier();
        caseIdentifier4.setMolecularProfileId(MOLECULAR_PROFILE_ID);
        caseIdentifier4.setCaseId(SAMPLE_ID4);
        unalteredSampleIdentifieres.add(caseIdentifier4);
        unalteredPatientLevelSampleIdentifieres.add(caseIdentifier4);

        // patient level only data
        MolecularProfileCaseIdentifier caseIdentifier5 = new MolecularProfileCaseIdentifier();
        caseIdentifier5.setMolecularProfileId(MOLECULAR_PROFILE_ID);
        caseIdentifier5.setCaseId(SAMPLE_ID5);
        unalteredPatientLevelSampleIdentifieres.add(caseIdentifier5);

        molecularProfileCaseSets.put("altered samples", alteredSampleIdentifieres);
        molecularProfileCaseSets.put("unaltered samples", unalteredSampleIdentifieres);
        molecularProfilePatientLevelCaseSets.put("altered samples", alteredSampleIdentifieres);
        molecularProfilePatientLevelCaseSets.put("unaltered samples", unalteredPatientLevelSampleIdentifieres);

        // Common mock stubbing used by the test methods.
        Mockito.when(molecularProfileService.getMolecularProfile(MOLECULAR_PROFILE_ID))
                .thenReturn(geneMolecularProfile);

        Mockito.when(molecularDataRepository.getCommaSeparatedSampleIdsOfMolecularProfile(MOLECULAR_PROFILE_ID))
                .thenReturn(molecularProfileSamples);

        Mockito.when(sampleService.fetchSamples(Arrays.asList(STUDY_ID, STUDY_ID, STUDY_ID, STUDY_ID),
                Arrays.asList(SAMPLE_ID3, SAMPLE_ID4, SAMPLE_ID1, SAMPLE_ID2), "ID")).thenReturn(samples);
    }
@Test
public void getGenomicEnrichments() throws Exception {
geneMolecularProfile.setMolecularAlterationType(MolecularProfile.MolecularAlterationType.MRNA_EXPRESSION);
List<GeneMolecularAlteration> molecularDataList = new ArrayList<GeneMolecularAlteration>();
GeneMolecularAlteration geneMolecularAlteration1 = new GeneMolecularAlteration();
geneMolecularAlteration1.setEntrezGeneId(ENTREZ_GENE_ID_2);
geneMolecularAlteration1.setValues("2,3,2.1,3");
molecularDataList.add(geneMolecularAlteration1);
GeneMolecularAlteration geneMolecularAlteration2 = new GeneMolecularAlteration();
geneMolecularAlteration2.setEntrezGeneId(ENTREZ_GENE_ID_3);
geneMolecularAlteration2.setValues("1.1,5,2.3,3");
molecularDataList.add(geneMolecularAlteration2);
Mockito.when(molecularDataRepository.getGeneMolecularAlterationsIterableFast(MOLECULAR_PROFILE_ID))
.thenReturn(molecularDataList);
List<Gene> expectedGeneList = new ArrayList<>();
Gene gene1 = new Gene();
gene1.setEntrezGeneId(ENTREZ_GENE_ID_2);
gene1.setHugoGeneSymbol(HUGO_GENE_SYMBOL_2);
expectedGeneList.add(gene1);
Gene gene2 = new Gene();
gene2.setEntrezGeneId(ENTREZ_GENE_ID_3);
gene2.setHugoGeneSymbol(HUGO_GENE_SYMBOL_3);
expectedGeneList.add(gene2);
Mockito.when(geneService.fetchGenes(Arrays.asList("2", "3"), "ENTREZ_GENE_ID", "SUMMARY"))
.thenReturn(expectedGeneList);
List<GenomicEnrichment> result = enrichmentServiceImpl.getGenomicEnrichments(MOLECULAR_PROFILE_ID,
molecularProfileCaseSets, EnrichmentType.SAMPLE);
Assert.assertEquals(2, result.size());
GenomicEnrichment expressionEnrichment = result.get(0);
Assert.assertEquals(ENTREZ_GENE_ID_2, expressionEnrichment.getEntrezGeneId());
Assert.assertEquals(HUGO_GENE_SYMBOL_2, expressionEnrichment.getHugoGeneSymbol());
Assert.assertEquals(null, expressionEnrichment.getCytoband());
Assert.assertEquals(2, expressionEnrichment.getGroupsStatistics().size());
GroupStatistics unalteredGroupStats = expressionEnrichment.getGroupsStatistics().get(0);
Assert.assertEquals("unaltered samples", unalteredGroupStats.getName());
Assert.assertEquals(new BigDecimal("2.55"), unalteredGroupStats.getMeanExpression());
Assert.assertEquals(new BigDecimal("0.6363961030678927"), unalteredGroupStats.getStandardDeviation());
GroupStatistics alteredGroupStats = expressionEnrichment.getGroupsStatistics().get(1);
Assert.assertEquals("altered samples", alteredGroupStats.getName());
Assert.assertEquals(new BigDecimal("2.5"), alteredGroupStats.getMeanExpression());
Assert.assertEquals(new BigDecimal("0.7071067811865476"), alteredGroupStats.getStandardDeviation());
Assert.assertEquals(new BigDecimal("0.9475795430163914"), expressionEnrichment.getpValue());
expressionEnrichment = result.get(1);
Assert.assertEquals(ENTREZ_GENE_ID_3, expressionEnrichment.getEntrezGeneId());
Assert.assertEquals(HUGO_GENE_SYMBOL_3, expressionEnrichment.getHugoGeneSymbol());
Assert.assertEquals(null, expressionEnrichment.getCytoband());
Assert.assertEquals(2, expressionEnrichment.getGroupsStatistics().size());
unalteredGroupStats = expressionEnrichment.getGroupsStatistics().get(0);
Assert.assertEquals("unaltered samples", unalteredGroupStats.getName());
Assert.assertEquals(new BigDecimal("2.65"), unalteredGroupStats.getMeanExpression());
Assert.assertEquals(new BigDecimal("0.4949747468305834"), unalteredGroupStats.getStandardDeviation());
alteredGroupStats = expressionEnrichment.getGroupsStatistics().get(1);
Assert.assertEquals("altered samples", alteredGroupStats.getName());
Assert.assertEquals(new BigDecimal("3.05"), alteredGroupStats.getMeanExpression());
Assert.assertEquals(new BigDecimal("2.7577164466275352"), alteredGroupStats.getStandardDeviation());
Assert.assertEquals(new BigDecimal("0.8716148250471419"), expressionEnrichment.getpValue());
}
@Test
public void getGenericAssayEnrichments() throws Exception {
geneMolecularProfile.setMolecularAlterationType(MolecularProfile.MolecularAlterationType.GENERIC_ASSAY);
List<GenericAssayMolecularAlteration> molecularDataList = new ArrayList<GenericAssayMolecularAlteration>();
GenericAssayMolecularAlteration genericAssayMolecularAlteration1 = new GenericAssayMolecularAlteration();
genericAssayMolecularAlteration1.setGenericAssayStableId(HUGO_GENE_SYMBOL_1);
genericAssayMolecularAlteration1.setValues("2,3,2.1,3");
molecularDataList.add(genericAssayMolecularAlteration1);
GenericAssayMolecularAlteration genericAssayMolecularAlteration2 = new GenericAssayMolecularAlteration();
genericAssayMolecularAlteration2.setGenericAssayStableId(HUGO_GENE_SYMBOL_2);
genericAssayMolecularAlteration2.setValues("1.1,5,2.3,3");
molecularDataList.add(genericAssayMolecularAlteration2);
Mockito.when(molecularDataRepository.getGenericAssayMolecularAlterationsIterable(MOLECULAR_PROFILE_ID, null,
"SUMMARY")).thenReturn(molecularDataList);
Mockito.when(genericAssayService.getGenericAssayMetaByStableIdsAndMolecularIds(
Arrays.asList(HUGO_GENE_SYMBOL_1, HUGO_GENE_SYMBOL_2),
Arrays.asList(MOLECULAR_PROFILE_ID, MOLECULAR_PROFILE_ID), "SUMMARY"))
.thenReturn(Arrays.asList(new GenericAssayMeta(HUGO_GENE_SYMBOL_1),
new GenericAssayMeta(HUGO_GENE_SYMBOL_2)));
List<GenericAssayEnrichment> result = enrichmentServiceImpl.getGenericAssayEnrichments(MOLECULAR_PROFILE_ID,
molecularProfileCaseSets, EnrichmentType.SAMPLE);
Assert.assertEquals(2, result.size());
GenericAssayEnrichment genericAssayEnrichment = result.get(0);
Assert.assertEquals(HUGO_GENE_SYMBOL_1, genericAssayEnrichment.getStableId());
Assert.assertEquals(2, genericAssayEnrichment.getGroupsStatistics().size());
GroupStatistics unalteredGroupStats = genericAssayEnrichment.getGroupsStatistics().get(0);
Assert.assertEquals("unaltered samples", unalteredGroupStats.getName());
Assert.assertEquals(new BigDecimal("2.55"), unalteredGroupStats.getMeanExpression());
Assert.assertEquals(new BigDecimal("0.6363961030678927"), unalteredGroupStats.getStandardDeviation());
GroupStatistics alteredGroupStats = genericAssayEnrichment.getGroupsStatistics().get(1);
Assert.assertEquals("altered samples", alteredGroupStats.getName());
Assert.assertEquals(new BigDecimal("2.5"), alteredGroupStats.getMeanExpression());
Assert.assertEquals(new BigDecimal("0.7071067811865476"), alteredGroupStats.getStandardDeviation());
Assert.assertEquals(new BigDecimal("0.9475795430163914"), genericAssayEnrichment.getpValue());
genericAssayEnrichment = result.get(1);
Assert.assertEquals(HUGO_GENE_SYMBOL_2, genericAssayEnrichment.getStableId());
Assert.assertEquals(2, genericAssayEnrichment.getGroupsStatistics().size());
unalteredGroupStats = genericAssayEnrichment.getGroupsStatistics().get(0);
Assert.assertEquals("unaltered samples", unalteredGroupStats.getName());
Assert.assertEquals(new BigDecimal("2.65"), unalteredGroupStats.getMeanExpression());
Assert.assertEquals(new BigDecimal("0.4949747468305834"), unalteredGroupStats.getStandardDeviation());
alteredGroupStats = genericAssayEnrichment.getGroupsStatistics().get(1);
Assert.assertEquals("altered samples", alteredGroupStats.getName());
Assert.assertEquals(new BigDecimal("3.05"), alteredGroupStats.getMeanExpression());
Assert.assertEquals(new BigDecimal("2.7577164466275352"), alteredGroupStats.getStandardDeviation());
Assert.assertEquals(new BigDecimal("0.8716148250471419"), genericAssayEnrichment.getpValue());
}
@Test
public void getGenericAssayPatientLevelEnrichments() throws Exception {
geneMolecularProfile.setMolecularAlterationType(MolecularProfile.MolecularAlterationType.GENERIC_ASSAY);
geneMolecularProfile.setPatientLevel(true);
List<GenericAssayMolecularAlteration> molecularDataList = new ArrayList<GenericAssayMolecularAlteration>();
GenericAssayMolecularAlteration genericAssayMolecularAlteration1 = new GenericAssayMolecularAlteration();
genericAssayMolecularAlteration1.setGenericAssayStableId(HUGO_GENE_SYMBOL_1);
genericAssayMolecularAlteration1.setValues("2,3,2.1,3,3,3");
molecularDataList.add(genericAssayMolecularAlteration1);
GenericAssayMolecularAlteration genericAssayMolecularAlteration2 = new GenericAssayMolecularAlteration();
genericAssayMolecularAlteration2.setGenericAssayStableId(HUGO_GENE_SYMBOL_2);
genericAssayMolecularAlteration2.setValues("1.1,5,2.3,3,3");
molecularDataList.add(genericAssayMolecularAlteration2);
Mockito.when(molecularDataRepository.getGenericAssayMolecularAlterationsIterable(MOLECULAR_PROFILE_ID, null,
"SUMMARY")).thenReturn(molecularDataList);
Mockito.when(genericAssayService.getGenericAssayMetaByStableIdsAndMolecularIds(
Arrays.asList(HUGO_GENE_SYMBOL_1, HUGO_GENE_SYMBOL_2),
Arrays.asList(MOLECULAR_PROFILE_ID, MOLECULAR_PROFILE_ID), "SUMMARY"))
.thenReturn(Arrays.asList(new GenericAssayMeta(HUGO_GENE_SYMBOL_1),
new GenericAssayMeta(HUGO_GENE_SYMBOL_2)));
// add 5th sample which is the second sample of patient 4
Sample sample5 = new Sample();
sample5.setStableId(SAMPLE_ID5);
sample5.setInternalId(5);
sample5.setCancerStudyIdentifier(STUDY_ID);
sample5.setPatientId(4);
samples.add(sample5);
Mockito.when(sampleService.fetchSamples(Arrays.asList(STUDY_ID, STUDY_ID, STUDY_ID, STUDY_ID, STUDY_ID), Arrays.asList(SAMPLE_ID3, SAMPLE_ID4, SAMPLE_ID5, SAMPLE_ID1, SAMPLE_ID2), "ID")).thenReturn(samples);
List<GenericAssayEnrichment> result = enrichmentServiceImpl.getGenericAssayEnrichments(MOLECULAR_PROFILE_ID, molecularProfilePatientLevelCaseSets, EnrichmentType.SAMPLE);
Assert.assertEquals(2, result.size());
GenericAssayEnrichment genericAssayEnrichment = result.get(0);
Assert.assertEquals(HUGO_GENE_SYMBOL_1, genericAssayEnrichment.getStableId());
Assert.assertEquals(2, genericAssayEnrichment.getGroupsStatistics().size());
GroupStatistics unalteredGroupStats = genericAssayEnrichment.getGroupsStatistics().get(0);
Assert.assertEquals("unaltered samples", unalteredGroupStats.getName());
Assert.assertEquals(new BigDecimal("2.55"), unalteredGroupStats.getMeanExpression());
Assert.assertEquals(new BigDecimal("0.6363961030678927"), unalteredGroupStats.getStandardDeviation());
GroupStatistics alteredGroupStats = genericAssayEnrichment.getGroupsStatistics().get(1);
Assert.assertEquals("altered samples", alteredGroupStats.getName());
Assert.assertEquals(new BigDecimal("2.5"), alteredGroupStats.getMeanExpression());
Assert.assertEquals(new BigDecimal("0.7071067811865476"), alteredGroupStats.getStandardDeviation());
Assert.assertEquals(new BigDecimal("0.9475795430163914"), genericAssayEnrichment.getpValue());
genericAssayEnrichment = result.get(1);
Assert.assertEquals(HUGO_GENE_SYMBOL_2, genericAssayEnrichment.getStableId());
Assert.assertEquals(2, genericAssayEnrichment.getGroupsStatistics().size());
unalteredGroupStats = genericAssayEnrichment.getGroupsStatistics().get(0);
Assert.assertEquals("unaltered samples", unalteredGroupStats.getName());
Assert.assertEquals(new BigDecimal("2.65"), unalteredGroupStats.getMeanExpression());
Assert.assertEquals(new BigDecimal("0.4949747468305834"), unalteredGroupStats.getStandardDeviation());
alteredGroupStats = genericAssayEnrichment.getGroupsStatistics().get(1);
Assert.assertEquals("altered samples", alteredGroupStats.getName());
Assert.assertEquals(new BigDecimal("3.05"), alteredGroupStats.getMeanExpression());
Assert.assertEquals(new BigDecimal("2.7577164466275352"), alteredGroupStats.getStandardDeviation());
Assert.assertEquals(new BigDecimal("0.8716148250471419"), genericAssayEnrichment.getpValue());
}
}
| onursumer/cbioportal | service/src/test/java/org/cbioportal/service/impl/ExpressionEnrichmentServiceImplTest.java | Java | agpl-3.0 | 18,869 |
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
from cStringIO import StringIO
import sys
import tempfile
import unittest2 as unittest
import numpy
from nupic.encoders.base import defaultDtype
from nupic.data import SENTINEL_VALUE_FOR_MISSING_DATA
from nupic.data.fieldmeta import FieldMetaType
from nupic.support.unittesthelpers.algorithm_test_helpers import getSeed
from nupic.encoders.random_distributed_scalar import (
RandomDistributedScalarEncoder
)
try:
import capnp
except ImportError:
capnp = None
if capnp:
from nupic.encoders.random_distributed_scalar_capnp import (
RandomDistributedScalarEncoderProto
)
# Disable warnings about accessing protected members
# pylint: disable=W0212
def computeOverlap(x, y):
"""
Given two binary arrays, compute their overlap. The overlap is the number
of bits where x[i] and y[i] are both 1
"""
return (x & y).sum()
def validateEncoder(encoder, subsampling):
"""
Given an encoder, calculate overlaps statistics and ensure everything is ok.
We don't check every possible combination for speed reasons.
"""
for i in range(encoder.minIndex, encoder.maxIndex+1, 1):
for j in range(i+1, encoder.maxIndex+1, subsampling):
if not encoder._overlapOK(i, j):
return False
return True
class RandomDistributedScalarEncoderTest(unittest.TestCase):
"""
Unit tests for RandomDistributedScalarEncoder class.
"""
def testEncoding(self):
"""
Test basic encoding functionality. Create encodings without crashing and
check they contain the correct number of on and off bits. Check some
encodings for expected overlap. Test that encodings for old values don't
change once we generate new buckets.
"""
# Initialize with non-default parameters and encode with a number close to
# the offset
encoder = RandomDistributedScalarEncoder(name="encoder", resolution=1.0,
w=23, n=500, offset=0.0)
e0 = encoder.encode(-0.1)
self.assertEqual(e0.sum(), 23, "Number of on bits is incorrect")
self.assertEqual(e0.size, 500, "Width of the vector is incorrect")
self.assertEqual(encoder.getBucketIndices(0.0)[0], encoder._maxBuckets / 2,
"Offset doesn't correspond to middle bucket")
self.assertEqual(len(encoder.bucketMap), 1, "Number of buckets is not 1")
# Encode with a number that is resolution away from offset. Now we should
# have two buckets and this encoding should be one bit away from e0
e1 = encoder.encode(1.0)
self.assertEqual(len(encoder.bucketMap), 2, "Number of buckets is not 2")
self.assertEqual(e1.sum(), 23, "Number of on bits is incorrect")
self.assertEqual(e1.size, 500, "Width of the vector is incorrect")
self.assertEqual(computeOverlap(e0, e1), 22, "Overlap is not equal to w-1")
# Encode with a number that is resolution*w away from offset. Now we should
# have many buckets and this encoding should have very little overlap with
# e0
e25 = encoder.encode(25.0)
self.assertGreater(len(encoder.bucketMap), 23,
"Number of buckets is not 2")
self.assertEqual(e25.sum(), 23, "Number of on bits is incorrect")
self.assertEqual(e25.size, 500, "Width of the vector is incorrect")
self.assertLess(computeOverlap(e0, e25), 4, "Overlap is too high")
# Test encoding consistency. The encodings for previous numbers
# shouldn't change even though we have added additional buckets
self.assertTrue(numpy.array_equal(e0, encoder.encode(-0.1)),
"Encodings are not consistent - they have changed after new buckets "
"have been created")
self.assertTrue(numpy.array_equal(e1, encoder.encode(1.0)),
"Encodings are not consistent - they have changed after new buckets "
"have been created")
def testMissingValues(self):
"""
Test that missing values and NaN return all zero's.
"""
encoder = RandomDistributedScalarEncoder(name="encoder", resolution=1.0)
empty = encoder.encode(SENTINEL_VALUE_FOR_MISSING_DATA)
self.assertEqual(empty.sum(), 0)
empty = encoder.encode(float("nan"))
self.assertEqual(empty.sum(), 0)
def testResolution(self):
"""
Test that numbers within the same resolution return the same encoding.
Numbers outside the resolution should return different encodings.
"""
encoder = RandomDistributedScalarEncoder(name="encoder", resolution=1.0)
# Since 23.0 is the first encoded number, it will be the offset.
# Since resolution is 1, 22.9 and 23.4 should have the same bucket index and
# encoding.
e23 = encoder.encode(23.0)
e23p1 = encoder.encode(23.1)
e22p9 = encoder.encode(22.9)
e24 = encoder.encode(24.0)
self.assertEqual(e23.sum(), encoder.w)
self.assertEqual((e23 == e23p1).sum(), encoder.getWidth(),
"Numbers within resolution don't have the same encoding")
self.assertEqual((e23 == e22p9).sum(), encoder.getWidth(),
"Numbers within resolution don't have the same encoding")
self.assertNotEqual((e23 == e24).sum(), encoder.getWidth(),
"Numbers outside resolution have the same encoding")
e22p9 = encoder.encode(22.5)
self.assertNotEqual((e23 == e22p9).sum(), encoder.getWidth(),
"Numbers outside resolution have the same encoding")
def testMapBucketIndexToNonZeroBits(self):
"""
Test that mapBucketIndexToNonZeroBits works and that max buckets and
clipping are handled properly.
"""
encoder = RandomDistributedScalarEncoder(resolution=1.0, w=11, n=150)
# Set a low number of max buckets
encoder._initializeBucketMap(10, None)
encoder.encode(0.0)
encoder.encode(-7.0)
encoder.encode(7.0)
self.assertEqual(len(encoder.bucketMap), encoder._maxBuckets,
"_maxBuckets exceeded")
self.assertTrue(
numpy.array_equal(encoder.mapBucketIndexToNonZeroBits(-1),
encoder.bucketMap[0]),
"mapBucketIndexToNonZeroBits did not handle negative"
" index")
self.assertTrue(
numpy.array_equal(encoder.mapBucketIndexToNonZeroBits(1000),
encoder.bucketMap[9]),
"mapBucketIndexToNonZeroBits did not handle negative index")
e23 = encoder.encode(23.0)
e6 = encoder.encode(6)
self.assertEqual((e23 == e6).sum(), encoder.getWidth(),
"Values not clipped correctly during encoding")
ep8 = encoder.encode(-8)
ep7 = encoder.encode(-7)
self.assertEqual((ep8 == ep7).sum(), encoder.getWidth(),
"Values not clipped correctly during encoding")
self.assertEqual(encoder.getBucketIndices(-8)[0], 0,
"getBucketIndices returned negative bucket index")
self.assertEqual(encoder.getBucketIndices(23)[0], encoder._maxBuckets-1,
"getBucketIndices returned bucket index that is too"
" large")
def testParameterChecks(self):
"""
Test that some bad construction parameters get handled.
"""
# n must be >= 6*w
with self.assertRaises(ValueError):
RandomDistributedScalarEncoder(name="mv", resolution=1.0, n=int(5.9*21))
# n must be an int
with self.assertRaises(ValueError):
RandomDistributedScalarEncoder(name="mv", resolution=1.0, n=5.9*21)
# w can't be negative
with self.assertRaises(ValueError):
RandomDistributedScalarEncoder(name="mv", resolution=1.0, w=-1)
# resolution can't be negative
with self.assertRaises(ValueError):
RandomDistributedScalarEncoder(name="mv", resolution=-2)
def testOverlapStatistics(self):
"""
Check that the overlaps for the encodings are within the expected range.
Here we ask the encoder to create a bunch of representations under somewhat
stressful conditions, and then verify they are correct. We rely on the fact
that the _overlapOK and _countOverlapIndices methods are working correctly.
"""
seed = getSeed()
# Generate about 600 encodings. Set n relatively low to increase
# chance of false overlaps
encoder = RandomDistributedScalarEncoder(resolution=1.0, w=11, n=150,
seed=seed)
encoder.encode(0.0)
encoder.encode(-300.0)
encoder.encode(300.0)
self.assertTrue(validateEncoder(encoder, subsampling=3),
"Illegal overlap encountered in encoder")
def testGetMethods(self):
"""
Test that the getWidth, getDescription, and getDecoderOutputFieldTypes
methods work.
"""
encoder = RandomDistributedScalarEncoder(name="theName", resolution=1.0, n=500)
self.assertEqual(encoder.getWidth(), 500,
"getWidth doesn't return the correct result")
self.assertEqual(encoder.getDescription(), [("theName", 0)],
"getDescription doesn't return the correct result")
self.assertEqual(encoder.getDecoderOutputFieldTypes(),
(FieldMetaType.float, ),
"getDecoderOutputFieldTypes doesn't return the correct"
" result")
def testOffset(self):
"""
Test that offset is working properly
"""
encoder = RandomDistributedScalarEncoder(name="encoder", resolution=1.0)
encoder.encode(23.0)
self.assertEqual(encoder._offset, 23.0,
"Offset not specified and not initialized to first input")
encoder = RandomDistributedScalarEncoder(name="encoder", resolution=1.0,
offset=25.0)
encoder.encode(23.0)
self.assertEqual(encoder._offset, 25.0,
"Offset not initialized to specified constructor"
" parameter")
def testSeed(self):
"""
Test that initializing twice with the same seed returns identical encodings
and different when not specified
"""
encoder1 = RandomDistributedScalarEncoder(name="encoder1", resolution=1.0,
seed=42)
encoder2 = RandomDistributedScalarEncoder(name="encoder2", resolution=1.0,
seed=42)
encoder3 = RandomDistributedScalarEncoder(name="encoder3", resolution=1.0,
seed=-1)
encoder4 = RandomDistributedScalarEncoder(name="encoder4", resolution=1.0,
seed=-1)
e1 = encoder1.encode(23.0)
e2 = encoder2.encode(23.0)
e3 = encoder3.encode(23.0)
e4 = encoder4.encode(23.0)
self.assertEqual((e1 == e2).sum(), encoder1.getWidth(),
"Same seed gives rise to different encodings")
self.assertNotEqual((e1 == e3).sum(), encoder1.getWidth(),
"Different seeds gives rise to same encodings")
self.assertNotEqual((e3 == e4).sum(), encoder1.getWidth(),
"seeds of -1 give rise to same encodings")
def testCountOverlapIndices(self):
"""
Test that the internal method _countOverlapIndices works as expected.
"""
# Create a fake set of encodings.
encoder = RandomDistributedScalarEncoder(name="encoder", resolution=1.0,
w=5, n=5*20)
midIdx = encoder._maxBuckets/2
encoder.bucketMap[midIdx-2] = numpy.array(range(3, 8))
encoder.bucketMap[midIdx-1] = numpy.array(range(4, 9))
encoder.bucketMap[midIdx] = numpy.array(range(5, 10))
encoder.bucketMap[midIdx+1] = numpy.array(range(6, 11))
encoder.bucketMap[midIdx+2] = numpy.array(range(7, 12))
encoder.bucketMap[midIdx+3] = numpy.array(range(8, 13))
encoder.minIndex = midIdx - 2
encoder.maxIndex = midIdx + 3
# Indices must exist
with self.assertRaises(ValueError):
encoder._countOverlapIndices(midIdx-3, midIdx-2)
with self.assertRaises(ValueError):
encoder._countOverlapIndices(midIdx-2, midIdx-3)
# Test some overlaps
self.assertEqual(encoder._countOverlapIndices(midIdx-2, midIdx-2), 5,
"_countOverlapIndices didn't work")
self.assertEqual(encoder._countOverlapIndices(midIdx-1, midIdx-2), 4,
"_countOverlapIndices didn't work")
self.assertEqual(encoder._countOverlapIndices(midIdx+1, midIdx-2), 2,
"_countOverlapIndices didn't work")
self.assertEqual(encoder._countOverlapIndices(midIdx-2, midIdx+3), 0,
"_countOverlapIndices didn't work")
def testOverlapOK(self):
"""
Test that the internal method _overlapOK works as expected.
"""
# Create a fake set of encodings.
encoder = RandomDistributedScalarEncoder(name="encoder", resolution=1.0,
w=5, n=5*20)
midIdx = encoder._maxBuckets/2
encoder.bucketMap[midIdx-3] = numpy.array(range(4, 9)) # Not ok with
# midIdx-1
encoder.bucketMap[midIdx-2] = numpy.array(range(3, 8))
encoder.bucketMap[midIdx-1] = numpy.array(range(4, 9))
encoder.bucketMap[midIdx] = numpy.array(range(5, 10))
encoder.bucketMap[midIdx+1] = numpy.array(range(6, 11))
encoder.bucketMap[midIdx+2] = numpy.array(range(7, 12))
encoder.bucketMap[midIdx+3] = numpy.array(range(8, 13))
encoder.minIndex = midIdx - 3
encoder.maxIndex = midIdx + 3
self.assertTrue(encoder._overlapOK(midIdx, midIdx-1),
"_overlapOK didn't work")
self.assertTrue(encoder._overlapOK(midIdx-2, midIdx+3),
"_overlapOK didn't work")
self.assertFalse(encoder._overlapOK(midIdx-3, midIdx-1),
"_overlapOK didn't work")
# We'll just use our own numbers
self.assertTrue(encoder._overlapOK(100, 50, 0),
"_overlapOK didn't work for far values")
self.assertTrue(encoder._overlapOK(100, 50, encoder._maxOverlap),
"_overlapOK didn't work for far values")
self.assertFalse(encoder._overlapOK(100, 50, encoder._maxOverlap+1),
"_overlapOK didn't work for far values")
self.assertTrue(encoder._overlapOK(50, 50, 5),
"_overlapOK didn't work for near values")
self.assertTrue(encoder._overlapOK(48, 50, 3),
"_overlapOK didn't work for near values")
self.assertTrue(encoder._overlapOK(46, 50, 1),
"_overlapOK didn't work for near values")
self.assertTrue(encoder._overlapOK(45, 50, encoder._maxOverlap),
"_overlapOK didn't work for near values")
self.assertFalse(encoder._overlapOK(48, 50, 4),
"_overlapOK didn't work for near values")
self.assertFalse(encoder._overlapOK(48, 50, 2),
"_overlapOK didn't work for near values")
self.assertFalse(encoder._overlapOK(46, 50, 2),
"_overlapOK didn't work for near values")
self.assertFalse(encoder._overlapOK(50, 50, 6),
"_overlapOK didn't work for near values")
def testCountOverlap(self):
"""
Test that the internal method _countOverlap works as expected.
"""
encoder = RandomDistributedScalarEncoder(name="encoder", resolution=1.0,
n=500)
r1 = numpy.array([1, 2, 3, 4, 5, 6])
r2 = numpy.array([1, 2, 3, 4, 5, 6])
self.assertEqual(encoder._countOverlap(r1, r2), 6,
"_countOverlap result is incorrect")
r1 = numpy.array([1, 2, 3, 4, 5, 6])
r2 = numpy.array([1, 2, 3, 4, 5, 7])
self.assertEqual(encoder._countOverlap(r1, r2), 5,
"_countOverlap result is incorrect")
r1 = numpy.array([1, 2, 3, 4, 5, 6])
r2 = numpy.array([6, 5, 4, 3, 2, 1])
self.assertEqual(encoder._countOverlap(r1, r2), 6,
"_countOverlap result is incorrect")
r1 = numpy.array([1, 2, 8, 4, 5, 6])
r2 = numpy.array([1, 2, 3, 4, 9, 6])
self.assertEqual(encoder._countOverlap(r1, r2), 4,
"_countOverlap result is incorrect")
r1 = numpy.array([1, 2, 3, 4, 5, 6])
r2 = numpy.array([1, 2, 3])
self.assertEqual(encoder._countOverlap(r1, r2), 3,
"_countOverlap result is incorrect")
r1 = numpy.array([7, 8, 9, 10, 11, 12])
r2 = numpy.array([1, 2, 3, 4, 5, 6])
self.assertEqual(encoder._countOverlap(r1, r2), 0,
"_countOverlap result is incorrect")
def testVerbosity(self):
"""
Test that nothing is printed out when verbosity=0
"""
_stdout = sys.stdout
sys.stdout = _stringio = StringIO()
encoder = RandomDistributedScalarEncoder(name="mv", resolution=1.0,
verbosity=0)
output = numpy.zeros(encoder.getWidth(), dtype=defaultDtype)
encoder.encodeIntoArray(23.0, output)
encoder.getBucketIndices(23.0)
sys.stdout = _stdout
self.assertEqual(len(_stringio.getvalue()), 0,
"zero verbosity doesn't lead to zero output")
def testEncodeInvalidInputType(self):
encoder = RandomDistributedScalarEncoder(name="encoder", resolution=1.0,
verbosity=0)
with self.assertRaises(TypeError):
encoder.encode("String")
@unittest.skipUnless(
capnp, "pycapnp is not installed, skipping serialization test.")
def testWriteRead(self):
original = RandomDistributedScalarEncoder(
name="encoder", resolution=1.0, w=23, n=500, offset=0.0)
originalValue = original.encode(1)
proto1 = RandomDistributedScalarEncoderProto.new_message()
original.write(proto1)
# Write the proto to a temp file and read it back into a new proto
with tempfile.TemporaryFile() as f:
proto1.write(f)
f.seek(0)
proto2 = RandomDistributedScalarEncoderProto.read(f)
encoder = RandomDistributedScalarEncoder.read(proto2)
self.assertIsInstance(encoder, RandomDistributedScalarEncoder)
self.assertEqual(encoder.resolution, original.resolution)
self.assertEqual(encoder.w, original.w)
self.assertEqual(encoder.n, original.n)
self.assertEqual(encoder.name, original.name)
self.assertEqual(encoder.verbosity, original.verbosity)
self.assertEqual(encoder.minIndex, original.minIndex)
self.assertEqual(encoder.maxIndex, original.maxIndex)
encodedFromOriginal = original.encode(1)
encodedFromNew = encoder.encode(1)
self.assertTrue(numpy.array_equal(encodedFromNew, originalValue))
self.assertEqual(original.decode(encodedFromNew),
encoder.decode(encodedFromOriginal))
self.assertEqual(original.random.getSeed(), encoder.random.getSeed())
for key, value in original.bucketMap.items():
self.assertTrue(numpy.array_equal(value, encoder.bucketMap[key]))
if __name__ == "__main__":
unittest.main()
| badlogicmanpreet/nupic | tests/unit/nupic/encoders/random_distributed_scalar_test.py | Python | agpl-3.0 | 19,742 |
/**********************************************************************
*
* GEOS - Geometry Engine Open Source
* http://geos.osgeo.org
*
* Copyright (C) 2011 Sandro Santilli <strk@keybit.net>
* Copyright (C) 2005-2006 Refractions Research Inc.
* Copyright (C) 2001-2002 Vivid Solutions Inc.
*
* This is free software; you can redistribute and/or modify it under
* the terms of the GNU Lesser General Public Licence as published
* by the Free Software Foundation.
* See the COPYING file for more information.
*
**********************************************************************
*
* Last port: geom/LineString.java r320 (JTS-1.12)
*
**********************************************************************/
#include <geos/util/IllegalArgumentException.h>
#include <geos/algorithm/CGAlgorithms.h>
#include <geos/geom/Coordinate.h>
#include <geos/geom/CoordinateSequenceFactory.h>
#include <geos/geom/CoordinateSequence.h>
#include <geos/geom/CoordinateSequenceFilter.h>
#include <geos/geom/CoordinateFilter.h>
#include <geos/geom/Dimension.h>
#include <geos/geom/GeometryFilter.h>
#include <geos/geom/GeometryComponentFilter.h>
#include <geos/geom/GeometryFactory.h>
#include <geos/geom/LineString.h>
#include <geos/geom/Point.h>
#include <geos/geom/MultiPoint.h> // for getBoundary
#include <geos/geom/Envelope.h>
#include <algorithm>
#include <typeinfo>
#include <memory>
#include <cassert>
using namespace std;
using namespace geos::algorithm;
namespace geos {
namespace geom { // geos::geom
/*protected*/
LineString::LineString(const LineString &ls)
:
Geometry(ls),
points(ls.points->clone())
{
//points=ls.points->clone();
}
Geometry*
LineString::reverse() const
{
assert(points.get());
CoordinateSequence* seq = points->clone();
CoordinateSequence::reverse(seq);
assert(getFactory());
return getFactory()->createLineString(seq);
}
/*private*/
void
LineString::validateConstruction()
{
if (points.get()==NULL)
{
points.reset(getFactory()->getCoordinateSequenceFactory()->create());
return;
}
if (points->size()==1)
{
throw util::IllegalArgumentException("point array must contain 0 or >1 elements\n");
}
}
/*protected*/
LineString::LineString(CoordinateSequence *newCoords,
const GeometryFactory *factory)
:
Geometry(factory),
points(newCoords)
{
validateConstruction();
}
/*public*/
LineString::LineString(CoordinateSequence::AutoPtr newCoords,
const GeometryFactory *factory)
:
Geometry(factory),
points(newCoords)
{
validateConstruction();
}
LineString::~LineString()
{
//delete points;
}
CoordinateSequence*
LineString::getCoordinates() const
{
assert(points.get());
return points->clone();
//return points;
}
const CoordinateSequence*
LineString::getCoordinatesRO() const
{
assert(0 != points.get());
return points.get();
}
const Coordinate&
LineString::getCoordinateN(int n) const
{
assert(points.get());
return points->getAt(n);
}
Dimension::DimensionType
LineString::getDimension() const
{
return Dimension::L; // line
}
int
LineString::getCoordinateDimension() const
{
return (int) points->getDimension();
}
int
LineString::getBoundaryDimension() const
{
if (isClosed()) {
return Dimension::False;
}
return 0;
}
bool
LineString::isEmpty() const
{
    assert(points.get());
    return points->isEmpty();
}

size_t
LineString::getNumPoints() const
{
    assert(points.get());
    return points->getSize();
}

// Vertex n wrapped in a newly created Point (factory allocation).
Point*
LineString::getPointN(size_t n) const
{
    assert(getFactory());
    assert(points.get());
    return getFactory()->createPoint(points->getAt(n));
}

// First vertex as a Point, or NULL for an empty line.
Point*
LineString::getStartPoint() const
{
    if (isEmpty()) {
        return NULL;
        //return new Point(NULL,NULL);
    }
    return getPointN(0);
}

// Last vertex as a Point, or NULL for an empty line.
Point*
LineString::getEndPoint() const
{
    if (isEmpty()) {
        return NULL;
        //return new Point(NULL,NULL);
    }
    return getPointN(getNumPoints() - 1);
}

// Closed: first and last vertices coincide in 2D. Empty lines are not closed.
bool
LineString::isClosed() const
{
    if (isEmpty()) {
        return false;
    }
    return getCoordinateN(0).equals2D(getCoordinateN(getNumPoints()-1));
}

// A ring is a closed and simple (non-self-intersecting) line.
bool
LineString::isRing() const
{
    return isClosed() && isSimple();
}

string
LineString::getGeometryType() const
{
    return "LineString";
}
// Boundary under the OGC_SFS MOD2 rule: empty for an empty or closed line,
// otherwise the two endpoints as a MultiPoint. Caller owns the result.
Geometry*
LineString::getBoundary() const
{
    // Both the empty and the closed case yield an empty MultiPoint.
    if (isEmpty() || isClosed()) {
        return getFactory()->createMultiPoint();
    }

    vector<Geometry*> *endpoints = new vector<Geometry*>();
    endpoints->push_back(getStartPoint());
    endpoints->push_back(getEndPoint());
    return getFactory()->createMultiPoint(endpoints);
}
// True when pt exactly matches one of the line's vertices (linear scan).
bool
LineString::isCoordinate(Coordinate& pt) const
{
    assert(points.get());
    const std::size_t n = points->getSize();
    for (std::size_t idx = 0; idx < n; ++idx) {
        if (points->getAt(idx) == pt)
            return true;
    }
    return false;
}
/*protected*/
// Computes the bounding box of all vertices in a single pass.
Envelope::AutoPtr
LineString::computeEnvelopeInternal() const
{
    if (isEmpty()) {
        // We don't return NULL here
        // as it would indicate "unknown"
        // envelope. In this case we
        // *know* the envelope is EMPTY.
        return Envelope::AutoPtr(new Envelope());
    }

    assert(points.get());
    // Seed min/max with the first vertex, then widen with the rest.
    const Coordinate& c=points->getAt(0);
    double minx = c.x;
    double miny = c.y;
    double maxx = c.x;
    double maxy = c.y;
    std::size_t npts=points->getSize();
    for (std::size_t i=1; i<npts; i++) {
        const Coordinate &c=points->getAt(i);
        minx = minx < c.x ? minx : c.x;
        maxx = maxx > c.x ? maxx : c.x;
        miny = miny < c.y ? miny : c.y;
        maxy = maxy > c.y ? maxy : c.y;
    }

    // caller expects a newly allocated Envelope.
    // this function won't be called twice, unless
    // cached Envelope is invalidated (set to NULL)
    return Envelope::AutoPtr(new Envelope(minx, maxx, miny, maxy));
}
bool
LineString::equalsExact(const Geometry *other, double tolerance) const
{
if (!isEquivalentClass(other)) {
return false;
}
const LineString *otherLineString=dynamic_cast<const LineString*>(other);
assert(otherLineString);
size_t npts=points->getSize();
if (npts!=otherLineString->points->getSize()) {
return false;
}
for (size_t i=0; i<npts; ++i) {
if (!equal(points->getAt(i),otherLineString->points->getAt(i),tolerance)) {
return false;
}
}
return true;
}
// Mutating coordinate filter, applied to every vertex.
void
LineString::apply_rw(const CoordinateFilter *filter)
{
    assert(points.get());
    points->apply_rw(filter);
}

// Read-only coordinate filter, applied to every vertex.
void
LineString::apply_ro(CoordinateFilter *filter) const
{
    assert(points.get());
    points->apply_ro(filter);
}

void LineString::apply_rw(GeometryFilter *filter)
{
    assert(filter);
    filter->filter_rw(this);
}

void LineString::apply_ro(GeometryFilter *filter) const
{
    assert(filter);
    filter->filter_ro(this);
}
/*public*/
// Normalizes vertex order: reverses the sequence if, at the first position
// where the line differs from its own reverse, the forward vertex compares
// greater. This gives every line a canonical direction.
void
LineString::normalize()
{
    assert(points.get());
    std::size_t npts=points->getSize();
    std::size_t n=npts/2;
    for (std::size_t i=0; i<n; i++) {
        std::size_t j = npts - 1 - i;
        // Skip positions where the line is palindromic.
        if (!(points->getAt(i)==points->getAt(j))) {
            if (points->getAt(i).compareTo(points->getAt(j)) > 0) {
                CoordinateSequence::reverse(points.get());
            }
            return;
        }
    }
}
int
LineString::compareToSameClass(const Geometry *ls) const
{
const LineString *line=dynamic_cast<const LineString*>(ls);
assert(line);
// MD - optimized implementation
std::size_t mynpts=points->getSize();
std::size_t othnpts=line->points->getSize();
if ( mynpts > othnpts ) return 1;
if ( mynpts < othnpts ) return -1;
for (std::size_t i=0; i<mynpts; i++)
{
int cmp=points->getAt(i).compareTo(line->points->getAt(i));
if (cmp) return cmp;
}
return 0;
}
// Representative coordinate: the first vertex, or NULL if empty.
const Coordinate*
LineString::getCoordinate() const
{
    if (isEmpty()) return NULL;
    return &(points->getAt(0));
}

double
LineString::getLength() const
{
    return CGAlgorithms::length(points.get());
}

void
LineString::apply_rw(GeometryComponentFilter *filter)
{
    assert(filter);
    filter->filter_rw(this);
}

void
LineString::apply_ro(GeometryComponentFilter *filter) const
{
    assert(filter);
    filter->filter_ro(this);
}

// Sequence filter that may mutate vertices; fires geometryChanged() if the
// filter reports a modification.
void
LineString::apply_rw(CoordinateSequenceFilter& filter)
{
    size_t npts=points->size();
    if (!npts) return;
    for (size_t i = 0; i<npts; ++i)
    {
        filter.filter_rw(*points, i);
        if (filter.isDone()) break;
    }
    if (filter.isGeometryChanged()) geometryChanged();
}

// Read-only variant: never invalidates cached state.
void
LineString::apply_ro(CoordinateSequenceFilter& filter) const
{
    size_t npts=points->size();
    if (!npts) return;
    for (size_t i = 0; i<npts; ++i)
    {
        filter.filter_ro(*points, i);
        if (filter.isDone()) break;
    }
    //if (filter.isGeometryChanged()) geometryChanged();
}

GeometryTypeId
LineString::getGeometryTypeId() const
{
    return GEOS_LINESTRING;
}
} // namespace geos::geom
} // namespace geos
| manisandro/libgeos | src/geom/LineString.cpp | C++ | lgpl-2.1 | 8,533 |
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyIlmbase(AutotoolsPackage):
    """The PyIlmBase libraries provides python bindings for the IlmBase libraries."""

    homepage = "https://github.com/AcademySoftwareFoundation/openexr/tree/v2.3.0/PyIlmBase"
    url = "https://github.com/AcademySoftwareFoundation/openexr/releases/download/v2.3.0/pyilmbase-2.3.0.tar.gz"

    version('2.3.0', sha256='9c898bb16e7bc916c82bebdf32c343c0f2878fc3eacbafa49937e78f2079a425')

    depends_on('ilmbase')
    depends_on('boost+python')

    # https://github.com/AcademySoftwareFoundation/openexr/issues/336
    parallel = False

    def configure_args(self):
        # Boost names its Python library after the major.minor interpreter
        # version, e.g. boost_python38 for Python 3.8.
        py_version = self.spec['python'].version.up_to(2).joined
        return ['--with-boost-python-libname=boost_python{0}'.format(py_version)]
| LLNL/spack | var/spack/repos/builtin/packages/py-ilmbase/package.py | Python | lgpl-2.1 | 1,026 |
// ---------------------------------------------------------------------
//
// Copyright (C) 2004 - 2015 by the deal.II authors
//
// This file is part of the deal.II library.
//
// The deal.II library is free software; you can use it, redistribute
// it, and/or modify it under the terms of the GNU Lesser General
// Public License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
// The full text of the license can be found in the file LICENSE at
// the top level of the deal.II distribution.
//
// ---------------------------------------------------------------------
// check TrilinosWrappers::SparseMatrix::residual
#include "../tests.h"
#include <deal.II/base/utilities.h>
#include <deal.II/lac/trilinos_vector.h>
#include <deal.II/lac/trilinos_sparse_matrix.h>
#include <iostream>
#include <vector>
// Fills a dense Trilinos matrix with m(i,j)=i+2j and vectors v(i)=i,
// w(i)=i+1, then checks SparseMatrix::residual: x = w - M*v, with the
// returned scalar equal to the l2 norm of x.
void test (TrilinosWrappers::MPI::Vector &v,
           TrilinosWrappers::MPI::Vector &w,
           TrilinosWrappers::MPI::Vector &x)
{
  TrilinosWrappers::SparseMatrix m(v.size(),v.size(),v.size());
  for (unsigned int i=0; i<m.m(); ++i)
    for (unsigned int j=0; j<m.m(); ++j)
      m.set (i,j, i+2*j);

  for (unsigned int i=0; i<v.size(); ++i)
    {
      v(i) = i;
      w(i) = i+1;
    }

  // Finalize matrix and vectors before any matrix-vector products.
  m.compress (VectorOperation::insert);
  v.compress (VectorOperation::insert);
  w.compress (VectorOperation::insert);

  // x=w-Mv
  const double s = m.residual (x, v, w);

  // make sure we get the expected result
  for (unsigned int i=0; i<v.size(); ++i)
    {
      AssertThrow (v(i) == i, ExcInternalError());
      AssertThrow (w(i) == i+1, ExcInternalError());

      // Expected residual entry: (i+1) - sum_j (i+2j)*j.
      double result = i+1;
      for (unsigned int j=0; j<m.m(); ++j)
        result -= (i+2*j)*j;

      AssertThrow (x(i) == result, ExcInternalError());
    }

  AssertThrow (s == x.l2_norm(), ExcInternalError());

  deallog << "OK" << std::endl;
}
// Sets up MPI, creates three 100-element vectors and runs the residual
// check, converting any exception into a nonzero exit code.
int main (int argc, char **argv)
{
  initlog();

  Utilities::MPI::MPI_InitFinalize mpi_initialization (argc, argv, testing_max_num_threads());

  try
    {
      {
        TrilinosWrappers::MPI::Vector v;
        v.reinit(complete_index_set(100), MPI_COMM_WORLD);
        TrilinosWrappers::MPI::Vector w;
        w.reinit(complete_index_set(100), MPI_COMM_WORLD);
        TrilinosWrappers::MPI::Vector x;
        x.reinit(complete_index_set(100), MPI_COMM_WORLD);
        test (v,w,x);
      }
    }
  catch (std::exception &exc)
    {
      std::cerr << std::endl << std::endl
                << "----------------------------------------------------"
                << std::endl;
      std::cerr << "Exception on processing: " << std::endl
                << exc.what() << std::endl
                << "Aborting!" << std::endl
                << "----------------------------------------------------"
                << std::endl;

      return 1;
    }
  catch (...)
    {
      std::cerr << std::endl << std::endl
                << "----------------------------------------------------"
                << std::endl;
      std::cerr << "Unknown exception!" << std::endl
                << "Aborting!" << std::endl
                << "----------------------------------------------------"
                << std::endl;
      return 1;
    };
}
| kalj/dealii | tests/trilinos/sparse_matrix_vector_07.cc | C++ | lgpl-2.1 | 3,251 |
//* This file is part of the MOOSE framework
//* https://www.mooseframework.org
//*
//* All rights reserved, see COPYRIGHT for full restrictions
//* https://github.com/idaholab/moose/blob/master/COPYRIGHT
//*
//* Licensed under LGPL 2.1, please see LICENSE for details
//* https://www.gnu.org/licenses/lgpl-2.1.html
#include "AdaptivityAction.h"
#ifdef LIBMESH_ENABLE_AMR
#include "FEProblem.h"
#include "NonlinearSystemBase.h"
#include "Adaptivity.h"
#include "Executioner.h"
#include "MooseEnum.h"
#include "MooseVariableFE.h"
#include "RelationshipManager.h"
// libMesh includes
#include "libmesh/transient_system.h"
#include "libmesh/system_norm.h"
#include "libmesh/enum_norm_type.h"
registerMooseAction("MooseApp", AdaptivityAction, "setup_adaptivity");
registerMooseAction("MooseApp", AdaptivityAction, "add_geometric_rm");
registerMooseAction("MooseApp", AdaptivityAction, "add_algebraic_rm");
defineLegacyParams(AdaptivityAction);
// Declares all input-file parameters controlling mesh adaptivity.
// Fix: corrected the typo "betweeen" in the user-visible doc string of
// the "interval" parameter.
InputParameters
AdaptivityAction::validParams()
{
  InputParameters params = Action::validParams();
  MooseEnum estimators("KellyErrorEstimator LaplacianErrorEstimator PatchRecoveryErrorEstimator",
                       "KellyErrorEstimator");

  params.addParam<unsigned int>(
      "steps", 0, "The number of adaptivity steps to perform at any one time for steady state");
  params.addRangeCheckedParam<unsigned int>(
      "interval", 1, "interval>0", "The number of time steps between each adaptivity phase");
  params.addParam<unsigned int>(
      "initial_adaptivity",
      0,
      "The number of adaptivity steps to perform using the initial conditions");
  params.addParam<Real>("refine_fraction",
                        0.0,
                        "The fraction of elements or error to refine. Should be between 0 and 1.");
  params.addParam<Real>("coarsen_fraction",
                        0.0,
                        "The fraction of elements or error to coarsen. Should be between 0 and 1.");
  params.addParam<unsigned int>(
      "max_h_level",
      0,
      "Maximum number of times a single element can be refined. If 0 then infinite.");
  params.addParam<MooseEnum>(
      "error_estimator", estimators, "The class name of the error estimator you want to use.");
  params.addDeprecatedParam<bool>(
      "print_changed_info",
      false,
      "Determines whether information about the mesh is printed when adaptivity occurs",
      "Use the Console output parameter 'print_mesh_changed_info'");
  params.addParam<Real>("start_time",
                        -std::numeric_limits<Real>::max(),
                        "The time that adaptivity will be active after.");
  params.addParam<Real>("stop_time",
                        std::numeric_limits<Real>::max(),
                        "The time after which adaptivity will no longer be active.");
  params.addParam<std::vector<std::string>>(
      "weight_names", "List of names of variables that will be associated with weight_values");
  params.addParam<std::vector<Real>>(
      "weight_values",
      "List of values between 0 and 1 to weight the associated weight_names error by");
  params.addParam<unsigned int>("cycles_per_step", 1, "The number of adaptivity cycles per step");
  params.addParam<bool>(
      "show_initial_progress", true, "Show the progress of the initial adaptivity");
  params.addParam<bool>(
      "recompute_markers_during_cycles", false, "Recompute markers during adaptivity cycles");
  return params;
}
// Trivial constructor: all behavior lives in act().
AdaptivityAction::AdaptivityAction(InputParameters params) : Action(params) {}
// Dispatches on the current task: registers the algebraic/geometric
// relationship managers needed by adaptivity, or configures the Adaptivity
// object from the parsed input parameters.
void
AdaptivityAction::act()
{
  // Here we are going to mostly mimic the default ghosting in libmesh
  // By default libmesh adds:
  // 1) GhostPointNeighbors on the mesh
  // 2) DefaultCoupling with 1 layer as an algebraic ghosting functor on the dof_map, which also
  //    gets added to the mesh at the time a new System is added
  // 3) DefaultCoupling with 0 layers as a coupling functor on the dof_map, which also gets added to
  //    the mesh at the time a new System is added
  //
  // What we will do differently is:
  // - The 3rd ghosting functor adds nothing so we will not add it at all
  if (_current_task == "add_algebraic_rm")
  {
    auto rm_params = _factory.getValidParams("ElementSideNeighborLayers");

    rm_params.set<std::string>("for_whom") = "Adaptivity";
    rm_params.set<MooseMesh *>("mesh") = _mesh.get();
    rm_params.set<Moose::RelationshipManagerType>("rm_type") =
        Moose::RelationshipManagerType::ALGEBRAIC;

    if (rm_params.areAllRequiredParamsValid())
    {
      auto rm_obj = _factory.create<RelationshipManager>(
          "ElementSideNeighborLayers", "adaptivity_algebraic_ghosting", rm_params);

      // Delete the resources created on behalf of the RM if it ends up not being added to the
      // App.
      if (!_app.addRelationshipManager(rm_obj))
        _factory.releaseSharedObjects(*rm_obj);
    }
    else
      mooseError("Invalid initialization of ElementSideNeighborLayers");
  }
  else if (_current_task == "add_geometric_rm")
  {
    auto rm_params = _factory.getValidParams("MooseGhostPointNeighbors");

    rm_params.set<std::string>("for_whom") = "Adaptivity";
    rm_params.set<MooseMesh *>("mesh") = _mesh.get();
    rm_params.set<Moose::RelationshipManagerType>("rm_type") =
        Moose::RelationshipManagerType::GEOMETRIC;

    if (rm_params.areAllRequiredParamsValid())
    {
      auto rm_obj = _factory.create<RelationshipManager>(
          "MooseGhostPointNeighbors", "adaptivity_geometric_ghosting", rm_params);

      // Delete the resources created on behalf of the RM if it ends up not being added to the
      // App.
      if (!_app.addRelationshipManager(rm_obj))
        _factory.releaseSharedObjects(*rm_obj);
    }
    else
      mooseError("Invalid initialization of MooseGhostPointNeighbors");
  }
  else if (_current_task == "setup_adaptivity")
  {
    NonlinearSystemBase & system = _problem->getNonlinearSystemBase();

    Adaptivity & adapt = _problem->adaptivity();

    // we don't need to run mesh modifiers *again* after they ran already during the mesh
    // splitting process. Adaptivity::init must be called for any adaptivity to work, however, so we
    // can't just skip it for the useSplit case.
    if (_app.isUseSplit())
      adapt.init(0, 0);
    else
      adapt.init(getParam<unsigned int>("steps"), getParam<unsigned int>("initial_adaptivity"));

    adapt.setErrorEstimator(getParam<MooseEnum>("error_estimator"));

    adapt.setParam("cycles_per_step", getParam<unsigned int>("cycles_per_step"));
    adapt.setParam("refine fraction", getParam<Real>("refine_fraction"));
    adapt.setParam("coarsen fraction", getParam<Real>("coarsen_fraction"));
    adapt.setParam("max h-level", getParam<unsigned int>("max_h_level"));
    adapt.setParam("recompute_markers_during_cycles",
                   getParam<bool>("recompute_markers_during_cycles"));

    adapt.setPrintMeshChanged(getParam<bool>("print_changed_info"));

    const std::vector<std::string> & weight_names =
        getParam<std::vector<std::string>>("weight_names");
    const std::vector<Real> & weight_values = getParam<std::vector<Real>>("weight_values");

    auto num_weight_names = weight_names.size();
    auto num_weight_values = weight_values.size();

    if (num_weight_names)
    {
      if (num_weight_names != num_weight_values)
        mooseError("Number of weight_names must be equal to number of weight_values in "
                   "Execution/Adaptivity");

      // If weights have been specified then set the default weight to zero
      std::vector<Real> weights(system.nVariables(), 0);
      for (MooseIndex(num_weight_names) i = 0; i < num_weight_names; i++)
      {
        std::string name = weight_names[i];
        auto value = weight_values[i];

        weights[system.getVariable(0, name).number()] = value;
      }

      // Build an H1-seminorm system norm weighted per-variable.
      std::vector<FEMNormType> norms(system.nVariables(), H1_SEMINORM);

      SystemNorm sys_norm(norms, weights);
      adapt.setErrorNorm(sys_norm);
    }

    adapt.setTimeActive(getParam<Real>("start_time"), getParam<Real>("stop_time"));
    adapt.setInterval(getParam<unsigned int>("interval"));
  }
}
#endif // LIBMESH_ENABLE_AMR
| nuclear-wizard/moose | framework/src/actions/AdaptivityAction.C | C++ | lgpl-2.1 | 8,214 |
//* This file is part of the MOOSE framework
//* https://www.mooseframework.org
//*
//* All rights reserved, see COPYRIGHT for full restrictions
//* https://github.com/idaholab/moose/blob/master/COPYRIGHT
//*
//* Licensed under LGPL 2.1, please see LICENSE for details
//* https://www.gnu.org/licenses/lgpl-2.1.html
#include "FullSolveMultiApp.h"
#include "LayeredSideFluxAverage.h"
#include "Executioner.h"
// libMesh
#include "libmesh/mesh_tools.h"
registerMooseObject("MooseApp", FullSolveMultiApp);
defineLegacyParams(FullSolveMultiApp);
// Input parameters for a MultiApp that runs a full simulation each execute.
InputParameters
FullSolveMultiApp::validParams()
{
  InputParameters params = MultiApp::validParams();
  params.addClassDescription("Performs a complete simulation during each execution.");
  params.addParam<bool>(
      "no_backup_and_restore",
      false,
      "True to turn off backup/restore for this multiapp. This is useful when doing steady-state "
      "Picard iterations where we want to use the solution of previous Picard iteration as the "
      "initial guess of the current Picard iteration");
  params.addParam<bool>(
      "keep_full_output_history",
      false,
      "Whether or not to keep the full output history when this multiapp has multiple entries");
  return params;
}

FullSolveMultiApp::FullSolveMultiApp(const InputParameters & parameters) : MultiApp(parameters) {}
// Capture sub-app state, unless backup/restore is disabled by the user.
void
FullSolveMultiApp::backup()
{
  if (!getParam<bool>("no_backup_and_restore"))
    MultiApp::backup();
}

// Restore sub-app state, unless backup/restore is disabled by the user.
void
FullSolveMultiApp::restore()
{
  if (!getParam<bool>("no_backup_and_restore"))
    MultiApp::restore();
}
// Collects and initializes the Executioner of every local sub-app so that
// solveStep() can run them directly.
void
FullSolveMultiApp::initialSetup()
{
  MultiApp::initialSetup();

  if (_has_an_app)
  {
    // Swap to this MultiApp's communicator while touching sub-apps.
    Moose::ScopedCommSwapper swapper(_my_comm);

    _executioners.resize(_my_num_apps);

    // Grab Executioner from each app
    for (unsigned int i = 0; i < _my_num_apps; i++)
    {
      auto & app = _apps[i];
      Executioner * ex = app->getExecutioner();

      if (!ex)
        mooseError("Executioner does not exist!");

      ex->init();

      _executioners[i] = ex;
    }
  }
}
// Runs each local sub-app's full solve; dt/target_time are ignored because
// every execution is a complete simulation. Returns false if any sub-app's
// last solve failed to converge.
bool
FullSolveMultiApp::solveStep(Real /*dt*/, Real /*target_time*/, bool auto_advance)
{
  if (!auto_advance)
    mooseError("FullSolveMultiApp is not compatible with auto_advance=false");

  if (!_has_an_app)
    return true;

  Moose::ScopedCommSwapper swapper(_my_comm);

  // NOTE(review): rank is only written, never read; the call looks like a
  // vestigial sanity check on the communicator — confirm before removing.
  int rank;
  int ierr;
  ierr = MPI_Comm_rank(_communicator.get(), &rank);
  mooseCheckMPIErr(ierr);

  bool last_solve_converged = true;
  for (unsigned int i = 0; i < _my_num_apps; i++)
  {
    // reset output system if desired
    if (!getParam<bool>("keep_full_output_history"))
      _apps[i]->getOutputWarehouse().reset();

    Executioner * ex = _executioners[i];
    ex->execute();
    if (!ex->lastSolveConverged())
      last_solve_converged = false;
  }

  return last_solve_converged;
}
| nuclear-wizard/moose | framework/src/multiapps/FullSolveMultiApp.C | C++ | lgpl-2.1 | 2,866 |
# Copyright (c) 2005 Ruby-GNOME2 Project Team
# This program is licensed under the same licence as Ruby-GNOME2.
#
# $Id: cairo-self-intersect.rb,v 1.1 2005/10/12 05:38:30 ktou Exp $
=begin
= cairo/Self Intersect
This demo shows how to use GDK and cairo to show cross.
From http://cairographics.org/samples/xxx_self_intersect.html.
=end
require 'common'
module Demo
  # Demo window drawing a self-intersecting stroke with round caps/joins,
  # ported from the cairo "self intersect" sample.
  class CairoSelfIntersect < CairoWindow
    def initialize
      super('cairo self intersect')
    end

    # Draws the cross-shaped path on the given cairo context.
    # Coordinates are in the unit square set up by CairoWindow.
    def draw(cr)
      cr.move_to(0.3, 0.3)
      cr.line_to(0.7, 0.3)

      cr.line_to(0.5, 0.3)
      cr.line_to(0.5, 0.7)

      # Wide stroke makes the self-intersection visible.
      cr.set_line_width(0.22)
      cr.set_line_cap(Cairo::LINE_CAP_ROUND)
      cr.set_line_join(Cairo::LINE_JOIN_ROUND)
      cr.stroke
    end
  end
end
/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Assistant of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "bookmarkfiltermodel.h"
#include "bookmarkitem.h"
#include "bookmarkmodel.h"
// Flat proxy over a BookmarkModel that shows either only folders or only
// bookmarks, depending on the hideBookmarks flag (folders-only by default).
BookmarkFilterModel::BookmarkFilterModel(QObject *parent)
    : QAbstractProxyModel(parent)
    , hideBookmarks(true)
    , sourceModel(0)
{
}
// Replaces the proxied model: disconnects the old source, registers the new
// one with the base class, reconnects all change signals and rebuilds the
// flat index cache.
// Fix: the base class was previously handed the *stale* member pointer
// before reassignment, so QAbstractProxyModel::sourceModel() reported the
// old model; connects were also attempted on a possibly-null source.
void BookmarkFilterModel::setSourceModel(QAbstractItemModel *_sourceModel)
{
    beginResetModel();
    if (sourceModel) {
        disconnect(sourceModel, SIGNAL(dataChanged(QModelIndex, QModelIndex)),
            this, SLOT(changed(QModelIndex, QModelIndex)));

        disconnect(sourceModel, SIGNAL(rowsInserted(QModelIndex, int, int)),
            this, SLOT(rowsInserted(QModelIndex, int, int)));

        disconnect(sourceModel,
            SIGNAL(rowsAboutToBeRemoved(QModelIndex, int, int)), this,
            SLOT(rowsAboutToBeRemoved(QModelIndex, int, int)));

        disconnect(sourceModel, SIGNAL(rowsRemoved(QModelIndex, int, int)),
            this, SLOT(rowsRemoved(QModelIndex, int, int)));

        disconnect(sourceModel, SIGNAL(layoutAboutToBeChanged()), this,
            SLOT(layoutAboutToBeChanged()));

        disconnect(sourceModel, SIGNAL(layoutChanged()), this,
            SLOT(layoutChanged()));

        disconnect(sourceModel, SIGNAL(modelAboutToBeReset()), this,
            SLOT(modelAboutToBeReset()));

        disconnect(sourceModel, SIGNAL(modelReset()), this, SLOT(modelReset()));
    }

    // Store the new source first, then let the base class see it as well.
    sourceModel = qobject_cast<BookmarkModel*> (_sourceModel);
    QAbstractProxyModel::setSourceModel(sourceModel);

    if (sourceModel) {
        connect(sourceModel, SIGNAL(dataChanged(QModelIndex, QModelIndex)), this,
            SLOT(changed(QModelIndex, QModelIndex)));

        connect(sourceModel, SIGNAL(rowsInserted(QModelIndex, int, int)),
            this, SLOT(rowsInserted(QModelIndex, int, int)));

        connect(sourceModel, SIGNAL(rowsAboutToBeRemoved(QModelIndex, int, int)),
            this, SLOT(rowsAboutToBeRemoved(QModelIndex, int, int)));

        connect(sourceModel, SIGNAL(rowsRemoved(QModelIndex, int, int)), this,
            SLOT(rowsRemoved(QModelIndex, int, int)));

        connect(sourceModel, SIGNAL(layoutAboutToBeChanged()), this,
            SLOT(layoutAboutToBeChanged()));

        connect(sourceModel, SIGNAL(layoutChanged()), this,
            SLOT(layoutChanged()));

        connect(sourceModel, SIGNAL(modelAboutToBeReset()), this,
            SLOT(modelAboutToBeReset()));

        connect(sourceModel, SIGNAL(modelReset()), this, SLOT(modelReset()));

        setupCache(sourceModel->index(0, 0, QModelIndex()).parent());
    }
    endResetModel();
}
// The proxy is flat: every cached source index is one top-level row.
int BookmarkFilterModel::rowCount(const QModelIndex &index) const
{
    Q_UNUSED(index)
    return cache.size();
}

// Column count is taken straight from the source model (0 without one).
int BookmarkFilterModel::columnCount(const QModelIndex &index) const
{
    Q_UNUSED(index)
    return sourceModel ? sourceModel->columnCount() : 0;
}

// A valid proxy row indexes directly into the flat cache of source indexes.
QModelIndex BookmarkFilterModel::mapToSource(const QModelIndex &proxyIndex) const
{
    if (!proxyIndex.isValid())
        return QModelIndex();
    const int row = proxyIndex.row();
    if (row < 0 || row >= cache.size())
        return QModelIndex();
    return cache.at(row);
}
// Position of the source index in the cache becomes the proxy row;
// an uncached index maps to an invalid proxy index (indexOf returns -1).
QModelIndex BookmarkFilterModel::mapFromSource(const QModelIndex &sourceIndex) const
{
    return index(cache.indexOf(sourceIndex), 0, QModelIndex());
}

// Flat model: no item has a parent.
QModelIndex BookmarkFilterModel::parent(const QModelIndex &child) const
{
    Q_UNUSED(child)
    return QModelIndex();
}
// Creates a proxy index for (row, column); out-of-range requests and
// requests without a source model yield an invalid index.
QModelIndex BookmarkFilterModel::index(int row, int column,
    const QModelIndex &index) const
{
    Q_UNUSED(index)
    const bool rowOk = row >= 0 && row < cache.count();
    const bool columnOk = column >= 0 && sourceModel
        && column < sourceModel->columnCount();
    return (rowOk && columnOk) ? createIndex(row, 0) : QModelIndex();
}
// The following accessors simply forward to the source model when one is
// set, falling back to inert defaults otherwise.

Qt::DropActions BookmarkFilterModel::supportedDropActions () const
{
    return sourceModel ? sourceModel->supportedDropActions() : Qt::IgnoreAction;
}

Qt::ItemFlags BookmarkFilterModel::flags(const QModelIndex &index) const
{
    return sourceModel ? sourceModel->flags(index) : Qt::NoItemFlags;
}

QVariant BookmarkFilterModel::data(const QModelIndex &index, int role) const
{
    return sourceModel ? sourceModel->data(mapToSource(index), role) : QVariant();
}

bool BookmarkFilterModel::setData(const QModelIndex &index, const QVariant &value,
    int role)
{
    return sourceModel ? sourceModel->setData(mapToSource(index), value, role) : false;
}
// Switch the proxy to bookmark-only mode and rebuild the cache.
void BookmarkFilterModel::filterBookmarks()
{
    if (!sourceModel)
        return;
    beginResetModel();
    hideBookmarks = true;
    setupCache(sourceModel->index(0, 0, QModelIndex()).parent());
    endResetModel();
}

// Switch the proxy to folder-only mode and rebuild the cache.
void BookmarkFilterModel::filterBookmarkFolders()
{
    if (!sourceModel)
        return;
    beginResetModel();
    hideBookmarks = false;
    setupCache(sourceModel->index(0, 0, QModelIndex()).parent());
    endResetModel();
}
// Forward source-model data changes, translated into proxy coordinates.
void BookmarkFilterModel::changed(const QModelIndex &topLeft,
    const QModelIndex &bottomRight)
{
    emit dataChanged(mapFromSource(topLeft), mapFromSource(bottomRight));
}

// Inserts newly created source rows into the flat cache, directly after the
// last folder preceding them, but only when they match the current filter.
void BookmarkFilterModel::rowsInserted(const QModelIndex &parent, int start,
    int end)
{
    if (!sourceModel)
        return;

    QModelIndex cachePrevious = parent;
    if (BookmarkItem *parentItem = sourceModel->itemFromIndex(parent)) {
        BookmarkItem *newItem = parentItem->child(start);

        // iterate over the tree hierarchy to find the previous folder
        for (int i = 0; i < parentItem->childCount(); ++i) {
            if (BookmarkItem *child = parentItem->child(i)) {
                const QModelIndex &tmp = sourceModel->indexFromItem(child);
                if (tmp.data(UserRoleFolder).toBool() && child != newItem)
                    cachePrevious = tmp;
            }
        }

        const QModelIndex &newIndex = sourceModel->indexFromItem(newItem);
        const bool isFolder = newIndex.data(UserRoleFolder).toBool();
        // Only cache items visible under the current filter mode.
        if ((isFolder && hideBookmarks) || (!isFolder && !hideBookmarks)) {
            beginInsertRows(mapFromSource(parent), start, end);
            const int index = cache.indexOf(cachePrevious) + 1;
            if (cache.value(index, QPersistentModelIndex()) != newIndex)
                cache.insert(index, newIndex);
            endInsertRows();
        }
    }
}

// Remembers the source index about to disappear so rowsRemoved() can prune
// the cache after the removal actually happened.
void BookmarkFilterModel::rowsAboutToBeRemoved(const QModelIndex &parent,
    int start, int end)
{
    if (!sourceModel)
        return;

    if (BookmarkItem *parentItem = sourceModel->itemFromIndex(parent)) {
        if (BookmarkItem *child = parentItem->child(start)) {
            indexToRemove = sourceModel->indexFromItem(child);
            if (cache.contains(indexToRemove))
                beginRemoveRows(mapFromSource(parent), start, end);
        }
    }
}

// Completes a removal started in rowsAboutToBeRemoved().
void BookmarkFilterModel::rowsRemoved(const QModelIndex &/*parent*/, int, int)
{
    if (cache.contains(indexToRemove)) {
        cache.removeAll(indexToRemove);
        endRemoveRows();
    }
}

void BookmarkFilterModel::layoutAboutToBeChanged()
{
    // TODO: ???
}

void BookmarkFilterModel::layoutChanged()
{
    // TODO: ???
}

void BookmarkFilterModel::modelAboutToBeReset()
{
    beginResetModel();
}

// Source model was reset: rebuild the cache from scratch.
void BookmarkFilterModel::modelReset()
{
    if (sourceModel)
        setupCache(sourceModel->index(0, 0, QModelIndex()).parent());
    endResetModel();
}

// Rebuilds the flat cache from the source subtree rooted at parent.
void BookmarkFilterModel::setupCache(const QModelIndex &parent)
{
    cache.clear();
    for (int i = 0; i < sourceModel->rowCount(parent); ++i)
        collectItems(sourceModel->index(i, 0, parent));
}

// Depth-first traversal, caching every index that matches the filter mode.
void BookmarkFilterModel::collectItems(const QModelIndex &parent)
{
    if (parent.isValid()) {
        bool isFolder = sourceModel->data(parent, UserRoleFolder).toBool();
        if ((isFolder && hideBookmarks) || (!isFolder && !hideBookmarks))
            cache.append(parent);

        if (sourceModel->hasChildren(parent)) {
            for (int i = 0; i < sourceModel->rowCount(parent); ++i)
                collectItems(sourceModel->index(i, 0, parent));
        }
    }
}
// -- BookmarkTreeModel

// Sort/filter proxy presenting only the folder hierarchy of a BookmarkModel.
BookmarkTreeModel::BookmarkTreeModel(QObject *parent)
    : QSortFilterProxyModel(parent)
{
}

// Show at most a single column (the bookmark title).
int BookmarkTreeModel::columnCount(const QModelIndex &parent) const
{
    return qMin(1, QSortFilterProxyModel::columnCount(parent));
}
bool BookmarkTreeModel::filterAcceptsRow(int row, const QModelIndex &parent) const
{
Q_UNUSED(row)
BookmarkModel *model = qobject_cast<BookmarkModel*> (sourceModel());
if (model->rowCount(parent) > 0
&& model->data(model->index(row, 0, parent), UserRoleFolder).toBool())
return true;
return false;
}
| mer-qt/qttools | src/assistant/assistant/bookmarkfiltermodel.cpp | C++ | lgpl-2.1 | 10,492 |
//
// chat_client.cpp
// ~~~~~~~~~~~~~~~
//
// Copyright (c) 2003-2010 Christopher M. Kohlhoff (chris at kohlhoff dot com)
//
// Distributed under the Boost Software License, Version 1.0. (See accompanying
// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
//
#include <cstdlib>
#include <deque>
#include <iostream>
#include <boost/bind.hpp>
#include <boost/asio.hpp>
#include <boost/thread.hpp>
#include "chat_message.hpp"
using boost::asio::ip::tcp;
typedef std::deque<chat_message> chat_message_queue;
class chat_client
{
public:
chat_client(boost::asio::io_service& io_service,
tcp::resolver::iterator endpoint_iterator)
: io_service_(io_service),
socket_(io_service)
{
tcp::endpoint endpoint = *endpoint_iterator;
socket_.async_connect(endpoint,
boost::bind(&chat_client::handle_connect, this,
boost::asio::placeholders::error, ++endpoint_iterator));
}
void write(const chat_message& msg)
{
io_service_.post(boost::bind(&chat_client::do_write, this, msg));
}
void close()
{
io_service_.post(boost::bind(&chat_client::do_close, this));
}
private:
void handle_connect(const boost::system::error_code& error,
tcp::resolver::iterator endpoint_iterator)
{
if (!error)
{
boost::asio::async_read(socket_,
boost::asio::buffer(read_msg_.data(), chat_message::header_length),
boost::bind(&chat_client::handle_read_header, this,
boost::asio::placeholders::error));
}
else if (endpoint_iterator != tcp::resolver::iterator())
{
socket_.close();
tcp::endpoint endpoint = *endpoint_iterator;
socket_.async_connect(endpoint,
boost::bind(&chat_client::handle_connect, this,
boost::asio::placeholders::error, ++endpoint_iterator));
}
}
void handle_read_header(const boost::system::error_code& error)
{
if (!error && read_msg_.decode_header())
{
boost::asio::async_read(socket_,
boost::asio::buffer(read_msg_.body(), read_msg_.body_length()),
boost::bind(&chat_client::handle_read_body, this,
boost::asio::placeholders::error));
}
else
{
do_close();
}
}
void handle_read_body(const boost::system::error_code& error)
{
if (!error)
{
std::cout.write(read_msg_.body(), read_msg_.body_length());
std::cout << "\n";
boost::asio::async_read(socket_,
boost::asio::buffer(read_msg_.data(), chat_message::header_length),
boost::bind(&chat_client::handle_read_header, this,
boost::asio::placeholders::error));
}
else
{
do_close();
}
}
void do_write(chat_message msg)
{
bool write_in_progress = !write_msgs_.empty();
write_msgs_.push_back(msg);
if (!write_in_progress)
{
boost::asio::async_write(socket_,
boost::asio::buffer(write_msgs_.front().data(),
write_msgs_.front().length()),
boost::bind(&chat_client::handle_write, this,
boost::asio::placeholders::error));
}
}
void handle_write(const boost::system::error_code& error)
{
if (!error)
{
write_msgs_.pop_front();
if (!write_msgs_.empty())
{
boost::asio::async_write(socket_,
boost::asio::buffer(write_msgs_.front().data(),
write_msgs_.front().length()),
boost::bind(&chat_client::handle_write, this,
boost::asio::placeholders::error));
}
}
else
{
do_close();
}
}
// Close the socket. Must run on the io_service thread, which is why
// external callers reach it via io_service_.post() rather than directly.
void do_close()
{
  socket_.close();
}
private:
boost::asio::io_service& io_service_;
tcp::socket socket_;
chat_message read_msg_;
chat_message_queue write_msgs_;
};
int main(int argc, char* argv[])
{
try
{
if (argc != 3)
{
std::cerr << "Usage: chat_client <host> <port>\n";
return 1;
}
boost::asio::io_service io_service;
tcp::resolver resolver(io_service);
tcp::resolver::query query(argv[1], argv[2]);
tcp::resolver::iterator iterator = resolver.resolve(query);
chat_client c(io_service, iterator);
boost::thread t(boost::bind(&boost::asio::io_service::run, &io_service));
char line[chat_message::max_body_length + 1];
while (std::cin.getline(line, chat_message::max_body_length + 1))
{
using namespace std; // For strlen and memcpy.
chat_message msg;
msg.body_length(strlen(line));
memcpy(msg.body(), line, msg.body_length());
msg.encode_header();
c.write(msg);
}
c.close();
t.join();
}
catch (std::exception& e)
{
std::cerr << "Exception: " << e.what() << "\n";
}
return 0;
}
| airsim/tvlsim | test/boost/asio/chat/chat_client.cpp | C++ | lgpl-2.1 | 4,699 |
// ---------------------------------------------------------------------
//
// Copyright (C) 2016 by the deal.II authors
//
// This file is part of the deal.II library.
//
// The deal.II library is free software; you can use it, redistribute
// it, and/or modify it under the terms of the GNU Lesser General
// Public License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
// The full text of the license can be found in the file LICENSE at
// the top level of the deal.II distribution.
//
// ---------------------------------------------------------------------
// Test Legendre expansion in 2D and 3D for a function given using Legendre
// coefficients.
#include "../tests.h"
#include <iostream>
#include <deal.II/base/function.h>
#include <deal.II/base/quadrature_lib.h>
#include <deal.II/grid/tria.h>
#include <deal.II/grid/grid_generator.h>
#include <deal.II/hp/dof_handler.h>
#include <deal.II/fe/fe_series.h>
#include <deal.II/fe/fe_q.h>
#include <deal.II/lac/vector.h>
#include <deal.II/hp/q_collection.h>
#include <deal.II/numerics/vector_tools.h>
#include <gsl/gsl_sf_legendre.h>
using namespace dealii;
// Scalar function defined by a finite tensor-product Legendre expansion:
// the coefficients are supplied up front and value() (specialized per dim
// below) sums the series.
template <int dim>
class LegendreFunction : public Function<dim>
{
public:
  LegendreFunction(const Table<dim,double> &coefficients)
    : dealii::Function<dim>(1),
      coefficients(coefficients)
  {}

  virtual double value(const Point<dim> &point,
                       const unsigned int component = 0) const;

  // Read-only access to the exact coefficients, used by the test driver to
  // compare against the recovered expansion.
  const Table<dim,double> &get_coefficients() const
  {
    return coefficients;
  }

private:
  const Table<dim,double> coefficients;
};
// copy-paste from fe_series.cc
// Evaluate the dim-dimensional tensor-product Legendre basis function with
// the given per-direction indices at the unit-cell point x_q. Mirrors the
// normalization in fe_series.cc: each 1d factor is sqrt(2) * P_l with the
// argument mapped from [0,1] to [-1,1].
template <int dim>
double Lh(const Point<dim> &x_q,
          const TableIndices<dim> &indices)
{
  double value = 1.0;
  for (unsigned int d = 0; d < dim; d++)
    {
      Assert ( (x_q[d] <= 1.0) && (x_q[d] >= 0.),
               ExcMessage("x_q is not in [0,1]" +
                          Utilities::to_string(x_q[d])));
      // Map [0,1] -> [-1,1], the domain of gsl's Legendre polynomials.
      const double x = 2.0*(x_q[d]-0.5);
      value *= sqrt(2.0) * gsl_sf_legendre_Pl (indices[d], x);
    }
  return value;
}
// 2d specialization: sum_{i,j} coefficients(i,j) * L_i(x) * L_j(y).
template <>
double LegendreFunction<2>::value(const dealii::Point<2> &point,
                                  const unsigned int ) const
{
  double sum = 0.0;
  for (unsigned int i = 0; i < coefficients.size(0); i++)
    for (unsigned int j = 0; j < coefficients.size(1); j++)
      sum += coefficients(i,j) * Lh(point, TableIndices<2>(i,j));
  return sum;
}
// 3d specialization: sum_{i,j,k} coefficients(i,j,k) * L_i L_j L_k.
template <>
double LegendreFunction<3>::value(const dealii::Point<3> &point,
                                  const unsigned int ) const
{
  double sum = 0.0;
  for (unsigned int i = 0; i < coefficients.size(0); i++)
    for (unsigned int j = 0; j < coefficients.size(1); j++)
      for (unsigned int k = 0; k < coefficients.size(2); k++)
        sum += coefficients(i,j,k) * Lh(point, TableIndices<3>(i,j,k));
  return sum;
}
// Log all entries of a 2d coefficient table on a single deallog line.
void print(const Table<2,double> &coeff)
{
  for (unsigned int i = 0; i < coeff.size(0); i++)
    for (unsigned int j = 0; j < coeff.size(1); j++)
      deallog << coeff(i,j) << " ";

  deallog << std::endl;
}
// Log all entries of a 3d coefficient table on a single deallog line.
void print(const Table<3,double> &coeff)
{
  for (unsigned int i = 0; i < coeff.size(0); i++)
    for (unsigned int j = 0; j < coeff.size(1); j++)
      for (unsigned int k = 0; k < coeff.size(2); k++)
        deallog << coeff(i,j,k) << " ";

  deallog << std::endl;
}
// Reshape a 2d coefficient table to N x N (dim-generic helper for test()).
void resize(Table<2,double> &coeff, const unsigned int N)
{
  coeff.reinit(N,N);
}
// Reshape a 3d coefficient table to N x N x N (dim-generic helper).
void resize(Table<3,double> &coeff, const unsigned int N)
{
  TableIndices<3> extents;
  for (unsigned int d = 0; d < 3; d++)
    extents[d] = N;
  coeff.reinit(extents);
}
// Interpolate `func` onto a single reference cell using FE_Q(poly_degree),
// expand the interpolant in a Legendre series of matching order, and log
// the recovered coefficients next to the exact ones.
template <int dim>
void test(const LegendreFunction<dim> &func,
          const unsigned int poly_degree)
{
  const unsigned int max_poly = poly_degree+3;
  deallog << "-----------------------------------" << std::endl;
  deallog << dim << "d, p=" << poly_degree << ", max_p=" << max_poly << std::endl;
  deallog << "-----------------------------------" << std::endl;

  Triangulation<dim> triangulation;
  hp::DoFHandler<dim> dof_handler(triangulation);
  hp::FECollection<dim> fe_collection;
  hp::QCollection<dim> quadrature_formula;

  // Populate the collections beyond the requested degree so that the
  // active fe index below is exercised against a non-trivial collection.
  for (unsigned int p = 1; p <= max_poly; p++)
    {
      fe_collection.push_back(FE_Q<dim>(p));
      quadrature_formula.push_back(QGauss<dim>(p+1+5));
    }

  GridGenerator::hyper_cube (triangulation,0.0,1.0); // reference cell

  const unsigned int fe_index = poly_degree-1;
  dof_handler.begin_active()->set_active_fe_index(fe_index);
  dof_handler.distribute_dofs (fe_collection);

  // Interpolate the exact Legendre series onto the FE space.
  Vector<double> values(dof_handler.n_dofs());
  VectorTools::interpolate (dof_handler,func,values);

  const unsigned int N = poly_degree+1;
  FESeries::Legendre<dim> legendre(N,
                                   fe_collection,
                                   quadrature_formula);

  const Table<dim,double> &coeff_in = func.get_coefficients();
  Table<dim,double> coeff_out;
  resize(coeff_out,N);

  // Recover the expansion coefficients from the single cell's dof values.
  Vector<double> local_dof_values;
  typename hp::DoFHandler<dim>::active_cell_iterator
  cell = dof_handler.begin_active();
  {
    const unsigned int cell_n_dofs = cell->get_fe().dofs_per_cell;
    const unsigned int cell_active_fe_index = cell->active_fe_index();
    local_dof_values.reinit (cell_n_dofs);
    cell->get_dof_values (values, local_dof_values);
    legendre.calculate(local_dof_values,
                       cell_active_fe_index,
                       coeff_out);
  }

  deallog << "calculated:" << std::endl;
  print(coeff_out);
  deallog << "exact:" << std::endl;
  print(coeff_in);

  dof_handler.clear();
}
// Driver: run 2d and 3d cases for p=1 and p=2 elements with coefficient
// tables filled with 1, 2, 3, ... in row-major order.
int main ()
{
  std::ofstream logfile("output");
  dealii::deallog.attach(logfile,/*do not print job id*/false);
  dealii::deallog.depth_console(0);

  {
    const unsigned int dim      = 2;
    const unsigned int coeff_1d = 2;
    const unsigned int p        = 1;
    Table<dim,double> coeff_in(coeff_1d,coeff_1d);
    unsigned int count = 0;
    for (unsigned int i = 0; i < coeff_1d; i++)
      for (unsigned int j = 0; j < coeff_1d; j++)
        coeff_in(i,j) = 1.0 + count++;

    LegendreFunction<dim> function(coeff_in);
    test(function,p);
  }

  {
    const unsigned int dim      = 2;
    const unsigned int coeff_1d = 3;
    const unsigned int p        = 2;
    Table<dim,double> coeff_in(coeff_1d,coeff_1d);
    unsigned int count = 0;
    for (unsigned int i = 0; i < coeff_1d; i++)
      for (unsigned int j = 0; j < coeff_1d; j++)
        coeff_in(i,j) = 1.0 + count++;

    LegendreFunction<dim> function(coeff_in);
    test(function,p);
  }

  {
    const unsigned int dim      = 3;
    const unsigned int coeff_1d = 2;
    const unsigned int p        = 1;
    Table<dim,double> coeff_in(coeff_1d,coeff_1d,coeff_1d);
    unsigned int count = 0;
    for (unsigned int i = 0; i < coeff_1d; i++)
      for (unsigned int j = 0; j < coeff_1d; j++)
        for (unsigned int k = 0; k < coeff_1d; k++)
          coeff_in(i,j,k) = 1.0 + count++;

    LegendreFunction<dim> function(coeff_in);
    test(function,p);
  }

  {
    const unsigned int dim      = 3;
    const unsigned int coeff_1d = 3;
    const unsigned int p        = 2;
    Table<dim,double> coeff_in(coeff_1d,coeff_1d,coeff_1d);
    unsigned int count = 0;
    for (unsigned int i = 0; i < coeff_1d; i++)
      for (unsigned int j = 0; j < coeff_1d; j++)
        for (unsigned int k = 0; k < coeff_1d; k++)
          coeff_in(i,j,k) = 1.0 + count++;

    LegendreFunction<dim> function(coeff_in);
    test(function,p);
  }

  dealii::deallog << "Ok" << std::endl;
}
| kalj/dealii | tests/fe/fe_series_05.cc | C++ | lgpl-2.1 | 7,546 |
/*
* eXist Open Source Native XML Database
* Copyright (C) 2001-07 The eXist Project
* http://exist-db.org
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*
* $Id$
*/
package org.exist.management;
import org.exist.management.impl.PerInstanceMBean;
import org.exist.storage.BrokerPool;
import org.exist.util.DatabaseConfigurationException;
/**
* A dummy agent which will be used if JMX is disabled. It just acts as an empty
* placeholder.
*/
public class DummyAgent implements Agent {

    @Override
    public void initDBInstance(final BrokerPool instance) {
        // no-op: JMX monitoring is disabled
    }

    @Override
    public void closeDBInstance(final BrokerPool instance) {
        // no-op: JMX monitoring is disabled
    }

    @Override
    public void addMBean(final PerInstanceMBean mbean) throws DatabaseConfigurationException {
        // no-op: no MBean server to register with
    }

    @Override
    public void changeStatus(final BrokerPool instance, final TaskStatus actualStatus) {
        // no-op: status changes are not tracked
    }

    @Override
    public void updateStatus(final BrokerPool instance, final int percentage) {
        // no-op: progress is not tracked
    }
}
| ljo/exist | src/org/exist/management/DummyAgent.java | Java | lgpl-2.1 | 1,815 |
/*
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.repo.management.subsystems;
import java.io.IOException;
/**
* @author Andy
*
*/
public class LuceneChildApplicationContextFactory extends ChildApplicationContextFactory
{
    /**
     * {@inheritDoc}
     * <p>
     * The initial state starts the child application context immediately.
     *
     * @see org.alfresco.repo.management.subsystems.ChildApplicationContextFactory#createInitialState()
     */
    @Override
    protected PropertyBackedBeanState createInitialState() throws IOException
    {
        return new ApplicationContextState(true);
    }

    /**
     * Destroys the subsystem and then immediately re-initializes it, so the
     * Lucene child context is recycled rather than left permanently down.
     *
     * @param isPermanent whether the shutdown would otherwise be permanent
     */
    @Override
    protected void destroy(boolean isPermanent)
    {
        super.destroy(isPermanent);
        doInit();
    }
}
| Alfresco/alfresco-repository | src/main/java/org/alfresco/repo/management/subsystems/LuceneChildApplicationContextFactory.java | Java | lgpl-3.0 | 1,669 |
/*
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.repo.template;
import java.io.StringReader;
import org.alfresco.error.AlfrescoRuntimeException;
import org.alfresco.model.ContentModel;
import org.alfresco.service.ServiceRegistry;
import org.alfresco.service.cmr.repository.ContentReader;
import org.alfresco.service.cmr.repository.NodeRef;
import org.dom4j.Document;
import org.dom4j.Element;
import org.dom4j.io.SAXReader;
/**
* Provides functionality to execute a Lucene search string and return TemplateNode objects.
*
* @author Kevin Roast
*/
public class LuceneSearchResultsMap extends BaseSearchResultsMap
{
    /**
     * Constructor
     *
     * @param parent   the parent TemplateNode to execute searches from
     * @param services the ServiceRegistry to use
     */
    public LuceneSearchResultsMap(TemplateNode parent, ServiceRegistry services)
    {
        super(parent, services);
    }

    /**
     * Treats the map key as a Lucene query string and executes it.
     *
     * @see org.alfresco.repo.template.BaseTemplateMap#get(java.lang.Object)
     */
    public Object get(Object key)
    {
        final String luceneQuery = key.toString();
        return query(luceneQuery);
    }
}
| Alfresco/alfresco-repository | src/main/java/org/alfresco/repo/template/LuceneSearchResultsMap.java | Java | lgpl-3.0 | 2,177 |
#
# This file is part of Dragonfly.
# (c) Copyright 2007, 2008 by Christo Butcher
# Licensed under the LGPL.
#
# Dragonfly is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Dragonfly is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with Dragonfly. If not, see
# <http://www.gnu.org/licenses/>.
#
"""
Arabic language implementations of Integer and Digits classes
============================================================================
"""
from ..base.integer_internal import (MapIntBuilder, CollectionIntBuilder,
MagnitudeIntBuilder, IntegerContentBase)
from ..base.digits_internal import DigitsContentBase
#---------------------------------------------------------------------------
# Builders mapping spoken-Arabic number words to integer values.
# NOTE(review): the magnitude specs below still contain the English words
# "hundred"/"thousand"/"million" -- presumably placeholders left over from
# the English template; confirm the intended Arabic words before shipping.
int_0 = MapIntBuilder({
    "صفر": 0,
})
int_1_9 = MapIntBuilder({
    "واحد": 1,
    "اثنان": 2,
    "ثلاثة": 3,
    "اربعة": 4,
    "خمسة": 5,
    "ستة": 6,
    "سبعة": 7,
    "ثمانية": 8,
    "تسعة": 9,
})
int_10_19 = MapIntBuilder({
    "عشرة": 10,
    "احدى عشر": 11,
    "اثنا عشر": 12,
    "ثلاثة عشر": 13,
    "اربعة عشر": 14,
    "خمسة عشر": 15,
    "ستة عشر": 16,
    "سبعة عشر": 17,
    "ثمانية عشر": 18,
    "تسعة عشر": 19,
})
# Tens words map to their multiplier-of-ten (combined with factor=10 below).
int_20_90_10 = MapIntBuilder({
    "عشرون": 2,
    "ثلاثون": 3,
    "اربعون": 4,
    "خمسون": 5,
    "ستون": 6,
    "سبعون": 7,
    "ثمانون": 8,
    "تسعون": 9,
})
int_20_99 = MagnitudeIntBuilder(
    factor = 10,
    spec = "<multiplier> [<remainder>]",
    multipliers = [int_20_90_10],
    remainders = [int_1_9],
)
# 1-99, optionally preceded by the conjunction "و" ("and").
int_and_1_99 = CollectionIntBuilder(
    spec = "[و] <element>",
    set = [int_1_9, int_10_19, int_20_99],
)
int_100s = MagnitudeIntBuilder(
    factor = 100,
    spec = "[<multiplier>] hundred [<remainder>]",
    multipliers = [int_1_9],
    remainders = [int_and_1_99],
)
# Fixed: the three builders below previously referenced the undefined
# identifier "int_و_1_99" (a NameError at import time); the collection
# defined above is named int_and_1_99.
int_100big = MagnitudeIntBuilder(
    factor = 100,
    spec = "[<multiplier>] hundred [<remainder>]",
    multipliers = [int_10_19, int_20_99],
    remainders = [int_and_1_99],
)
int_1000s = MagnitudeIntBuilder(
    factor = 1000,
    spec = "[<multiplier>] thousand [<remainder>]",
    multipliers = [int_1_9, int_10_19, int_20_99, int_100s],
    remainders = [int_and_1_99, int_100s],
)
int_1000000s = MagnitudeIntBuilder(
    factor = 1000000,
    spec = "[<multiplier>] million [<remainder>]",
    multipliers = [int_1_9, int_10_19, int_20_99, int_100s, int_1000s],
    remainders = [int_and_1_99, int_100s, int_1000s],
)
#---------------------------------------------------------------------------
class IntegerContent(IntegerContentBase):
    # All builders that together cover spoken integers from zero up into
    # the millions range.
    builders = [int_0, int_1_9, int_10_19, int_20_99,
                int_100s, int_100big, int_1000s, int_1000000s]
class DigitsContent(DigitsContentBase):
    # Spoken forms for the single digits 0-9; zero has two alternatives.
    # (Fixed: the closing line previously had stray dataset metadata fused
    # onto it, which made the module unparsable.)
    digits = [("صفر", "اووه"), "واحد", "اثنان", "ثلاثة", "اربعة",
              "خمسة", "ستة", "سبعة", "ثمانية", "تسعة"]
"""
=====================
SVM: Weighted samples
=====================
Plot decision function of a weighted dataset, where the size of points
is proportional to its weight.
"""
print __doc__
import numpy as np
import pylab as pl
from sklearn import svm
# we create 20 points
np.random.seed(0)
X = np.r_[np.random.randn(10, 2) + [1, 1], np.random.randn(10, 2)]
Y = [1] * 10 + [-1] * 10
sample_weight = 100 * np.abs(np.random.randn(20))
# and assign a bigger weight to the last 10 samples
sample_weight[:10] *= 10
# # fit the model
clf = svm.SVC()
clf.fit(X, Y, sample_weight=sample_weight)
# plot the decision function
xx, yy = np.meshgrid(np.linspace(-4, 5, 500), np.linspace(-4, 5, 500))
Z = clf.decision_function(np.c_[xx.ravel(), yy.ravel()])
Z = Z.reshape(xx.shape)
# plot the line, the points, and the nearest vectors to the plane
pl.contourf(xx, yy, Z, alpha=0.75, cmap=pl.cm.bone)
pl.scatter(X[:, 0], X[:, 1], c=Y, s=sample_weight, alpha=0.9, cmap=pl.cm.bone)
pl.axis('off')
pl.show()
| seckcoder/lang-learn | python/sklearn/examples/svm/plot_weighted_samples.py | Python | unlicense | 999 |
import urllib
import urlparse
def get_path(url):
    """Return only the path component of ``url``."""
    return urlparse.urlsplit(url).path
def get_host(url):
    """Return only the host (network location) component of ``url``."""
    return urlparse.urlsplit(url).netloc
def add_path(url, new_path):
    """Given a url and path, return a new url that combines
    the two.
    """
    scheme, host, path, query, fragment = urlparse.urlsplit(url)
    suffix = new_path.lstrip('/')
    # Insert exactly one slash between the existing path and the suffix.
    if not path.endswith('/'):
        path += '/'
    path += suffix
    return urlparse.urlunsplit([scheme, host, path, query, fragment])
def _query_param(key, value):
    """ensure that a query parameter's value is a string
    of bytes in UTF-8 encoding.
    """
    if isinstance(value, str):
        # Byte string: assume it is already UTF-8 and round-trip it.
        value = value.decode('utf-8')
    elif not isinstance(value, unicode):
        # Any non-string value: stringify first.
        value = unicode(value)
    return key, value.encode('utf-8')
def _make_query_tuples(params):
    """Normalize ``params`` (a mapping, or a single key/value pair) into a
    list of UTF-8 encoded (key, value) tuples.
    """
    if hasattr(params, 'items'):
        return [_query_param(k, v) for (k, v) in params.items()]
    return [_query_param(*params)]
def add_query_params(url, params):
    """use the _update_query_params function to set a new query
    string for the url based on params.

    Existing parameters are kept, so duplicates are possible.
    """
    return update_query_params(url, params, update=False)
def update_query_params(url, params, update=True):
    """Given a url and a tuple or dict of parameters, return
    a url that includes the parameters as a properly formatted
    query string.

    If update is True, change any existing values to new values
    given in params.
    """
    scheme, host, path, query, fragment = urlparse.urlsplit(url)

    # urlparse.parse_qsl gives back url-decoded byte strings. Leave these as
    # they are: they will be re-urlencoded below
    existing = list(urlparse.parse_qsl(query))
    additions = _make_query_tuples(params)

    if update:
        # Later values win, so additions overwrite existing keys.
        merged = dict(existing)
        merged.update(additions)
    else:
        merged = existing + additions

    query = urllib.urlencode(merged)
    return urlparse.urlunsplit([scheme, host, path, query, fragment])
| c-oreills/pyFaceGraph | src/facegraph/url_operations.py | Python | unlicense | 2,148 |
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
from getpass import getuser
import ctypes
from ctypes.util import find_library
from ctypes import c_void_p, c_uint32, POINTER, c_bool, byref
from .core_foundation import CoreFoundation, unicode_to_cfstring, cfstring_to_unicode
from .._types import str_cls, type_name
# Load OS X's OpenDirectory.framework via ctypes. This runs at import time
# and will fail on non-macOS systems where the library cannot be found.
od_path = find_library('OpenDirectory')
OpenDirectory = ctypes.CDLL(od_path, use_errno=True)

# Type aliases mirroring the OpenDirectory C API's typedefs.
ODAttributeType = CoreFoundation.CFStringRef
ODMatchType = c_uint32
ODRecordType = CoreFoundation.CFStringRef

# Opaque object references (all CFTypeRef-compatible pointers).
ODSessionRef = c_void_p
ODNodeRef = c_void_p
ODQueryRef = c_void_p
ODRecordRef = c_void_p

# Function signatures: each takes a CFErrorRef out-parameter that is
# populated on failure.
OpenDirectory.ODSessionCreate.argtypes = [
    CoreFoundation.CFAllocatorRef,
    CoreFoundation.CFDictionaryRef,
    POINTER(CoreFoundation.CFErrorRef)
]
OpenDirectory.ODSessionCreate.restype = ODSessionRef

OpenDirectory.ODNodeCreateWithName.argtypes = [
    CoreFoundation.CFAllocatorRef,
    ODSessionRef,
    CoreFoundation.CFStringRef,
    POINTER(CoreFoundation.CFErrorRef)
]
OpenDirectory.ODNodeCreateWithName.restype = ODNodeRef

OpenDirectory.ODQueryCreateWithNode.argtypes = [
    CoreFoundation.CFAllocatorRef,
    ODNodeRef,
    CoreFoundation.CFTypeRef,
    ODAttributeType,
    ODMatchType,
    CoreFoundation.CFTypeRef,
    CoreFoundation.CFTypeRef,
    CoreFoundation.CFIndex,
    POINTER(CoreFoundation.CFErrorRef)
]
OpenDirectory.ODQueryCreateWithNode.restype = ODQueryRef

OpenDirectory.ODQueryCopyResults.argtypes = [
    ODQueryRef,
    c_bool,
    POINTER(CoreFoundation.CFErrorRef)
]
OpenDirectory.ODQueryCopyResults.restype = CoreFoundation.CFArrayRef

OpenDirectory.ODRecordCopyValues.argtypes = [
    ODRecordRef,
    ODAttributeType,
    POINTER(CoreFoundation.CFErrorRef)
]
OpenDirectory.ODRecordCopyValues.restype = CoreFoundation.CFArrayRef

# Framework constants: the match-type value and the exported CFString
# globals used to query user records and their shell attribute.
kODMatchEqualTo = ODMatchType(0x2001)

kODRecordTypeUsers = ODRecordType.in_dll(OpenDirectory, 'kODRecordTypeUsers')
kODAttributeTypeRecordName = ODAttributeType.in_dll(OpenDirectory, 'kODAttributeTypeRecordName')
kODAttributeTypeUserShell = ODAttributeType.in_dll(OpenDirectory, 'kODAttributeTypeUserShell')

# Cache of username -> login shell (None when the lookup found nothing).
_login_shells = {}
def get_user_login_shell(username=None):
    """
    Uses OS X's OpenDirectory.framework to get the user's login shell

    :param username:
        A unicode string of the user to get the shell for - None for the
        current user

    :return:
        A unicode string of the user's login shell, or None if the user
        record or its shell attribute could not be found
    """

    if username is None:
        username = getuser()
        if not isinstance(username, str_cls):
            username = username.decode('utf-8')

    if not isinstance(username, str_cls):
        raise TypeError('username must be a unicode string, not %s' % type_name(username))

    if username not in _login_shells:

        # The same error out-param is reused across calls; it is only set
        # when a call fails.
        error_ref = CoreFoundation.CFErrorRef()

        session = OpenDirectory.ODSessionCreate(
            CoreFoundation.kCFAllocatorDefault,
            None,
            byref(error_ref)
        )
        if bool(error_ref):
            raise OSError('Error creating OpenDirectory session')

        node = OpenDirectory.ODNodeCreateWithName(
            CoreFoundation.kCFAllocatorDefault,
            session,
            unicode_to_cfstring("/Local/Default"),
            byref(error_ref)
        )
        if bool(error_ref):
            raise OSError('Error opening OpenDirectory node /Local/Default')

        # Look up the user record by name, fetching only the shell attribute.
        query = OpenDirectory.ODQueryCreateWithNode(
            CoreFoundation.kCFAllocatorDefault,
            node,
            kODRecordTypeUsers,
            kODAttributeTypeRecordName,
            kODMatchEqualTo,
            unicode_to_cfstring(username),
            kODAttributeTypeUserShell,
            1,
            byref(error_ref)
        )
        if bool(error_ref):
            raise OSError('Error creating OpenDirectory query for user %s' % username)

        results = OpenDirectory.ODQueryCopyResults(
            query,
            False,
            byref(error_ref)
        )
        if bool(error_ref):
            raise OSError('Error copying OpenDirectory query results')

        login_shell = None

        num_results = CoreFoundation.CFArrayGetCount(results)
        if num_results == 1:
            od_record = CoreFoundation.CFArrayGetValueAtIndex(results, 0)
            attributes = OpenDirectory.ODRecordCopyValues(od_record, kODAttributeTypeUserShell, byref(error_ref))
            if bool(error_ref):
                raise OSError('Error copying OpenDirectory record values')
            # Fixed: previously counted the `results` array here instead of
            # `attributes`, so the shell was read based on the wrong
            # array's length.
            num_attributes = CoreFoundation.CFArrayGetCount(attributes)
            if num_attributes == 1:
                string_ref = CoreFoundation.CFArrayGetValueAtIndex(attributes, 0)
                login_shell = cfstring_to_unicode(string_ref)

        _login_shells[username] = login_shell

    return _login_shells.get(username)
| EnTeQuAk/dotfiles | sublime-text-3/Packages/shellenv/all/shellenv/_osx/open_directory.py | Python | unlicense | 4,675 |
/*
* Copyright (c) 2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.identity.mgt.util;
import org.apache.axiom.om.util.Base64;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.neethi.Policy;
import org.apache.neethi.PolicyEngine;
import org.wso2.carbon.CarbonConstants;
import org.wso2.carbon.context.PrivilegedCarbonContext;
import org.wso2.carbon.identity.base.IdentityException;
import org.wso2.carbon.identity.mgt.IdentityMgtConfig;
import org.wso2.carbon.identity.mgt.constants.IdentityMgtConstants;
import org.wso2.carbon.identity.mgt.dto.UserDTO;
import org.wso2.carbon.identity.mgt.internal.IdentityMgtServiceComponent;
import org.wso2.carbon.registry.core.RegistryConstants;
import org.wso2.carbon.registry.core.Resource;
import org.wso2.carbon.registry.core.exceptions.RegistryException;
import org.wso2.carbon.registry.core.session.UserRegistry;
import org.wso2.carbon.user.api.Tenant;
import org.wso2.carbon.user.api.UserStoreException;
import org.wso2.carbon.user.api.UserStoreManager;
import org.wso2.carbon.user.core.UserCoreConstants;
import org.wso2.carbon.user.core.service.RealmService;
import org.wso2.carbon.user.core.tenant.TenantManager;
import org.wso2.carbon.user.core.util.UserCoreUtil;
import org.wso2.carbon.utils.multitenancy.MultitenantConstants;
import org.wso2.carbon.utils.multitenancy.MultitenantUtils;
import java.io.ByteArrayInputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.HashMap;
import java.util.Map;
/**
*
*/
public class Utils {
private static final Log log = LogFactory.getLog(Utils.class);
private Utils() {
}
/**
 * Parses a raw user id into a {@link UserDTO}, validating the tenant
 * (unless SaaS mode is enabled) and resolving the tenant id.
 *
 * @param userId the raw user id, possibly carrying a tenant domain
 * @return a populated UserDTO
 * @throws IdentityException if the user id is blank or the tenant is invalid
 */
public static UserDTO processUserId(String userId) throws IdentityException {

    if (userId == null || userId.trim().length() < 1) {
        throw IdentityException.error("Can not proceed with out a user id");
    }

    final UserDTO userDTO = new UserDTO(userId);
    if (!IdentityMgtConfig.getInstance().isSaasEnabled()) {
        validateTenant(userDTO);
    }
    userDTO.setTenantId(getTenantId(userDTO.getTenantDomain()));
    return userDTO;
}
/**
 * Ensures the tenant domain carried by the user (if any) matches the
 * tenant of the current thread, then resolves and sets the tenant id.
 *
 * @param user the user bean to validate and populate
 * @throws IdentityException if the domains do not match
 */
public static void validateTenant(UserDTO user) throws IdentityException {
    final String domain = user.getTenantDomain();
    if (domain == null || domain.isEmpty()) {
        // No explicit domain on the user id: nothing to check.
        return;
    }
    final String threadDomain =
            PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantDomain();
    if (!domain.equals(threadDomain)) {
        throw IdentityException.error(
                "Failed access to unauthorized tenant domain");
    }
    user.setTenantId(getTenantId(domain));
}
/**
* gets no of verified user challenges
*
* @param userDTO bean class that contains user and tenant Information
* @return no of verified challenges
* @throws IdentityException if fails
*/
/**
 * gets no of verified user challenges
 *
 * @param userDTO bean class that contains user and tenant Information
 * @return no of verified challenges; 0 when none are recorded or the
 *         registry lookup fails
 * @throws IdentityException if fails
 */
public static int getVerifiedChallenges(UserDTO userDTO) throws IdentityException {

    try {
        UserRegistry registry = IdentityMgtServiceComponent.getRegistryService().
                getConfigSystemRegistry(MultitenantConstants.SUPER_TENANT_ID);

        // NOTE(review): the user id is appended twice here; confirm this
        // matches the path layout used where the counter is written.
        String identityKeyMgtPath = IdentityMgtConstants.IDENTITY_MANAGEMENT_CHALLENGES +
                RegistryConstants.PATH_SEPARATOR + userDTO.getUserId() +
                RegistryConstants.PATH_SEPARATOR + userDTO.getUserId();

        if (registry.resourceExists(identityKeyMgtPath)) {
            Resource resource = registry.get(identityKeyMgtPath);
            String property = resource.getProperty(IdentityMgtConstants.VERIFIED_CHALLENGES);
            if (property != null) {
                return Integer.parseInt(property);
            }
        }
    } catch (RegistryException e) {
        // Best effort: a registry failure is logged and treated as zero.
        log.error("Error while processing userKey", e);
    }
    return 0;
}
/**
* gets the tenant id from the tenant domain
*
* @param domain - tenant domain name
* @return tenantId
* @throws IdentityException if fails or tenant doesn't exist
*/
/**
 * gets the tenant id from the tenant domain
 *
 * @param domain - tenant domain name
 * @return tenantId
 * @throws IdentityException if fails or tenant doesn't exist
 */
public static int getTenantId(String domain) throws IdentityException {

    TenantManager tenantManager = IdentityMgtServiceComponent.getRealmService().getTenantManager();

    if (MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(domain)) {
        if (log.isDebugEnabled()) {
            log.debug("Domain is not defined implicitly. So it is Super Tenant domain.");
        }
        return MultitenantConstants.SUPER_TENANT_ID;
    }

    int tenantId;
    try {
        tenantId = tenantManager.getTenantId(domain);
    } catch (org.wso2.carbon.user.api.UserStoreException e) {
        String msg = "Error in retrieving tenant id of tenant domain: " + domain + ".";
        log.error(msg, e);
        throw IdentityException.error(msg, e);
    }
    // getTenantId returns a negative value for unknown domains.
    if (tenantId < 1 && tenantId != MultitenantConstants.SUPER_TENANT_ID) {
        String msg = "This action can not be performed by the users in non-existing domains.";
        log.error(msg);
        throw IdentityException.error(msg);
    }
    return tenantId;
}
/**
* Get the claims from the user store manager
*
* @param userName user name
* @param tenantId tenantId
* @param claim claim name
* @return claim value
* @throws IdentityException if fails
*/
/**
 * Get the claims from the user store manager
 *
 * @param userName user name
 * @param tenantId tenantId
 * @param claim    claim name
 * @return claim value; an empty string when the user store is unavailable,
 *         or null when the claim is unset for the user
 * @throws IdentityException if fails
 */
public static String getClaimFromUserStoreManager(String userName, int tenantId, String claim)
        throws IdentityException {

    org.wso2.carbon.user.core.UserStoreManager userStoreManager = null;
    RealmService realmService = IdentityMgtServiceComponent.getRealmService();
    try {
        if (realmService.getTenantUserRealm(tenantId) != null) {
            userStoreManager = (org.wso2.carbon.user.core.UserStoreManager)
                    realmService.getTenantUserRealm(tenantId).getUserStoreManager();
        }
    } catch (Exception e) {
        String msg = "Error retrieving the user store manager for tenant id : " + tenantId;
        log.error(msg, e);
        throw IdentityException.error(msg, e);
    }

    try {
        String claimValue = "";
        if (userStoreManager != null) {
            Map<String, String> claimsMap = userStoreManager.getUserClaimValues(
                    userName, new String[]{claim}, UserCoreConstants.DEFAULT_PROFILE);
            if (claimsMap != null && !claimsMap.isEmpty()) {
                claimValue = claimsMap.get(claim);
            }
        }
        return claimValue;
    } catch (Exception e) {
        String msg = "Unable to retrieve the claim for user : " + userName;
        log.error(msg, e);
        throw IdentityException.error(msg, e);
    }
}
/**
 * Gets several claims for a user in one user-store round trip.
 *
 * @param userName user name
 * @param tenantId tenant id
 * @param claims   claim URIs to fetch
 * @return map of claim URI to value; empty when the user store is unavailable
 * @throws IdentityException if the realm or claim lookup fails
 */
public static Map<String,String> getClaimsFromUserStoreManager(String userName, int tenantId, String[] claims)
        throws IdentityException {
    org.wso2.carbon.user.core.UserStoreManager userStoreManager = null;
    RealmService realmService = IdentityMgtServiceComponent.getRealmService();
    try {
        if (realmService.getTenantUserRealm(tenantId) != null) {
            userStoreManager = (org.wso2.carbon.user.core.UserStoreManager)
                    realmService.getTenantUserRealm(tenantId).getUserStoreManager();
        }
    } catch (UserStoreException e) {
        throw IdentityException.error("Error retrieving the user store manager for tenant id : " + tenantId, e);
    }

    if (userStoreManager == null) {
        return new HashMap<>();
    }
    try {
        return userStoreManager.getUserClaimValues(userName, claims, UserCoreConstants.DEFAULT_PROFILE);
    } catch (Exception e) {
        throw IdentityException.error("Unable to retrieve the claim for user : " + userName, e);
    }
}
/**
* get email address from user store
*
* @param userName user name
* @param tenantId tenant id
* @return email address
*/
/**
 * get email address from user store
 *
 * @param userName user name
 * @param tenantId tenant id
 * @return email address, or null when none could be resolved
 */
public static String getEmailAddressForUser(String userName, int tenantId) {
    String email = null;
    try {
        if (log.isDebugEnabled()) {
            log.debug("Retrieving email address from user profile.");
        }
        // 1) A tenant admin may carry the address on the tenant record itself.
        Tenant tenant = IdentityMgtServiceComponent.getRealmService().
                getTenantManager().getTenant(tenantId);
        if (tenant != null && tenant.getAdminName().equals(userName)) {
            email = tenant.getEmail();
        }
        // 2) Otherwise fall back to the email claim in the user profile.
        if (email == null || email.trim().length() < 1) {
            email = getClaimFromUserStoreManager(userName, tenantId,
                    UserCoreConstants.ClaimTypeURIs.EMAIL_ADDRESS);
        }
        // 3) Finally, with email-as-username enabled the name itself works.
        if ((email == null || email.trim().length() < 1) && MultitenantUtils.isEmailUserName()) {
            email = UserCoreUtil.removeDomainFromName(userName);
        }
    } catch (Exception e) {
        String msg = "Unable to retrieve an email address associated with the given user : " + userName;
        log.warn(msg, e); // It is common to have users with no email address defined.
    }
    return email;
}
/**
* Update Password with the user input
*
* @return true - if password was successfully reset
* @throws IdentityException
*/
public static boolean updatePassword(String userId, int tenantId, String password) throws IdentityException {
String tenantDomain = null;
if (userId == null || userId.trim().length() < 1 ||
password == null || password.trim().length() < 1) {
String msg = "Unable to find the required information for updating password";
log.error(msg);
throw IdentityException.error(msg);
}
try {
UserStoreManager userStoreManager = IdentityMgtServiceComponent.
getRealmService().getTenantUserRealm(tenantId).getUserStoreManager();
userStoreManager.updateCredentialByAdmin(userId, password);
if (log.isDebugEnabled()) {
String msg = "Password is updated for user: " + userId;
log.debug(msg);
}
return true;
} catch (UserStoreException e) {
String msg = "Error in changing the password, user name: " + userId + " domain: " +
tenantDomain + ".";
log.error(msg, e);
throw IdentityException.error(msg, e);
}
}
/**
* @param value
* @return
* @throws UserStoreException
*/
public static String doHash(String value) throws UserStoreException {
try {
String digsestFunction = "SHA-256";
MessageDigest dgst = MessageDigest.getInstance(digsestFunction);
byte[] byteValue = dgst.digest(value.getBytes());
return Base64.encode(byteValue);
} catch (NoSuchAlgorithmException e) {
log.error(e.getMessage(), e);
throw new UserStoreException(e.getMessage(), e);
}
}
/**
* Set claim to user store manager
*
* @param userName user name
* @param tenantId tenant id
* @param claim claim uri
* @param value claim value
* @throws IdentityException if fails
*/
public static void setClaimInUserStoreManager(String userName, int tenantId, String claim,
String value) throws IdentityException {
org.wso2.carbon.user.core.UserStoreManager userStoreManager = null;
RealmService realmService = IdentityMgtServiceComponent.getRealmService();
try {
if (realmService.getTenantUserRealm(tenantId) != null) {
userStoreManager = (org.wso2.carbon.user.core.UserStoreManager) realmService.getTenantUserRealm(tenantId).
getUserStoreManager();
}
} catch (Exception e) {
String msg = "Error retrieving the user store manager for the tenant";
log.error(msg, e);
throw IdentityException.error(msg, e);
}
try {
if (userStoreManager != null) {
String oldValue = userStoreManager.getUserClaimValue(userName, claim, null);
if (oldValue == null || !oldValue.equals(value)) {
Map<String,String> claimMap = new HashMap<String,String>();
claimMap.put(claim, value);
userStoreManager.setUserClaimValues(userName, claimMap, UserCoreConstants.DEFAULT_PROFILE);
}
}
} catch (Exception e) {
String msg = "Unable to set the claim for user : " + userName;
log.error(msg, e);
throw IdentityException.error(msg, e);
}
}
public static String getUserStoreDomainName(String userName) {
int index;
String userDomain;
if ((index = userName.indexOf(CarbonConstants.DOMAIN_SEPARATOR)) >= 0) {
// remove domain name if exist
userDomain = userName.substring(0, index);
} else {
userDomain = UserCoreConstants.PRIMARY_DEFAULT_DOMAIN_NAME;
}
return userDomain;
}
    /**
     * Returns the claim URIs of the default challenge questions.
     *
     * @return the two default challenge question URIs
     */
    public static String[] getChallengeUris() {
        //TODO
        return new String[]{IdentityMgtConstants.DEFAULT_CHALLENGE_QUESTION_URI01,
                IdentityMgtConstants.DEFAULT_CHALLENGE_QUESTION_URI02};
    }
    /**
     * Builds the WS-SecurityPolicy applied to the identity management services:
     * a transport binding ("UTOverTransport") that requires HTTPS with a client
     * certificate, the Basic256 algorithm suite, lax layout and timestamps.
     *
     * @return the parsed policy object
     */
    public static Policy getSecurityPolicy() {
        // Inline WS-Policy XML; parsed by the policy engine on every call.
        String policyString = " <wsp:Policy wsu:Id=\"UTOverTransport\" xmlns:wsp=\"http://schemas.xmlsoap.org/ws/2004/09/policy\"\n" +
                " xmlns:wsu=\"http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd\">\n" +
                " <wsp:ExactlyOne>\n" +
                " <wsp:All>\n" +
                " <sp:TransportBinding xmlns:sp=\"http://schemas.xmlsoap.org/ws/2005/07/securitypolicy\">\n" +
                " <wsp:Policy>\n" +
                " <sp:TransportToken>\n" +
                " <wsp:Policy>\n" +
                " <sp:HttpsToken RequireClientCertificate=\"true\"/>\n" +
                " </wsp:Policy>\n" +
                " </sp:TransportToken>\n" +
                " <sp:AlgorithmSuite>\n" +
                " <wsp:Policy>\n" +
                " <sp:Basic256/>\n" +
                " </wsp:Policy>\n" +
                " </sp:AlgorithmSuite>\n" +
                " <sp:Layout>\n" +
                " <wsp:Policy>\n" +
                " <sp:Lax/>\n" +
                " </wsp:Policy>\n" +
                " </sp:Layout>\n" +
                " <sp:IncludeTimestamp/>\n" +
                " </wsp:Policy>\n" +
                " </sp:TransportBinding>\n" +
                " </wsp:All>\n" +
                " </wsp:ExactlyOne>\n" +
                " </wsp:Policy>";
        return PolicyEngine.getPolicy(new ByteArrayInputStream(policyString.getBytes()));
    }
}
| wso2/carbon-identity-framework | components/identity-mgt/org.wso2.carbon.identity.mgt/src/main/java/org/wso2/carbon/identity/mgt/util/Utils.java | Java | apache-2.0 | 16,553 |
import { expect } from 'chai';
import { spec } from 'modules/yieldoneBidAdapter.js';
import { newBidder } from 'src/adapters/bidderFactory.js';
import { deepClone } from 'src/utils.js';
const ENDPOINT = 'https://y.one.impact-ad.jp/h_bid';
const USER_SYNC_URL = 'https://y.one.impact-ad.jp/push_sync';
const VIDEO_PLAYER_URL = 'https://img.ak.impact-ad.jp/ic/pone/ivt/firstview/js/dac-video-prebid.min.js';
const DEFAULT_VIDEO_SIZE = {w: 640, h: 360};
describe('yieldoneBidAdapter', function() {
const adapter = newBidder(spec);
describe('isBidRequestValid', function () {
let bid = {
'bidder': 'yieldone',
'params': {
placementId: '36891'
},
'adUnitCode': 'adunit-code',
'sizes': [[300, 250], [336, 280]],
'bidId': '23beaa6af6cdde',
'bidderRequestId': '19c0c1efdf37e7',
'auctionId': '61466567-d482-4a16-96f0-fe5f25ffbdf1',
};
it('should return true when required params found', function () {
expect(spec.isBidRequestValid(bid)).to.equal(true);
});
it('should return false when placementId not passed correctly', function () {
bid.params.placementId = '';
expect(spec.isBidRequestValid(bid)).to.equal(false);
});
it('should return false when require params are not passed', function () {
let bid = Object.assign({}, bid);
bid.params = {};
expect(spec.isBidRequestValid(bid)).to.equal(false);
});
});
describe('buildRequests', function () {
const bidderRequest = {
refererInfo: {
numIframes: 0,
reachedTop: true,
referer: 'http://example.com',
stack: ['http://example.com']
}
};
describe('Basic', function () {
const bidRequests = [
{
'bidder': 'yieldone',
'params': {placementId: '36891'},
'adUnitCode': 'adunit-code1',
'bidId': '23beaa6af6cdde',
'bidderRequestId': '19c0c1efdf37e7',
'auctionId': '61466567-d482-4a16-96f0-fe5f25ffbdf1',
},
{
'bidder': 'yieldone',
'params': {placementId: '47919'},
'adUnitCode': 'adunit-code2',
'bidId': '382091349b149f"',
'bidderRequestId': '"1f9c98192de251"',
'auctionId': '61466567-d482-4a16-96f0-fe5f25ffbdf1',
}
];
const request = spec.buildRequests(bidRequests, bidderRequest);
it('sends bid request to our endpoint via GET', function () {
expect(request[0].method).to.equal('GET');
expect(request[1].method).to.equal('GET');
});
it('attaches source and version to endpoint URL as query params', function () {
expect(request[0].url).to.equal(ENDPOINT);
expect(request[1].url).to.equal(ENDPOINT);
});
it('adUnitCode should be sent as uc parameters on any requests', function () {
expect(request[0].data.uc).to.equal('adunit-code1');
expect(request[1].data.uc).to.equal('adunit-code2');
});
});
describe('Old Format', function () {
const bidRequests = [
{
params: {placementId: '0'},
mediaType: 'banner',
sizes: [[300, 250], [336, 280]],
},
{
params: {placementId: '1'},
mediaType: 'banner',
sizes: [[336, 280]],
},
{
// It doesn't actually exist.
params: {placementId: '2'},
},
{
params: {placementId: '3'},
mediaType: 'video',
sizes: [[1280, 720], [1920, 1080]],
},
{
params: {placementId: '4'},
mediaType: 'video',
sizes: [[1920, 1080]],
},
{
params: {placementId: '5'},
mediaType: 'video',
},
];
const request = spec.buildRequests(bidRequests, bidderRequest);
it('parameter sz has more than one size on banner requests', function () {
expect(request[0].data.sz).to.equal('300x250,336x280');
expect(request[1].data.sz).to.equal('336x280');
expect(request[2].data.sz).to.equal('');
expect(request[3].data).to.not.have.property('sz');
expect(request[4].data).to.not.have.property('sz');
expect(request[5].data).to.not.have.property('sz');
});
it('width and height should be set as separate parameters on outstream requests', function () {
expect(request[0].data).to.not.have.property('w');
expect(request[1].data).to.not.have.property('w');
expect(request[2].data).to.not.have.property('w');
expect(request[3].data.w).to.equal(1280);
expect(request[3].data.h).to.equal(720);
expect(request[4].data.w).to.equal(1920);
expect(request[4].data.h).to.equal(1080);
expect(request[5].data.w).to.equal(DEFAULT_VIDEO_SIZE.w);
expect(request[5].data.h).to.equal(DEFAULT_VIDEO_SIZE.h);
});
});
describe('New Format', function () {
const bidRequests = [
{
params: {placementId: '0'},
mediaTypes: {
banner: {
sizes: [[300, 250], [336, 280]],
},
},
},
{
params: {placementId: '1'},
mediaTypes: {
banner: {
sizes: [[336, 280]],
},
},
},
{
// It doesn't actually exist.
params: {placementId: '2'},
mediaTypes: {
banner: {
},
},
},
{
params: {placementId: '3'},
mediaTypes: {
video: {
context: 'outstream',
playerSize: [[1280, 720], [1920, 1080]],
},
},
},
{
params: {placementId: '4'},
mediaTypes: {
video: {
context: 'outstream',
playerSize: [1920, 1080],
},
},
},
{
params: {placementId: '5'},
mediaTypes: {
video: {
context: 'outstream',
},
},
},
];
const request = spec.buildRequests(bidRequests, bidderRequest);
it('parameter sz has more than one size on banner requests', function () {
expect(request[0].data.sz).to.equal('300x250,336x280');
expect(request[1].data.sz).to.equal('336x280');
expect(request[2].data.sz).to.equal('');
expect(request[3].data).to.not.have.property('sz');
expect(request[4].data).to.not.have.property('sz');
expect(request[5].data).to.not.have.property('sz');
});
it('width and height should be set as separate parameters on outstream requests', function () {
expect(request[0].data).to.not.have.property('w');
expect(request[1].data).to.not.have.property('w');
expect(request[2].data).to.not.have.property('w');
expect(request[3].data.w).to.equal(1280);
expect(request[3].data.h).to.equal(720);
expect(request[4].data.w).to.equal(1920);
expect(request[4].data.h).to.equal(1080);
expect(request[5].data.w).to.equal(DEFAULT_VIDEO_SIZE.w);
expect(request[5].data.h).to.equal(DEFAULT_VIDEO_SIZE.h);
});
});
describe('Multiple Format', function () {
const bidRequests = [
{
// It will be treated as a banner.
params: {
placementId: '0',
},
mediaTypes: {
banner: {
sizes: [[300, 250], [336, 280]],
},
video: {
context: 'outstream',
playerSize: [1920, 1080],
},
},
},
{
// It will be treated as a video.
params: {
placementId: '1',
playerParams: {},
},
mediaTypes: {
banner: {
sizes: [[300, 250], [336, 280]],
},
video: {
context: 'outstream',
playerSize: [1920, 1080],
},
},
},
];
const request = spec.buildRequests(bidRequests, bidderRequest);
it('parameter sz has more than one size on banner requests', function () {
expect(request[0].data.sz).to.equal('300x250,336x280');
expect(request[1].data).to.not.have.property('sz');
});
it('width and height should be set as separate parameters on outstream requests', function () {
expect(request[0].data).to.not.have.property('w');
expect(request[1].data.w).to.equal(1920);
expect(request[1].data.h).to.equal(1080);
});
});
describe('FLUX Format', function () {
const bidRequests = [
{
// It will be treated as a banner.
params: {
placementId: '0',
},
mediaTypes: {
banner: {
sizes: [[300, 250], [336, 280]],
},
video: {
context: 'outstream',
playerSize: [[1, 1]],
},
},
},
{
// It will be treated as a video.
params: {
placementId: '1',
playerParams: {},
playerSize: [1920, 1080],
},
mediaTypes: {
banner: {
sizes: [[300, 250], [336, 280]],
},
video: {
context: 'outstream',
playerSize: [[1, 1]],
},
},
},
{
// It will be treated as a video.
params: {
placementId: '2',
playerParams: {},
},
mediaTypes: {
banner: {
sizes: [[300, 250], [336, 280]],
},
video: {
context: 'outstream',
playerSize: [[1, 1]],
},
},
},
];
const request = spec.buildRequests(bidRequests, bidderRequest);
it('parameter sz has more than one size on banner requests', function () {
expect(request[0].data.sz).to.equal('300x250,336x280');
expect(request[1].data).to.not.have.property('sz');
expect(request[2].data).to.not.have.property('sz');
});
it('width and height should be set as separate parameters on outstream requests', function () {
expect(request[0].data).to.not.have.property('w');
expect(request[1].data.w).to.equal(1920);
expect(request[1].data.h).to.equal(1080);
expect(request[2].data.w).to.equal(DEFAULT_VIDEO_SIZE.w);
expect(request[2].data.h).to.equal(DEFAULT_VIDEO_SIZE.h);
});
});
describe('LiveRampID', function () {
it('dont send LiveRampID if undefined', function () {
const bidRequests = [
{
params: {placementId: '0'},
},
{
params: {placementId: '1'},
userId: {},
},
{
params: {placementId: '2'},
userId: undefined,
},
];
const request = spec.buildRequests(bidRequests, bidderRequest);
expect(request[0].data).to.not.have.property('lr_env');
expect(request[1].data).to.not.have.property('lr_env');
expect(request[2].data).to.not.have.property('lr_env');
});
it('should send LiveRampID if available', function () {
const bidRequests = [
{
params: {placementId: '0'},
userId: {idl_env: 'idl_env_sample'},
},
];
const request = spec.buildRequests(bidRequests, bidderRequest);
expect(request[0].data.lr_env).to.equal('idl_env_sample');
});
});
describe('IMID', function () {
it('dont send IMID if undefined', function () {
const bidRequests = [
{
params: {placementId: '0'},
},
{
params: {placementId: '1'},
userId: {},
},
{
params: {placementId: '2'},
userId: undefined,
},
];
const request = spec.buildRequests(bidRequests, bidderRequest);
expect(request[0].data).to.not.have.property('imuid');
expect(request[1].data).to.not.have.property('imuid');
expect(request[2].data).to.not.have.property('imuid');
});
it('should send IMID if available', function () {
const bidRequests = [
{
params: {placementId: '0'},
userId: {imuid: 'imuid_sample'},
},
];
const request = spec.buildRequests(bidRequests, bidderRequest);
expect(request[0].data.imuid).to.equal('imuid_sample');
});
});
});
describe('interpretResponse', function () {
let bidRequestBanner = [
{
'method': 'GET',
'url': 'https://y.one.impact-ad.jp/h_bid',
'data': {
'v': 'hb1',
'p': '36891',
'sz': '300x250,336x280',
'cb': 12892917383,
'r': 'http%3A%2F%2Flocalhost%3A9876%2F%3Fid%3D74552836',
'uid': '23beaa6af6cdde',
't': 'i'
}
}
];
let serverResponseBanner = {
body: {
'adTag': '<!-- adtag -->',
'uid': '23beaa6af6cdde',
'height': 250,
'width': 300,
'cpm': 0.0536616,
'crid': '2494768',
'currency': 'JPY',
'statusMessage': 'Bid available',
'dealId': 'P1-FIX-7800-DSP-MON',
'admoain': [
'www.example.com'
]
}
};
it('should get the correct bid response for banner', function () {
let expectedResponse = [{
'requestId': '23beaa6af6cdde',
'cpm': 53.6616,
'width': 300,
'height': 250,
'creativeId': '2494768',
'dealId': 'P1-FIX-7800-DSP-MON',
'currency': 'JPY',
'netRevenue': true,
'ttl': 3000,
'referrer': '',
'meta': {
'advertiserDomains': [
'www.example.com'
]
},
'mediaType': 'banner',
'ad': '<!-- adtag -->'
}];
let result = spec.interpretResponse(serverResponseBanner, bidRequestBanner[0]);
expect(Object.keys(result[0])).to.deep.equal(Object.keys(expectedResponse[0]));
expect(result[0].mediaType).to.equal(expectedResponse[0].mediaType);
});
let serverResponseVideo = {
body: {
'uid': '23beaa6af6cdde',
'height': 360,
'width': 640,
'cpm': 0.0536616,
'dealId': 'P1-FIX-766-DSP-MON',
'crid': '2494768',
'currency': 'JPY',
'statusMessage': 'Bid available',
'adm': '<!-- vast -->'
}
};
let bidRequestVideo = [
{
'method': 'GET',
'url': 'https://y.one.impact-ad.jp/h_bid',
'data': {
'v': 'hb1',
'p': '41993',
'w': '640',
'h': '360',
'cb': 12892917383,
'r': 'http%3A%2F%2Flocalhost%3A9876%2F%3Fid%3D74552836',
'uid': '23beaa6af6cdde',
't': 'i'
}
}
];
it('should get the correct bid response for video', function () {
let expectedResponse = [{
'requestId': '23beaa6af6cdde',
'cpm': 53.6616,
'width': 640,
'height': 360,
'creativeId': '2494768',
'dealId': 'P1-FIX-7800-DSP-MON',
'currency': 'JPY',
'netRevenue': true,
'ttl': 3000,
'referrer': '',
'meta': {
'advertiserDomains': []
},
'mediaType': 'video',
'vastXml': '<!-- vast -->',
'renderer': {
id: '23beaa6af6cdde',
url: VIDEO_PLAYER_URL
}
}];
let result = spec.interpretResponse(serverResponseVideo, bidRequestVideo[0]);
expect(Object.keys(result[0])).to.deep.equal(Object.keys(expectedResponse[0]));
expect(result[0].mediaType).to.equal(expectedResponse[0].mediaType);
expect(result[0].renderer.id).to.equal(expectedResponse[0].renderer.id);
expect(result[0].renderer.url).to.equal(expectedResponse[0].renderer.url);
});
it('handles empty bid response', function () {
let response = {
body: {
'uid': '2c0b634db95a01',
'height': 0,
'crid': '',
'statusMessage': 'Bid returned empty or error response',
'width': 0,
'cpm': 0
}
};
let result = spec.interpretResponse(response, bidRequestBanner[0]);
expect(result.length).to.equal(0);
});
});
describe('getUserSyncs', function () {
it('handles empty sync options', function () {
expect(spec.getUserSyncs({})).to.be.undefined;
});
it('should return a sync url if iframe syncs are enabled', function () {
expect(spec.getUserSyncs({
'iframeEnabled': true
})).to.deep.equal([{
type: 'iframe', url: USER_SYNC_URL
}]);
});
});
});
| PubWise/Prebid.js | test/spec/modules/yieldoneBidAdapter_spec.js | JavaScript | apache-2.0 | 16,949 |
# Copyright (c) 2015 FUJITSU LIMITED
# Copyright (c) 2012 EMC Corporation.
# Copyright (c) 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""
FibreChannel Cinder Volume driver for Fujitsu ETERNUS DX S3 series.
"""
from oslo_log import log as logging
import six
from cinder import interface
from cinder.volume import driver
from cinder.volume.drivers.fujitsu import eternus_dx_common
from cinder.zonemanager import utils as fczm_utils
LOG = logging.getLogger(__name__)
@interface.volumedriver
class FJDXFCDriver(driver.FibreChannelDriver):
    """FC Cinder Volume Driver for Fujitsu ETERNUS DX S3 series.

    All array-side operations are delegated to the protocol-agnostic
    ``FJDXCommon`` helper; this class adapts it to the FibreChannelDriver
    interface and builds the FC initiator/target maps for zoning.
    """

    # ThirdPartySystems wiki page
    CI_WIKI_NAME = "Fujitsu_ETERNUS_CI"
    VERSION = eternus_dx_common.FJDXCommon.VERSION

    def __init__(self, *args, **kwargs):
        super(FJDXFCDriver, self).__init__(*args, **kwargs)

        # Common helper configured for the FC protocol.
        self.common = eternus_dx_common.FJDXCommon(
            'fc',
            configuration=self.configuration)
        self.VERSION = self.common.VERSION

    def check_for_setup_error(self):
        """Validate setup; nothing to check for this driver."""
        pass

    def create_volume(self, volume):
        """Create volume and return its location and merged metadata."""
        LOG.debug('create_volume, '
                  'volume id: %s, enter method.', volume['id'])

        location, metadata = self.common.create_volume(volume)

        # Preserve any metadata already set on the volume object.
        v_metadata = self._get_metadata(volume)
        metadata.update(v_metadata)

        LOG.debug('create_volume, info: %s, exit method.', metadata)
        return {'provider_location': six.text_type(location),
                'metadata': metadata}

    def create_volume_from_snapshot(self, volume, snapshot):
        """Creates a volume from a snapshot."""
        LOG.debug('create_volume_from_snapshot, '
                  'volume id: %(vid)s, snap id: %(sid)s, enter method.',
                  {'vid': volume['id'], 'sid': snapshot['id']})

        location, metadata = (
            self.common.create_volume_from_snapshot(volume, snapshot))

        v_metadata = self._get_metadata(volume)
        metadata.update(v_metadata)

        LOG.debug('create_volume_from_snapshot, '
                  'info: %s, exit method.', metadata)
        return {'provider_location': six.text_type(location),
                'metadata': metadata}

    def create_cloned_volume(self, volume, src_vref):
        """Create cloned volume."""
        LOG.debug('create_cloned_volume, '
                  'target volume id: %(tid)s, '
                  'source volume id: %(sid)s, enter method.',
                  {'tid': volume['id'], 'sid': src_vref['id']})

        location, metadata = (
            self.common.create_cloned_volume(volume, src_vref))

        v_metadata = self._get_metadata(volume)
        metadata.update(v_metadata)

        LOG.debug('create_cloned_volume, '
                  'info: %s, exit method.', metadata)
        return {'provider_location': six.text_type(location),
                'metadata': metadata}

    def delete_volume(self, volume):
        """Delete volume on ETERNUS."""
        LOG.debug('delete_volume, '
                  'volume id: %s, enter method.', volume['id'])

        vol_exist = self.common.delete_volume(volume)

        LOG.debug('delete_volume, '
                  'delete: %s, exit method.', vol_exist)

    def create_snapshot(self, snapshot):
        """Creates a snapshot."""
        LOG.debug('create_snapshot, '
                  'snap id: %(sid)s, volume id: %(vid)s, enter method.',
                  {'sid': snapshot['id'], 'vid': snapshot['volume_id']})

        location, metadata = self.common.create_snapshot(snapshot)

        # NOTE(review): metadata is only logged here, not returned — confirm
        # this is intended before surfacing it to the caller.
        LOG.debug('create_snapshot, info: %s, exit method.', metadata)
        return {'provider_location': six.text_type(location)}

    def delete_snapshot(self, snapshot):
        """Deletes a snapshot."""
        LOG.debug('delete_snapshot, '
                  'snap id: %(sid)s, volume id: %(vid)s, enter method.',
                  {'sid': snapshot['id'], 'vid': snapshot['volume_id']})

        vol_exist = self.common.delete_snapshot(snapshot)

        LOG.debug('delete_snapshot, '
                  'delete: %s, exit method.', vol_exist)

    def ensure_export(self, context, volume):
        """Driver entry point to get the export info for an existing volume."""
        return

    def create_export(self, context, volume, connector):
        """Driver entry point to get the export info for a new volume."""
        return

    def remove_export(self, context, volume):
        """Driver entry point to remove an export for a volume."""
        return

    @fczm_utils.AddFCZone
    def initialize_connection(self, volume, connector):
        """Allow connection to connector and return connection info."""
        LOG.debug('initialize_connection, volume id: %(vid)s, '
                  'wwpns: %(wwpns)s, enter method.',
                  {'vid': volume['id'], 'wwpns': connector['wwpns']})

        info = self.common.initialize_connection(volume, connector)

        # Attach the initiator->target map required by the FC zone manager.
        data = info['data']
        init_tgt_map = (
            self.common.build_fc_init_tgt_map(connector, data['target_wwn']))
        data['initiator_target_map'] = init_tgt_map
        info['data'] = data

        LOG.debug('initialize_connection, '
                  'info: %s, exit method.', info)
        return info

    @fczm_utils.RemoveFCZone
    def terminate_connection(self, volume, connector, **kwargs):
        """Disallow connection from connector."""
        LOG.debug('terminate_connection, volume id: %(vid)s, '
                  'wwpns: %(wwpns)s, enter method.',
                  {'vid': volume['id'], 'wwpns': connector['wwpns']})

        map_exist = self.common.terminate_connection(volume, connector)
        attached = self.common.check_attached_volume_in_zone(connector)

        info = {'driver_volume_type': 'fibre_channel',
                'data': {}}
        if not attached:
            # No more volumes attached to the host: return the map so the
            # zone manager can remove the zoning.
            init_tgt_map = self.common.build_fc_init_tgt_map(connector)
            info['data'] = {'initiator_target_map': init_tgt_map}

        LOG.debug('terminate_connection, unmap: %(unmap)s, '
                  'connection info: %(info)s, exit method',
                  {'unmap': map_exist, 'info': info})
        return info

    def get_volume_stats(self, refresh=False):
        """Get volume stats.

        :param refresh: when truthy, query the backend for fresh stats;
                        otherwise return the cached values.
        """
        LOG.debug('get_volume_stats, refresh: %s, enter method.', refresh)

        pool_name = None
        # Was ``if refresh is True:``; identity comparison against True is
        # non-idiomatic (PEP 8) and rejected truthy non-bool values.
        if refresh:
            data, pool_name = self.common.update_volume_stats()
            backend_name = self.configuration.safe_get('volume_backend_name')
            data['volume_backend_name'] = backend_name or 'FJDXFCDriver'
            data['storage_protocol'] = 'FC'
            self._stats = data

        LOG.debug('get_volume_stats, '
                  'pool name: %s, exit method.', pool_name)
        return self._stats

    def extend_volume(self, volume, new_size):
        """Extend volume."""
        LOG.debug('extend_volume, '
                  'volume id: %s, enter method.', volume['id'])

        used_pool_name = self.common.extend_volume(volume, new_size)

        LOG.debug('extend_volume, '
                  'used pool name: %s, exit method.', used_pool_name)

    def _get_metadata(self, volume):
        """Return the volume's metadata as a plain dict (may be empty)."""
        v_metadata = volume.get('volume_metadata')
        if v_metadata:
            ret = {data['key']: data['value'] for data in v_metadata}
        else:
            ret = volume.get('metadata', {})
        return ret
| Hybrid-Cloud/cinder | cinder/volume/drivers/fujitsu/eternus_dx_fc.py | Python | apache-2.0 | 8,064 |
#include "vocabulary.h"
namespace extractor {
// Out-of-line destructor definition; the declaration lives in vocabulary.h.
Vocabulary::~Vocabulary() {}
// Returns the integer id for a terminal word, interning the word on first
// sight. Lookup and insert happen inside a named OpenMP critical section so
// concurrent callers cannot race on `dictionary`/`words`.
int Vocabulary::GetTerminalIndex(const string& word) {
  int word_id = -1;
  #pragma omp critical (vocabulary)
  {
    auto it = dictionary.find(word);
    if (it != dictionary.end()) {
      word_id = it->second;
    } else {
      // New word: the next id is the current size; record it in both maps.
      word_id = words.size();
      dictionary[word] = word_id;
      words.push_back(word);
    }
  }
  return word_id;
}
// Encodes a nonterminal position as a symbol; nonterminals share the id
// space with terminals by being negated.
int Vocabulary::GetNonterminalIndex(int position) {
  int symbol = -position;
  return symbol;
}
// Terminal ids are non-negative; negative symbols encode nonterminals.
bool Vocabulary::IsTerminal(int symbol) {
  return !(symbol < 0);
}
// Returns the word for a terminal symbol id. The copy is taken under the
// same named critical section used by GetTerminalIndex, so a concurrent
// insert cannot resize `words` underneath us.
string Vocabulary::GetTerminalValue(int symbol) {
  string result;
  #pragma omp critical (vocabulary)
  {
    result = words[symbol];
  }
  return result;
}
// Two vocabularies are equal iff they intern the same words with the same ids.
bool Vocabulary::operator==(const Vocabulary& other) const {
  if (words != other.words) {
    return false;
  }
  return dictionary == other.dictionary;
}
} // namespace extractor
| veer66/cdec | extractor/vocabulary.cc | C++ | apache-2.0 | 868 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.twitter.search;
import org.apache.camel.Consumer;
import org.apache.camel.Processor;
import org.apache.camel.Producer;
import org.apache.camel.component.twitter.AbstractTwitterEndpoint;
import org.apache.camel.component.twitter.TwitterConfiguration;
import org.apache.camel.component.twitter.TwitterHelper;
import org.apache.camel.spi.Metadata;
import org.apache.camel.spi.UriEndpoint;
import org.apache.camel.spi.UriPath;
import org.apache.camel.util.ObjectHelper;
/**
 * The Twitter Search component consumes search results.
 */
@UriEndpoint(firstVersion = "2.10.0", scheme = "twitter-search", title = "Twitter Search", syntax = "twitter-search:keywords",
    consumerClass = SearchConsumerHandler.class, label = "api,social")
public class TwitterSearchEndpoint extends AbstractTwitterEndpoint {

    // Captured from the remaining part of the endpoint URI.
    @UriPath(description = "The search keywords. Multiple values can be separated with comma.")
    @Metadata(required = "true")
    private String keywords;

    public TwitterSearchEndpoint(String uri, String remaining, TwitterSearchComponent component, TwitterConfiguration properties) {
        super(uri, component, properties);
        this.keywords = remaining;
    }

    /**
     * Creates a producer that performs a one-shot search for the keywords.
     */
    @Override
    public Producer createProducer() throws Exception {
        return new SearchProducer(this, keywords);
    }

    /**
     * Creates a (polling or event-driven) consumer that feeds search results
     * to the given processor.
     */
    @Override
    public Consumer createConsumer(Processor processor) throws Exception {
        return TwitterHelper.createConsumer(processor, this, new SearchConsumerHandler(this, keywords));
    }
}
| yuruki/camel | components/camel-twitter/src/main/java/org/apache/camel/component/twitter/search/TwitterSearchEndpoint.java | Java | apache-2.0 | 2,347 |
// On page load, translate each staff row's English role (hidden input value)
// into its Chinese label and append it to the row's span.
$(document).ready(function() {
    $(".role_change").each(function() {
        var englishRole = $(this).find("input").val();
        var roleIndex = getRoleIndex(englishRole, "EN");
        var chineseRole = indexToRole(roleIndex, "CN");
        $(this).find("span").append(chineseRole);
    });
});
| tianfengjingjing/ZhuoHuaCMMOracle11g | WebRoot/js/admin/staff/viewStaffInfo.js | JavaScript | apache-2.0 | 367 |
/*
* Copyright 2018 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.stunner.bpmn.definition;
import java.util.Objects;
import javax.validation.Valid;
import org.jboss.errai.common.client.api.annotations.MapsTo;
import org.jboss.errai.common.client.api.annotations.Portable;
import org.jboss.errai.databinding.client.api.Bindable;
import org.kie.workbench.common.forms.adf.definitions.annotations.FieldParam;
import org.kie.workbench.common.forms.adf.definitions.annotations.FormDefinition;
import org.kie.workbench.common.forms.adf.definitions.annotations.FormField;
import org.kie.workbench.common.forms.adf.definitions.settings.FieldPolicy;
import org.kie.workbench.common.stunner.bpmn.definition.property.background.BackgroundSet;
import org.kie.workbench.common.stunner.bpmn.definition.property.dimensions.CircleDimensionSet;
import org.kie.workbench.common.stunner.bpmn.definition.property.dimensions.Radius;
import org.kie.workbench.common.stunner.bpmn.definition.property.event.compensation.CompensationEventExecutionSet;
import org.kie.workbench.common.stunner.bpmn.definition.property.font.FontSet;
import org.kie.workbench.common.stunner.bpmn.definition.property.general.BPMNGeneralSet;
import org.kie.workbench.common.stunner.core.definition.annotation.Definition;
import org.kie.workbench.common.stunner.core.definition.annotation.Property;
import org.kie.workbench.common.stunner.core.definition.annotation.morph.Morph;
import org.kie.workbench.common.stunner.core.util.HashUtil;
import static org.kie.workbench.common.forms.adf.engine.shared.formGeneration.processing.fields.fieldInitializers.nestedForms.AbstractEmbeddedFormsInitializer.COLLAPSIBLE_CONTAINER;
import static org.kie.workbench.common.forms.adf.engine.shared.formGeneration.processing.fields.fieldInitializers.nestedForms.AbstractEmbeddedFormsInitializer.FIELD_CONTAINER_PARAM;
/**
 * BPMN "compensation end event" definition. Field and accessor names are
 * reflection-bound: Errai databinding (@Bindable/@Portable) and the forms
 * ADF (@FormDefinition/@FormField) discover them by name, so the shape of
 * this class must not change without updating the generated forms.
 */
@Portable
@Bindable
@Definition
@Morph(base = BaseEndEvent.class)
@FormDefinition(
        startElement = "general",
        policy = FieldPolicy.ONLY_MARKED,
        defaultFieldSettings = {@FieldParam(name = FIELD_CONTAINER_PARAM, value = COLLAPSIBLE_CONTAINER)}
)
public class EndCompensationEvent extends BaseEndEvent {

    // Rendered as a form section directly after the "general" properties.
    @Property
    @FormField(afterElement = "general")
    @Valid
    private CompensationEventExecutionSet executionSet;

    public EndCompensationEvent() {
        this(new BPMNGeneralSet(""),
             new BackgroundSet(),
             new FontSet(),
             new CircleDimensionSet(new Radius()),
             new CompensationEventExecutionSet());
    }

    public EndCompensationEvent(final @MapsTo("general") BPMNGeneralSet general,
                                final @MapsTo("backgroundSet") BackgroundSet backgroundSet,
                                final @MapsTo("fontSet") FontSet fontSet,
                                final @MapsTo("dimensionsSet") CircleDimensionSet dimensionsSet,
                                final @MapsTo("executionSet") CompensationEventExecutionSet executionSet) {
        super(general,
              backgroundSet,
              fontSet,
              dimensionsSet);
        this.executionSet = executionSet;
    }

    public CompensationEventExecutionSet getExecutionSet() {
        return executionSet;
    }

    public void setExecutionSet(CompensationEventExecutionSet executionSet) {
        this.executionSet = executionSet;
    }

    // equals/hashCode combine the superclass state with executionSet.
    @Override
    public int hashCode() {
        return HashUtil.combineHashCodes(super.hashCode(),
                                         Objects.hashCode(executionSet));
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o instanceof EndCompensationEvent) {
            EndCompensationEvent other = (EndCompensationEvent) o;
            return super.equals(other) &&
                    Objects.equals(executionSet,
                                   other.executionSet);
        }
        return false;
    }
}
| jomarko/kie-wb-common | kie-wb-common-stunner/kie-wb-common-stunner-sets/kie-wb-common-stunner-bpmn/kie-wb-common-stunner-bpmn-api/src/main/java/org/kie/workbench/common/stunner/bpmn/definition/EndCompensationEvent.java | Java | apache-2.0 | 4,567 |
// Copyright 2012-present Oliver Eilhard. All rights reserved.
// Use of this source code is governed by a MIT-license.
// See http://olivere.mit-license.org/license.txt for details.
package elastic
import (
"fmt"
"net/url"
"strings"
"golang.org/x/net/context"
"gopkg.in/olivere/elastic.v5/uritemplates"
)
// IndicesDeleteService allows to delete existing indices.
//
// See https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-delete-index.html
// for details.
type IndicesDeleteService struct {
client *Client
pretty bool
index []string
timeout string
masterTimeout string
}
// NewIndicesDeleteService creates and initializes a new IndicesDeleteService.
func NewIndicesDeleteService(client *Client) *IndicesDeleteService {
return &IndicesDeleteService{
client: client,
index: make([]string, 0),
}
}
// Index adds the list of indices to delete.
// Use `_all` or `*` string to delete all indices.
func (s *IndicesDeleteService) Index(index []string) *IndicesDeleteService {
s.index = index
return s
}
// Timeout is an explicit operation timeout.
func (s *IndicesDeleteService) Timeout(timeout string) *IndicesDeleteService {
s.timeout = timeout
return s
}
// MasterTimeout specifies the timeout for connection to master.
func (s *IndicesDeleteService) MasterTimeout(masterTimeout string) *IndicesDeleteService {
s.masterTimeout = masterTimeout
return s
}
// Pretty indicates that the JSON response be indented and human readable.
func (s *IndicesDeleteService) Pretty(pretty bool) *IndicesDeleteService {
s.pretty = pretty
return s
}
// buildURL builds the URL for the operation.
func (s *IndicesDeleteService) buildURL() (string, url.Values, error) {
// Build URL
path, err := uritemplates.Expand("/{index}", map[string]string{
"index": strings.Join(s.index, ","),
})
if err != nil {
return "", url.Values{}, err
}
// Add query string parameters
params := url.Values{}
if s.pretty {
params.Set("pretty", "1")
}
if s.timeout != "" {
params.Set("timeout", s.timeout)
}
if s.masterTimeout != "" {
params.Set("master_timeout", s.masterTimeout)
}
return path, params, nil
}
// Validate checks if the operation is valid.
func (s *IndicesDeleteService) Validate() error {
var invalid []string
if len(s.index) == 0 {
invalid = append(invalid, "Index")
}
if len(invalid) > 0 {
return fmt.Errorf("missing required fields: %v", invalid)
}
return nil
}
// Do executes the operation.
func (s *IndicesDeleteService) Do(ctx context.Context) (*IndicesDeleteResponse, error) {
// Check pre-conditions
if err := s.Validate(); err != nil {
return nil, err
}
// Get URL for request
path, params, err := s.buildURL()
if err != nil {
return nil, err
}
// Get HTTP response
res, err := s.client.PerformRequest(ctx, "DELETE", path, params, nil)
if err != nil {
return nil, err
}
// Return operation response
ret := new(IndicesDeleteResponse)
if err := s.client.decoder.Decode(res.Body, ret); err != nil {
return nil, err
}
return ret, nil
}
// -- Result of a delete index request.
// IndicesDeleteResponse is the response of IndicesDeleteService.Do.
type IndicesDeleteResponse struct {
Acknowledged bool `json:"acknowledged"`
}
| dutchcoders/ares | vendor/gopkg.in/olivere/elastic.v5/indices_delete.go | GO | apache-2.0 | 3,255 |
# (c) Copyright 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Swaminathan Vasudevan, Hewlett-Packard.
#
"""VPN Utilities and helper functions."""
from neutronclient.common import exceptions
from neutronclient.i18n import _
dpd_supported_actions = ['hold', 'clear', 'restart',
'restart-by-peer', 'disabled']
dpd_supported_keys = ['action', 'interval', 'timeout']
lifetime_keys = ['units', 'value']
lifetime_units = ['seconds']
def validate_dpd_dict(dpd_dict):
for key, value in dpd_dict.items():
if key not in dpd_supported_keys:
message = _(
"DPD Dictionary KeyError: "
"Reason-Invalid DPD key : "
"'%(key)s' not in %(supported_key)s ") % {
'key': key, 'supported_key': dpd_supported_keys}
raise exceptions.CommandError(message)
if key == 'action' and value not in dpd_supported_actions:
message = _(
"DPD Dictionary ValueError: "
"Reason-Invalid DPD action : "
"'%(key_value)s' not in %(supported_action)s ") % {
'key_value': value,
'supported_action': dpd_supported_actions}
raise exceptions.CommandError(message)
if key in ('interval', 'timeout'):
try:
if int(value) <= 0:
raise ValueError()
except ValueError:
message = _(
"DPD Dictionary ValueError: "
"Reason-Invalid positive integer value: "
"'%(key)s' = %(value)s ") % {
'key': key, 'value': value}
raise exceptions.CommandError(message)
else:
dpd_dict[key] = int(value)
return
def validate_lifetime_dict(lifetime_dict):
for key, value in lifetime_dict.items():
if key not in lifetime_keys:
message = _(
"Lifetime Dictionary KeyError: "
"Reason-Invalid unit key : "
"'%(key)s' not in %(supported_key)s ") % {
'key': key, 'supported_key': lifetime_keys}
raise exceptions.CommandError(message)
if key == 'units' and value not in lifetime_units:
message = _(
"Lifetime Dictionary ValueError: "
"Reason-Invalid units : "
"'%(key_value)s' not in %(supported_units)s ") % {
'key_value': key, 'supported_units': lifetime_units}
raise exceptions.CommandError(message)
if key == 'value':
try:
if int(value) < 60:
raise ValueError()
except ValueError:
message = _(
"Lifetime Dictionary ValueError: "
"Reason-Invalid value should be at least 60:"
"'%(key_value)s' = %(value)s ") % {
'key_value': key, 'value': value}
raise exceptions.CommandError(message)
else:
lifetime_dict['value'] = int(value)
return
def lifetime_help(policy):
lifetime = _("%s lifetime attributes. "
"'units'-seconds, default:seconds. "
"'value'-non negative integer, default:3600.") % policy
return lifetime
def dpd_help(policy):
dpd = _(" %s Dead Peer Detection attributes."
" 'action'-hold,clear,disabled,restart,restart-by-peer."
" 'interval' and 'timeout' are non negative integers. "
" 'interval' should be less than 'timeout' value. "
" 'action', default:hold 'interval', default:30, "
" 'timeout', default:120.") % policy.capitalize()
return dpd
| varunarya10/python-neutronclient | neutronclient/neutron/v2_0/vpn/utils.py | Python | apache-2.0 | 4,400 |
<?php
class attachment {
var $contentid;
var $module;
var $catid;
var $attachments;
var $field;
var $imageexts = array('gif', 'jpg', 'jpeg', 'png', 'bmp');
var $uploadedfiles = array();
var $downloadedfiles = array();
var $error;
var $upload_root;
var $siteid;
var $site = array();
function __construct($module='', $catid = 0,$siteid = 0,$upload_dir = '') {
$this->catid = intval($catid);
$this->siteid = intval($siteid)== 0 ? 1 : intval($siteid);
$this->module = $module ? $module : 'content';
pc_base::load_sys_func('dir');
pc_base::load_sys_class('image','','0');
$this->upload_root = pc_base::load_config('system','upload_path');
$this->upload_func = 'copy';
$this->upload_dir = $upload_dir;
}
/**
* 附件上传方法
* @param $field 上传字段
* @param $alowexts 允许上传类型
* @param $maxsize 最大上传大小
* @param $overwrite 是否覆盖原有文件
* @param $thumb_setting 缩略图设置
* @param $watermark_enable 是否添加水印
*/
function upload($field, $alowexts = '', $maxsize = 0, $overwrite = 0,$thumb_setting = array(), $watermark_enable = 1) {
if(!isset($_FILES[$field])) {
$this->error = UPLOAD_ERR_OK;
return false;
}
if(empty($alowexts) || $alowexts == '') {
$site_setting = $this->_get_site_setting($this->siteid);
$alowexts = $site_setting['upload_allowext'];
}
$fn = $_GET['CKEditorFuncNum'] ? $_GET['CKEditorFuncNum'] : '1';
$this->field = $field;
$this->savepath = $this->upload_root.$this->upload_dir.date('Y/md/');
$this->alowexts = $alowexts;
$this->maxsize = $maxsize;
$this->overwrite = $overwrite;
$uploadfiles = array();
$description = isset($GLOBALS[$field.'_description']) ? $GLOBALS[$field.'_description'] : array();
if(is_array($_FILES[$field]['error'])) {
$this->uploads = count($_FILES[$field]['error']);
foreach($_FILES[$field]['error'] as $key => $error) {
if($error === UPLOAD_ERR_NO_FILE) continue;
if($error !== UPLOAD_ERR_OK) {
$this->error = $error;
return false;
}
$uploadfiles[$key] = array('tmp_name' => $_FILES[$field]['tmp_name'][$key], 'name' => $_FILES[$field]['name'][$key], 'type' => $_FILES[$field]['type'][$key], 'size' => $_FILES[$field]['size'][$key], 'error' => $_FILES[$field]['error'][$key], 'description'=>$description[$key],'fn'=>$fn);
}
} else {
$this->uploads = 1;
if(!$description) $description = '';
$uploadfiles[0] = array('tmp_name' => $_FILES[$field]['tmp_name'], 'name' => $_FILES[$field]['name'], 'type' => $_FILES[$field]['type'], 'size' => $_FILES[$field]['size'], 'error' => $_FILES[$field]['error'], 'description'=>$description,'fn'=>$fn);
}
if(!dir_create($this->savepath)) {
$this->error = '8';
return false;
}
if(!is_dir($this->savepath)) {
$this->error = '8';
return false;
}
@chmod($this->savepath, 0777);
if(!is_writeable($this->savepath)) {
$this->error = '9';
return false;
}
if(!$this->is_allow_upload()) {
$this->error = '13';
return false;
}
$aids = array();
foreach($uploadfiles as $k=>$file) {
$fileext = fileext($file['name']);
if($file['error'] != 0) {
$this->error = $file['error'];
return false;
}
if(!preg_match("/^(".$this->alowexts.")$/", $fileext)) {
$this->error = '10';
return false;
}
if($this->maxsize && $file['size'] > $this->maxsize) {
$this->error = '11';
return false;
}
if(!$this->isuploadedfile($file['tmp_name'])) {
$this->error = '12';
return false;
}
$temp_filename = $this->getname($fileext);
$savefile = $this->savepath.$temp_filename;
$savefile = preg_replace("/(php|phtml|php3|php4|jsp|exe|dll|asp|cer|asa|shtml|shtm|aspx|asax|cgi|fcgi|pl)(\.|$)/i", "_\\1\\2", $savefile);
$filepath = preg_replace(new_addslashes("|^".$this->upload_root."|"), "", $savefile);
if(!$this->overwrite && file_exists($savefile)) continue;
$upload_func = $this->upload_func;
if(@$upload_func($file['tmp_name'], $savefile)) {
$this->uploadeds++;
@chmod($savefile, 0644);
@unlink($file['tmp_name']);
$file['name'] = iconv("utf-8",CHARSET,$file['name']);
$file['name'] = safe_replace($file['name']);
$uploadedfile = array('filename'=>$file['name'], 'filepath'=>$filepath, 'filesize'=>$file['size'], 'fileext'=>$fileext, 'fn'=>$file['fn']);
$thumb_enable = is_array($thumb_setting) && ($thumb_setting[0] > 0 || $thumb_setting[1] > 0 ) ? 1 : 0;
$image = new image($thumb_enable,$this->siteid);
if($thumb_enable) {
$image->thumb($savefile,'',$thumb_setting[0],$thumb_setting[1]);
}
if($watermark_enable) {
$image->watermark($savefile, $savefile);
}
$aids[] = $this->add($uploadedfile);
}
}
return $aids;
}
/**
* 附件下载
* Enter description here ...
* @param $field 预留字段
* @param $value 传入下载内容
* @param $watermark 是否加入水印
* @param $ext 下载扩展名
* @param $absurl 绝对路径
* @param $basehref
*/
function download($field, $value,$watermark = '0',$ext = 'gif|jpg|jpeg|bmp|png', $absurl = '', $basehref = '')
{
global $image_d;
$this->att_db = pc_base::load_model('attachment_model');
$upload_url = pc_base::load_config('system','upload_url');
$this->field = $field;
$dir = date('Y/md/');
$uploadpath = $upload_url.$dir;
$uploaddir = $this->upload_root.$dir;
$string = new_stripslashes($value);
if(!preg_match_all("/(href|src)=([\"|']?)([^ \"'>]+\.($ext))\\2/i", $string, $matches)) return $value;
$remotefileurls = array();
foreach($matches[3] as $matche)
{
if(strpos($matche, '://') === false) continue;
dir_create($uploaddir);
$remotefileurls[$matche] = $this->fillurl($matche, $absurl, $basehref);
}
unset($matches, $string);
$remotefileurls = array_unique($remotefileurls);
$oldpath = $newpath = array();
foreach($remotefileurls as $k=>$file) {
if(strpos($file, '://') === false || strpos($file, $upload_url) !== false) continue;
$filename = fileext($file);
$file_name = basename($file);
$filename = $this->getname($filename);
$newfile = $uploaddir.$filename;
$upload_func = $this->upload_func;
if($upload_func($file, $newfile)) {
$oldpath[] = $k;
$GLOBALS['downloadfiles'][] = $newpath[] = $uploadpath.$filename;
@chmod($newfile, 0777);
$fileext = fileext($filename);
if($watermark){
watermark($newfile, $newfile,$this->siteid);
}
$filepath = $dir.$filename;
$downloadedfile = array('filename'=>$filename, 'filepath'=>$filepath, 'filesize'=>filesize($newfile), 'fileext'=>$fileext);
$aid = $this->add($downloadedfile);
$this->downloadedfiles[$aid] = $filepath;
}
}
return str_replace($oldpath, $newpath, $value);
}
/**
* 附件删除方法
* @param $where 删除sql语句
*/
function delete($where) {
$this->att_db = pc_base::load_model('attachment_model');
$result = $this->att_db->select($where);
foreach($result as $r) {
$image = $this->upload_root.$r['filepath'];
@unlink($image);
$thumbs = glob(dirname($image).'/*'.basename($image));
if($thumbs) foreach($thumbs as $thumb) @unlink($thumb);
}
return $this->att_db->delete($where);
}
/**
* 附件添加如数据库
* @param $uploadedfile 附件信息
*/
function add($uploadedfile) {
$this->att_db = pc_base::load_model('attachment_model');
$uploadedfile['module'] = $this->module;
$uploadedfile['catid'] = $this->catid;
$uploadedfile['siteid'] = $this->siteid;
$uploadedfile['userid'] = $this->userid;
$uploadedfile['uploadtime'] = SYS_TIME;
$uploadedfile['uploadip'] = ip();
$uploadedfile['status'] = pc_base::load_config('system','attachment_stat') ? 0 : 1;
$uploadedfile['authcode'] = md5($uploadedfile['filepath']);
$uploadedfile['filename'] = strlen($uploadedfile['filename'])>49 ? $this->getname($uploadedfile['fileext']) : $uploadedfile['filename'];
$uploadedfile['isimage'] = in_array($uploadedfile['fileext'], $this->imageexts) ? 1 : 0;
$aid = $this->att_db->api_add($uploadedfile);
$this->uploadedfiles[] = $uploadedfile;
return $aid;
}
function set_userid($userid) {
$this->userid = $userid;
}
/**
* 获取缩略图地址..
* @param $image 图片路径
*/
function get_thumb($image){
return str_replace('.', '_thumb.', $image);
}
/**
* 获取附件名称
* @param $fileext 附件扩展名
*/
function getname($fileext){
return date('Ymdhis').rand(100, 999).'.'.$fileext;
}
/**
* 返回附件大小
* @param $filesize 图片大小
*/
function size($filesize) {
if($filesize >= 1073741824) {
$filesize = round($filesize / 1073741824 * 100) / 100 . ' GB';
} elseif($filesize >= 1048576) {
$filesize = round($filesize / 1048576 * 100) / 100 . ' MB';
} elseif($filesize >= 1024) {
$filesize = round($filesize / 1024 * 100) / 100 . ' KB';
} else {
$filesize = $filesize . ' Bytes';
}
return $filesize;
}
/**
* 判断文件是否是通过 HTTP POST 上传的
*
* @param string $file 文件地址
* @return bool 所给出的文件是通过 HTTP POST 上传的则返回 TRUE
*/
function isuploadedfile($file) {
return is_uploaded_file($file) || is_uploaded_file(str_replace('\\\\', '\\', $file));
}
/**
* 补全网址
*
* @param string $surl 源地址
* @param string $absurl 相对地址
* @param string $basehref 网址
* @return string 网址
*/
function fillurl($surl, $absurl, $basehref = '') {
if($basehref != '') {
$preurl = strtolower(substr($surl,0,6));
if($preurl=='http://' || $preurl=='ftp://' ||$preurl=='mms://' || $preurl=='rtsp://' || $preurl=='thunde' || $preurl=='emule://'|| $preurl=='ed2k://')
return $surl;
else
return $basehref.'/'.$surl;
}
$i = 0;
$dstr = '';
$pstr = '';
$okurl = '';
$pathStep = 0;
$surl = trim($surl);
if($surl=='') return '';
$urls = @parse_url(SITE_URL);
$HomeUrl = $urls['host'];
$BaseUrlPath = $HomeUrl.$urls['path'];
$BaseUrlPath = preg_replace("/\/([^\/]*)\.(.*)$/",'/',$BaseUrlPath);
$BaseUrlPath = preg_replace("/\/$/",'',$BaseUrlPath);
$pos = strpos($surl,'#');
if($pos>0) $surl = substr($surl,0,$pos);
if($surl[0]=='/') {
$okurl = 'http://'.$HomeUrl.'/'.$surl;
} elseif($surl[0] == '.') {
if(strlen($surl)<=2) return '';
elseif($surl[0]=='/') {
$okurl = 'http://'.$BaseUrlPath.'/'.substr($surl,2,strlen($surl)-2);
} else {
$urls = explode('/',$surl);
foreach($urls as $u) {
if($u=="..") $pathStep++;
else if($i<count($urls)-1) $dstr .= $urls[$i].'/';
else $dstr .= $urls[$i];
$i++;
}
$urls = explode('/', $BaseUrlPath);
if(count($urls) <= $pathStep)
return '';
else {
$pstr = 'http://';
for($i=0;$i<count($urls)-$pathStep;$i++) {
$pstr .= $urls[$i].'/';
}
$okurl = $pstr.$dstr;
}
}
} else {
$preurl = strtolower(substr($surl,0,6));
if(strlen($surl)<7)
$okurl = 'http://'.$BaseUrlPath.'/'.$surl;
elseif($preurl=="http:/"||$preurl=='ftp://' ||$preurl=='mms://' || $preurl=="rtsp://" || $preurl=='thunde' || $preurl=='emule:'|| $preurl=='ed2k:/')
$okurl = $surl;
else
$okurl = 'http://'.$BaseUrlPath.'/'.$surl;
}
$preurl = strtolower(substr($okurl,0,6));
if($preurl=='ftp://' || $preurl=='mms://' || $preurl=='rtsp://' || $preurl=='thunde' || $preurl=='emule:'|| $preurl=='ed2k:/') {
return $okurl;
} else {
$okurl = preg_replace('/^(http:\/\/)/i','',$okurl);
$okurl = preg_replace('/\/{1,}/i','/',$okurl);
return 'http://'.$okurl;
}
}
/**
* 是否允许上传
*/
function is_allow_upload() {
if($_groupid == 1) return true;
$starttime = SYS_TIME-86400;
$site_setting = $this->_get_site_setting($this->siteid);
return ($uploads < $site_setting['upload_maxsize']);
}
/**
* 返回错误信息
*/
function error() {
$UPLOAD_ERROR = array(
0 => L('att_upload_succ'),
1 => L('att_upload_limit_ini'),
2 => L('att_upload_limit_filesize'),
3 => L('att_upload_limit_part'),
4 => L('att_upload_nofile'),
5 => '',
6 => L('att_upload_notemp'),
7 => L('att_upload_temp_w_f'),
8 => L('att_upload_create_dir_f'),
9 => L('att_upload_dir_permissions'),
10 => L('att_upload_limit_ext'),
11 => L('att_upload_limit_setsize'),
12 => L('att_upload_not_allow'),
13 => L('att_upload_limit_time'),
);
return iconv(CHARSET,"utf-8",$UPLOAD_ERROR[$this->error]);
}
/**
* ck编辑器返回
* @param $fn
* @param $fileurl 路径
* @param $message 显示信息
*/
function mkhtml($fn,$fileurl,$message) {
$str='<script type="text/javascript">window.parent.CKEDITOR.tools.callFunction('.$fn.', \''.$fileurl.'\', \''.$message.'\');</script>';
exit($str);
}
/**
* flash上传调试方法
* @param $id
*/
function uploaderror($id = 0) {
file_put_contents(PHPCMS_PATH.'xxx.txt', $id);
}
/**
* 获取站点配置信息
* @param $siteid 站点id
*/
private function _get_site_setting($siteid) {
$siteinfo = getcache('sitelist', 'commons');
return string2array($siteinfo[$siteid]['setting']);
}
}
?> | shopscor/interface | phpcms/libs/classes/attachment.class.php | PHP | apache-2.0 | 13,084 |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.watcher.notification.email;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.SecureSetting;
import org.elasticsearch.common.settings.SecureString;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.xpack.core.watcher.crypto.CryptoService;
import org.elasticsearch.xpack.watcher.notification.NotificationService;
import javax.mail.MessagingException;
import java.util.Arrays;
import java.util.List;
/**
* A component to store email credentials and handle sending email notifications.
*/
public class EmailService extends NotificationService<Account> {
private static final Setting<String> SETTING_DEFAULT_ACCOUNT =
Setting.simpleString("xpack.notification.email.default_account", Property.Dynamic, Property.NodeScope);
private static final Setting.AffixSetting<String> SETTING_PROFILE =
Setting.affixKeySetting("xpack.notification.email.account.", "profile",
(key) -> Setting.simpleString(key, Property.Dynamic, Property.NodeScope));
private static final Setting.AffixSetting<Settings> SETTING_EMAIL_DEFAULTS =
Setting.affixKeySetting("xpack.notification.email.account.", "email_defaults",
(key) -> Setting.groupSetting(key + ".", Property.Dynamic, Property.NodeScope));
// settings that can be configured as smtp properties
private static final Setting.AffixSetting<Boolean> SETTING_SMTP_AUTH =
Setting.affixKeySetting("xpack.notification.email.account.", "smtp.auth",
(key) -> Setting.boolSetting(key, false, Property.Dynamic, Property.NodeScope));
private static final Setting.AffixSetting<Boolean> SETTING_SMTP_STARTTLS_ENABLE =
Setting.affixKeySetting("xpack.notification.email.account.", "smtp.starttls.enable",
(key) -> Setting.boolSetting(key, false, Property.Dynamic, Property.NodeScope));
private static final Setting.AffixSetting<Boolean> SETTING_SMTP_STARTTLS_REQUIRED =
Setting.affixKeySetting("xpack.notification.email.account.", "smtp.starttls.required",
(key) -> Setting.boolSetting(key, false, Property.Dynamic, Property.NodeScope));
private static final Setting.AffixSetting<String> SETTING_SMTP_HOST =
Setting.affixKeySetting("xpack.notification.email.account.", "smtp.host",
(key) -> Setting.simpleString(key, Property.Dynamic, Property.NodeScope));
private static final Setting.AffixSetting<Integer> SETTING_SMTP_PORT =
Setting.affixKeySetting("xpack.notification.email.account.", "smtp.port",
(key) -> Setting.intSetting(key, 587, Property.Dynamic, Property.NodeScope));
private static final Setting.AffixSetting<String> SETTING_SMTP_USER =
Setting.affixKeySetting("xpack.notification.email.account.", "smtp.user",
(key) -> Setting.simpleString(key, Property.Dynamic, Property.NodeScope));
private static final Setting.AffixSetting<String> SETTING_SMTP_PASSWORD =
Setting.affixKeySetting("xpack.notification.email.account.", "smtp.password",
(key) -> Setting.simpleString(key, Property.Dynamic, Property.NodeScope, Property.Filtered));
private static final Setting.AffixSetting<SecureString> SETTING_SECURE_PASSWORD =
Setting.affixKeySetting("xpack.notification.email.account.", "smtp.secure_password",
(key) -> SecureSetting.secureString(key, null));
private static final Setting.AffixSetting<TimeValue> SETTING_SMTP_TIMEOUT =
Setting.affixKeySetting("xpack.notification.email.account.", "smtp.timeout",
(key) -> Setting.timeSetting(key, TimeValue.timeValueMinutes(2), Property.Dynamic, Property.NodeScope));
private static final Setting.AffixSetting<TimeValue> SETTING_SMTP_CONNECTION_TIMEOUT =
Setting.affixKeySetting("xpack.notification.email.account.", "smtp.connection_timeout",
(key) -> Setting.timeSetting(key, TimeValue.timeValueMinutes(2), Property.Dynamic, Property.NodeScope));
private static final Setting.AffixSetting<TimeValue> SETTING_SMTP_WRITE_TIMEOUT =
Setting.affixKeySetting("xpack.notification.email.account.", "smtp.write_timeout",
(key) -> Setting.timeSetting(key, TimeValue.timeValueMinutes(2), Property.Dynamic, Property.NodeScope));
private static final Setting.AffixSetting<String> SETTING_SMTP_LOCAL_ADDRESS =
Setting.affixKeySetting("xpack.notification.email.account.", "smtp.local_address",
(key) -> Setting.simpleString(key, Property.Dynamic, Property.NodeScope));
private static final Setting.AffixSetting<String> SETTING_SMTP_SSL_TRUST_ADDRESS =
Setting.affixKeySetting("xpack.notification.email.account.", "smtp.ssl.trust",
(key) -> Setting.simpleString(key, Property.Dynamic, Property.NodeScope));
private static final Setting.AffixSetting<Integer> SETTING_SMTP_LOCAL_PORT =
Setting.affixKeySetting("xpack.notification.email.account.", "smtp.local_port",
(key) -> Setting.intSetting(key, 25, Property.Dynamic, Property.NodeScope));
private static final Setting.AffixSetting<Boolean> SETTING_SMTP_SEND_PARTIAL =
Setting.affixKeySetting("xpack.notification.email.account.", "smtp.send_partial",
(key) -> Setting.boolSetting(key, false, Property.Dynamic, Property.NodeScope));
private static final Setting.AffixSetting<Boolean> SETTING_SMTP_WAIT_ON_QUIT =
Setting.affixKeySetting("xpack.notification.email.account.", "smtp.wait_on_quit",
(key) -> Setting.boolSetting(key, true, Property.Dynamic, Property.NodeScope));
private final CryptoService cryptoService;
public EmailService(Settings settings, @Nullable CryptoService cryptoService, ClusterSettings clusterSettings) {
super(settings, "email", clusterSettings, EmailService.getSettings());
this.cryptoService = cryptoService;
// ensure logging of setting changes
clusterSettings.addSettingsUpdateConsumer(SETTING_DEFAULT_ACCOUNT, (s) -> {});
clusterSettings.addAffixUpdateConsumer(SETTING_PROFILE, (s, o) -> {}, (s, o) -> {});
clusterSettings.addAffixUpdateConsumer(SETTING_EMAIL_DEFAULTS, (s, o) -> {}, (s, o) -> {});
clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_AUTH, (s, o) -> {}, (s, o) -> {});
clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_STARTTLS_ENABLE, (s, o) -> {}, (s, o) -> {});
clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_STARTTLS_REQUIRED, (s, o) -> {}, (s, o) -> {});
clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_HOST, (s, o) -> {}, (s, o) -> {});
clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_PORT, (s, o) -> {}, (s, o) -> {});
clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_USER, (s, o) -> {}, (s, o) -> {});
clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_PASSWORD, (s, o) -> {}, (s, o) -> {});
clusterSettings.addAffixUpdateConsumer(SETTING_SECURE_PASSWORD, (s, o) -> {}, (s, o) -> {});
clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_TIMEOUT, (s, o) -> {}, (s, o) -> {});
clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_CONNECTION_TIMEOUT, (s, o) -> {}, (s, o) -> {});
clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_WRITE_TIMEOUT, (s, o) -> {}, (s, o) -> {});
clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_SSL_TRUST_ADDRESS, (s, o) -> {}, (s, o) -> {});
clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_LOCAL_ADDRESS, (s, o) -> {}, (s, o) -> {});
clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_LOCAL_PORT, (s, o) -> {}, (s, o) -> {});
clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_SEND_PARTIAL, (s, o) -> {}, (s, o) -> {});
clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_WAIT_ON_QUIT, (s, o) -> {}, (s, o) -> {});
// do an initial load
reload(settings);
}
@Override
protected Account createAccount(String name, Settings accountSettings) {
Account.Config config = new Account.Config(name, accountSettings);
return new Account(config, cryptoService, logger);
}
public EmailSent send(Email email, Authentication auth, Profile profile, String accountName) throws MessagingException {
Account account = getAccount(accountName);
if (account == null) {
throw new IllegalArgumentException("failed to send email with subject [" + email.subject() + "] via account [" + accountName
+ "]. account does not exist");
}
return send(email, auth, profile, account);
}
private EmailSent send(Email email, Authentication auth, Profile profile, Account account) throws MessagingException {
assert account != null;
try {
email = account.send(email, auth, profile);
} catch (MessagingException me) {
throw new MessagingException("failed to send email with subject [" + email.subject() + "] via account [" + account.name() +
"]", me);
}
return new EmailSent(account.name(), email);
}
public static class EmailSent {
private final String account;
private final Email email;
public EmailSent(String account, Email email) {
this.account = account;
this.email = email;
}
public String account() {
return account;
}
public Email email() {
return email;
}
}
public static List<Setting<?>> getSettings() {
return Arrays.asList(SETTING_DEFAULT_ACCOUNT, SETTING_PROFILE, SETTING_EMAIL_DEFAULTS, SETTING_SMTP_AUTH, SETTING_SMTP_HOST,
SETTING_SMTP_PASSWORD, SETTING_SMTP_PORT, SETTING_SMTP_STARTTLS_ENABLE, SETTING_SMTP_USER, SETTING_SMTP_STARTTLS_REQUIRED,
SETTING_SMTP_TIMEOUT, SETTING_SMTP_CONNECTION_TIMEOUT, SETTING_SMTP_WRITE_TIMEOUT, SETTING_SMTP_LOCAL_ADDRESS,
SETTING_SMTP_LOCAL_PORT, SETTING_SMTP_SEND_PARTIAL, SETTING_SMTP_WAIT_ON_QUIT, SETTING_SMTP_SSL_TRUST_ADDRESS,
SETTING_SECURE_PASSWORD);
}
}
| gfyoung/elasticsearch | x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/EmailService.java | Java | apache-2.0 | 10,810 |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Created by IntelliJ IDEA.
* User: max
* Date: Jan 26, 2002
* Time: 10:48:52 PM
* To change template for new class use
* Code Style | Class Templates options (Tools | IDE Options).
*/
package com.intellij.codeInspection.dataFlow.instructions;
import com.intellij.codeInspection.dataFlow.*;
import com.intellij.codeInspection.dataFlow.value.DfaValue;
import com.intellij.psi.*;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Collections;
import java.util.List;
import java.util.Map;
public class MethodCallInstruction extends Instruction {
@Nullable private final PsiCall myCall;
@Nullable private final PsiType myType;
@NotNull private final PsiExpression[] myArgs;
private final boolean myShouldFlushFields;
@NotNull private final PsiElement myContext;
@Nullable private final PsiMethod myTargetMethod;
private final List<MethodContract> myContracts;
private final MethodType myMethodType;
@Nullable private final DfaValue myPrecalculatedReturnValue;
private final boolean myOfNullable;
private final boolean myVarArgCall;
private final Map<PsiExpression, Nullness> myArgRequiredNullability;
private boolean myOnlyNullArgs = true;
private boolean myOnlyNotNullArgs = true;
public enum MethodType {
BOXING, UNBOXING, REGULAR_METHOD_CALL, CAST
}
public MethodCallInstruction(@NotNull PsiExpression context, MethodType methodType, @Nullable PsiType resultType) {
myContext = context;
myContracts = Collections.emptyList();
myMethodType = methodType;
myCall = null;
myArgs = PsiExpression.EMPTY_ARRAY;
myType = resultType;
myShouldFlushFields = false;
myPrecalculatedReturnValue = null;
myTargetMethod = null;
myVarArgCall = false;
myOfNullable = false;
myArgRequiredNullability = Collections.emptyMap();
}
public MethodCallInstruction(@NotNull PsiCall call, @Nullable DfaValue precalculatedReturnValue, List<MethodContract> contracts) {
myContext = call;
myContracts = contracts;
myMethodType = MethodType.REGULAR_METHOD_CALL;
myCall = call;
final PsiExpressionList argList = call.getArgumentList();
myArgs = argList != null ? argList.getExpressions() : PsiExpression.EMPTY_ARRAY;
myType = myCall instanceof PsiCallExpression ? ((PsiCallExpression)myCall).getType() : null;
JavaResolveResult result = call.resolveMethodGenerics();
myTargetMethod = (PsiMethod)result.getElement();
PsiSubstitutor substitutor = result.getSubstitutor();
if (argList != null && myTargetMethod != null) {
PsiParameter[] parameters = myTargetMethod.getParameterList().getParameters();
myVarArgCall = isVarArgCall(myTargetMethod, substitutor, myArgs, parameters);
myArgRequiredNullability = calcArgRequiredNullability(substitutor, parameters);
} else {
myVarArgCall = false;
myArgRequiredNullability = Collections.emptyMap();
}
myShouldFlushFields = !(call instanceof PsiNewExpression && myType != null && myType.getArrayDimensions() > 0) && !isPureCall();
myPrecalculatedReturnValue = precalculatedReturnValue;
myOfNullable = call instanceof PsiMethodCallExpression && DfaOptionalSupport.resolveOfNullable((PsiMethodCallExpression)call) != null;
}
private Map<PsiExpression, Nullness> calcArgRequiredNullability(PsiSubstitutor substitutor, PsiParameter[] parameters) {
int checkedCount = Math.min(myArgs.length, parameters.length) - (myVarArgCall ? 1 : 0);
Map<PsiExpression, Nullness> map = ContainerUtil.newHashMap();
for (int i = 0; i < checkedCount; i++) {
map.put(myArgs[i], DfaPsiUtil.getElementNullability(substitutor.substitute(parameters[i].getType()), parameters[i]));
}
return map;
}
public static boolean isVarArgCall(PsiMethod method, PsiSubstitutor substitutor, PsiExpression[] args, PsiParameter[] parameters) {
if (!method.isVarArgs()) {
return false;
}
int argCount = args.length;
int paramCount = parameters.length;
if (argCount > paramCount) {
return true;
}
if (paramCount > 0 && argCount == paramCount) {
PsiType lastArgType = args[argCount - 1].getType();
if (lastArgType != null && !substitutor.substitute(parameters[paramCount - 1].getType()).isAssignableFrom(lastArgType)) {
return true;
}
}
return false;
}
private boolean isPureCall() {
if (myTargetMethod == null) return false;
return ControlFlowAnalyzer.isPure(myTargetMethod);
}
@Nullable
public PsiType getResultType() {
return myType;
}
@NotNull
public PsiExpression[] getArgs() {
return myArgs;
}
public MethodType getMethodType() {
return myMethodType;
}
public boolean shouldFlushFields() {
return myShouldFlushFields;
}
@Nullable
public PsiMethod getTargetMethod() {
return myTargetMethod;
}
public boolean isVarArgCall() {
return myVarArgCall;
}
@Nullable
public Nullness getArgRequiredNullability(@NotNull PsiExpression arg) {
return myArgRequiredNullability.get(arg);
}
public List<MethodContract> getContracts() {
return myContracts;
}
@Override
public DfaInstructionState[] accept(DataFlowRunner runner, DfaMemoryState stateBefore, InstructionVisitor visitor) {
return visitor.visitMethodCall(this, runner, stateBefore);
}
@Nullable
public PsiCall getCallExpression() {
return myCall;
}
@NotNull
public PsiElement getContext() {
return myContext;
}
@Nullable
public DfaValue getPrecalculatedReturnValue() {
return myPrecalculatedReturnValue;
}
public String toString() {
return myMethodType == MethodType.UNBOXING
? "UNBOX"
: myMethodType == MethodType.BOXING
? "BOX" :
"CALL_METHOD: " + (myCall == null ? "null" : myCall.getText());
}
public boolean updateOfNullable(DfaMemoryState memState, DfaValue arg) {
if (!myOfNullable) return false;
if (!memState.isNotNull(arg)) {
myOnlyNotNullArgs = false;
}
if (!memState.isNull(arg)) {
myOnlyNullArgs = false;
}
return true;
}
public boolean isOptionalAlwaysNullProblem() {
return myOfNullable && myOnlyNullArgs;
}
public boolean isOptionalAlwaysNotNullProblem() {
return myOfNullable && myOnlyNotNullArgs;
}
}
| idea4bsd/idea4bsd | java/java-analysis-impl/src/com/intellij/codeInspection/dataFlow/instructions/MethodCallInstruction.java | Java | apache-2.0 | 7,013 |
(function () {
var pigKeywordsU = pigKeywordsL = pigTypesU = pigTypesL = pigBuiltinsU = pigBuiltinsL = [];
var mimeMode = CodeMirror.mimeModes['text/x-pig'];
Object.keys(mimeMode.keywords).forEach( function(w) {
pigKeywordsU.push(w.toUpperCase());
pigKeywordsL.push(w.toLowerCase());
});
Object.keys(mimeMode.types).forEach( function(w) {
pigTypesU.push(w.toUpperCase());
pigTypesL.push(w.toLowerCase());
});
Object.keys(mimeMode.builtins).forEach( function(w) {
pigBuiltinsU.push(w.toUpperCase());
pigBuiltinsL.push(w.toLowerCase());
});
function forEach(arr, f) {
for (var i = 0, e = arr.length; i < e; ++i) {
f(arr[i]);
}
}
function arrayContains(arr, item) {
if (!Array.prototype.indexOf) {
var i = arr.length;
while (i--) {
if (arr[i] === item) {
return true;
}
}
return false;
}
return arr.indexOf(item) != -1;
}
function scriptHint(editor, keywords, getToken) {
// Find the token at the cursor
var cur = editor.getCursor(), token = getToken(editor, cur), tprop = token;
// If it's not a 'word-style' token, ignore the token.
if (!/^[\w$_]*$/.test(token.string)) {
token = tprop = {start: cur.ch, end: cur.ch, string: "", state: token.state,
type: token.string == ":" ? "pig-type" : null};
}
if (!context) var context = [];
context.push(tprop);
completionList = getCompletions(token, context);
completionList = completionList.sort();
return {list: completionList,
from: {line: cur.line, ch: token.start},
to: {line: cur.line, ch: token.end}};
}
function toTitleCase(str) {
return str.replace(/(?:^|\s)\w/g, function(match) {
return match.toUpperCase();
});
}
function getCompletions(token, context) {
var found = [], start = token.string;
function maybeAdd(str) {
if (str.indexOf(start) == 0 && !arrayContains(found, str)) found.push(str);
}
function gatherCompletions(obj) {
if(obj == ":") {
forEach(pigTypesL, maybeAdd);
}
else {
forEach(pigBuiltinsU, maybeAdd);
forEach(pigBuiltinsL, maybeAdd);
forEach(pigTypesU, maybeAdd);
forEach(pigTypesL, maybeAdd);
forEach(pigKeywordsU, maybeAdd);
forEach(pigKeywordsL, maybeAdd);
}
}
if (context) {
// If this is a property, see if it belongs to some object we can
// find in the current environment.
var obj = context.pop(), base;
if (obj.type == "pig-word")
base = obj.string;
else if(obj.type == "pig-type")
base = ":" + obj.string;
while (base != null && context.length)
base = base[context.pop().string];
if (base != null) gatherCompletions(base);
}
return found;
}
CodeMirror.registerHelper("hint", "pig", function(cm, options) {
return scriptHint(cm, pigKeywordsU, function (e, cur) {return e.getTokenAt(cur);});
});
})();
| radicalbit/ambari | contrib/views/pig/src/main/resources/ui/pig-web/vendor/pig-hint.js | JavaScript | apache-2.0 | 3,053 |
import {removeElement} from '#core/dom';
import {Layout_Enum, applyFillContent} from '#core/dom/layout';
import {Services} from '#service';
import {userAssert} from '#utils/log';
import {TAG as KEY_TAG} from './amp-embedly-key';
import {getIframe} from '../../../src/3p-frame';
import {listenFor} from '../../../src/iframe-helper';
/**
* Component tag identifier.
* @const {string}
*/
export const TAG = 'amp-embedly-card';
/**
* Attribute name used to set api key with name
* expected by embedly.
* @const {string}
*/
const API_KEY_ATTR_NAME = 'data-card-key';
/**
* Implementation of the amp-embedly-card component.
* See {@link ../amp-embedly-card.md} for the spec.
*/
export class AmpEmbedlyCard extends AMP.BaseElement {
/** @param {!AmpElement} element */
constructor(element) {
super(element);
/** @private {?HTMLIFrameElement} */
this.iframe_ = null;
/** @private {?string} */
this.apiKey_ = null;
}
/** @override */
buildCallback() {
userAssert(
this.element.getAttribute('data-url'),
'The data-url attribute is required for <%s> %s',
TAG,
this.element
);
const ampEmbedlyKeyElement = document.querySelector(KEY_TAG);
if (ampEmbedlyKeyElement) {
this.apiKey_ = ampEmbedlyKeyElement.getAttribute('value');
}
}
/** @override */
layoutCallback() {
// Add optional paid api key attribute if provided
// to remove embedly branding.
if (this.apiKey_) {
this.element.setAttribute(API_KEY_ATTR_NAME, this.apiKey_);
}
const iframe = getIframe(this.win, this.element, 'embedly');
iframe.title = this.element.title || 'Embedly card';
const opt_is3P = true;
listenFor(
iframe,
'embed-size',
(data) => {
this.forceChangeHeight(data['height']);
},
opt_is3P
);
applyFillContent(iframe);
this.getVsync().mutate(() => {
this.element.appendChild(iframe);
});
this.iframe_ = iframe;
return this.loadPromise(iframe);
}
/** @override */
unlayoutCallback() {
if (this.iframe_) {
removeElement(this.iframe_);
this.iframe_ = null;
}
return true;
}
/** @override */
isLayoutSupported(layout) {
return layout == Layout_Enum.RESPONSIVE;
}
/**
* @param {boolean=} opt_onLayout
* @override
*/
preconnectCallback(opt_onLayout) {
Services.preconnectFor(this.win).url(
this.getAmpDoc(),
'https://cdn.embedly.com',
opt_onLayout
);
}
}
| media-net/amphtml | extensions/amp-embedly-card/0.1/amp-embedly-card-impl.js | JavaScript | apache-2.0 | 2,517 |
package com.google.api.ads.dfp.jaxws.v201408;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
/**
*
* A {@code LiveStreamEvent} encapsulates all the information necessary
* to enable DAI (Dynamic Ad Insertion) into a live video stream.
*
* <p>This includes information such as the start and expected end time of
* the event, the URL of the actual content for DFP to pull and insert ads into,
* as well as the metadata necessary to generate ad requests during the event.
*
*
* <p>Java class for LiveStreamEvent complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="LiveStreamEvent">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="id" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/>
* <element name="name" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="description" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="status" type="{https://www.google.com/apis/ads/publisher/v201408}LiveStreamEventStatus" minOccurs="0"/>
* <element name="creationDateTime" type="{https://www.google.com/apis/ads/publisher/v201408}DateTime" minOccurs="0"/>
* <element name="lastModifiedDateTime" type="{https://www.google.com/apis/ads/publisher/v201408}DateTime" minOccurs="0"/>
* <element name="startDateTime" type="{https://www.google.com/apis/ads/publisher/v201408}DateTime" minOccurs="0"/>
* <element name="endDateTime" type="{https://www.google.com/apis/ads/publisher/v201408}DateTime" minOccurs="0"/>
* <element name="totalEstimatedConcurrentUsers" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/>
* <element name="contentUrls" type="{http://www.w3.org/2001/XMLSchema}string" maxOccurs="unbounded" minOccurs="0"/>
* <element name="adTags" type="{http://www.w3.org/2001/XMLSchema}string" maxOccurs="unbounded" minOccurs="0"/>
* <element name="liveStreamEventCode" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "LiveStreamEvent", propOrder = {
"id",
"name",
"description",
"status",
"creationDateTime",
"lastModifiedDateTime",
"startDateTime",
"endDateTime",
"totalEstimatedConcurrentUsers",
"contentUrls",
"adTags",
"liveStreamEventCode"
})
public class LiveStreamEvent {
protected Long id;
protected String name;
protected String description;
@XmlSchemaType(name = "string")
protected LiveStreamEventStatus status;
protected DateTime creationDateTime;
protected DateTime lastModifiedDateTime;
protected DateTime startDateTime;
protected DateTime endDateTime;
protected Long totalEstimatedConcurrentUsers;
protected List<String> contentUrls;
protected List<String> adTags;
protected String liveStreamEventCode;
/**
* Gets the value of the id property.
*
* @return
* possible object is
* {@link Long }
*
*/
public Long getId() {
return id;
}
/**
* Sets the value of the id property.
*
* @param value
* allowed object is
* {@link Long }
*
*/
public void setId(Long value) {
this.id = value;
}
/**
* Gets the value of the name property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getName() {
return name;
}
/**
* Sets the value of the name property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setName(String value) {
this.name = value;
}
/**
* Gets the value of the description property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getDescription() {
return description;
}
/**
* Sets the value of the description property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setDescription(String value) {
this.description = value;
}
/**
* Gets the value of the status property.
*
* @return
* possible object is
* {@link LiveStreamEventStatus }
*
*/
public LiveStreamEventStatus getStatus() {
return status;
}
/**
* Sets the value of the status property.
*
* @param value
* allowed object is
* {@link LiveStreamEventStatus }
*
*/
public void setStatus(LiveStreamEventStatus value) {
this.status = value;
}
/**
* Gets the value of the creationDateTime property.
*
* @return
* possible object is
* {@link DateTime }
*
*/
public DateTime getCreationDateTime() {
return creationDateTime;
}
/**
* Sets the value of the creationDateTime property.
*
* @param value
* allowed object is
* {@link DateTime }
*
*/
public void setCreationDateTime(DateTime value) {
this.creationDateTime = value;
}
/**
* Gets the value of the lastModifiedDateTime property.
*
* @return
* possible object is
* {@link DateTime }
*
*/
public DateTime getLastModifiedDateTime() {
return lastModifiedDateTime;
}
/**
* Sets the value of the lastModifiedDateTime property.
*
* @param value
* allowed object is
* {@link DateTime }
*
*/
public void setLastModifiedDateTime(DateTime value) {
this.lastModifiedDateTime = value;
}
/**
* Gets the value of the startDateTime property.
*
* @return
* possible object is
* {@link DateTime }
*
*/
public DateTime getStartDateTime() {
return startDateTime;
}
/**
* Sets the value of the startDateTime property.
*
* @param value
* allowed object is
* {@link DateTime }
*
*/
public void setStartDateTime(DateTime value) {
this.startDateTime = value;
}
/**
* Gets the value of the endDateTime property.
*
* @return
* possible object is
* {@link DateTime }
*
*/
public DateTime getEndDateTime() {
return endDateTime;
}
/**
* Sets the value of the endDateTime property.
*
* @param value
* allowed object is
* {@link DateTime }
*
*/
public void setEndDateTime(DateTime value) {
this.endDateTime = value;
}
/**
* Gets the value of the totalEstimatedConcurrentUsers property.
*
* @return
* possible object is
* {@link Long }
*
*/
public Long getTotalEstimatedConcurrentUsers() {
return totalEstimatedConcurrentUsers;
}
/**
* Sets the value of the totalEstimatedConcurrentUsers property.
*
* @param value
* allowed object is
* {@link Long }
*
*/
public void setTotalEstimatedConcurrentUsers(Long value) {
this.totalEstimatedConcurrentUsers = value;
}
/**
* Gets the value of the contentUrls property.
*
* <p>
* This accessor method returns a reference to the live list,
* not a snapshot. Therefore any modification you make to the
* returned list will be present inside the JAXB object.
* This is why there is not a <CODE>set</CODE> method for the contentUrls property.
*
* <p>
* For example, to add a new item, do as follows:
* <pre>
* getContentUrls().add(newItem);
* </pre>
*
*
* <p>
* Objects of the following type(s) are allowed in the list
* {@link String }
*
*
*/
public List<String> getContentUrls() {
if (contentUrls == null) {
contentUrls = new ArrayList<String>();
}
return this.contentUrls;
}
/**
* Gets the value of the adTags property.
*
* <p>
* This accessor method returns a reference to the live list,
* not a snapshot. Therefore any modification you make to the
* returned list will be present inside the JAXB object.
* This is why there is not a <CODE>set</CODE> method for the adTags property.
*
* <p>
* For example, to add a new item, do as follows:
* <pre>
* getAdTags().add(newItem);
* </pre>
*
*
* <p>
* Objects of the following type(s) are allowed in the list
* {@link String }
*
*
*/
public List<String> getAdTags() {
if (adTags == null) {
adTags = new ArrayList<String>();
}
return this.adTags;
}
/**
* Gets the value of the liveStreamEventCode property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getLiveStreamEventCode() {
return liveStreamEventCode;
}
/**
* Sets the value of the liveStreamEventCode property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setLiveStreamEventCode(String value) {
this.liveStreamEventCode = value;
}
}
| shyTNT/googleads-java-lib | modules/dfp_appengine/src/main/java/com/google/api/ads/dfp/jaxws/v201408/LiveStreamEvent.java | Java | apache-2.0 | 10,126 |
#
# Author:: Dan Crosta (<dcrosta@late.am>)
# Copyright:: Copyright (c) 2012 OpsCode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'spec_helper'
describe Chef::Provider::Group::Groupmod do
before do
@node = Chef::Node.new
@events = Chef::EventDispatch::Dispatcher.new
@run_context = Chef::RunContext.new(@node, {}, @events)
@new_resource = Chef::Resource::Group.new("wheel")
@new_resource.gid 123
@new_resource.members %w{lobster rage fist}
@new_resource.append false
@provider = Chef::Provider::Group::Groupmod.new(@new_resource, @run_context)
end
describe "manage_group" do
describe "when determining the current group state" do
it "should raise an error if the required binary /usr/sbin/group doesn't exist" do
File.should_receive(:exists?).with("/usr/sbin/group").and_return(false)
lambda { @provider.load_current_resource }.should raise_error(Chef::Exceptions::Group)
end
it "should raise an error if the required binary /usr/sbin/user doesn't exist" do
File.should_receive(:exists?).with("/usr/sbin/group").and_return(true)
File.should_receive(:exists?).with("/usr/sbin/user").and_return(false)
lambda { @provider.load_current_resource }.should raise_error(Chef::Exceptions::Group)
end
it "shouldn't raise an error if the required binaries exist" do
File.stub(:exists?).and_return(true)
lambda { @provider.load_current_resource }.should_not raise_error
end
end
describe "after the group's current state is known" do
before do
@current_resource = @new_resource.dup
@provider.current_resource = @current_resource
end
describe "when no group members are specified and append is not set" do
before do
@new_resource.append(false)
@new_resource.members([])
end
it "logs a message and sets group's members to 'none', then removes existing group members" do
Chef::Log.should_receive(:debug).with("group[wheel] setting group members to: none")
@provider.should_receive(:shell_out!).with("group mod -n wheel_bak wheel")
@provider.should_receive(:shell_out!).with("group add -g '123' -o wheel")
@provider.should_receive(:shell_out!).with("group del wheel_bak")
@provider.manage_group
end
end
describe "when no group members are specified and append is set" do
before do
@new_resource.append(true)
@new_resource.members([])
end
it "logs a message and does not modify group membership" do
Chef::Log.should_receive(:debug).with("group[wheel] not changing group members, the group has no members to add")
@provider.should_not_receive(:shell_out!)
@provider.manage_group
end
end
describe "when removing some group members" do
before do
@new_resource.append(false)
@new_resource.members(%w{ lobster })
end
it "updates group membership correctly" do
Chef::Log.stub(:debug)
@provider.should_receive(:shell_out!).with("group mod -n wheel_bak wheel")
@provider.should_receive(:shell_out!).with("user mod -G wheel lobster")
@provider.should_receive(:shell_out!).with("group add -g '123' -o wheel")
@provider.should_receive(:shell_out!).with("group del wheel_bak")
@provider.manage_group
end
end
end
end
describe "create_group" do
describe "when creating a new group" do
before do
@current_resource = Chef::Resource::Group.new("wheel")
@provider.current_resource = @current_resource
end
it "should run a group add command and some user mod commands" do
@provider.should_receive(:shell_out!).with("group add -g '123' wheel")
@provider.should_receive(:shell_out!).with("user mod -G wheel lobster")
@provider.should_receive(:shell_out!).with("user mod -G wheel rage")
@provider.should_receive(:shell_out!).with("user mod -G wheel fist")
@provider.create_group
end
end
end
describe "remove_group" do
describe "when removing an existing group" do
before do
@current_resource = @new_resource.dup
@provider.current_resource = @current_resource
end
it "should run a group del command" do
@provider.should_receive(:shell_out!).with("group del wheel")
@provider.remove_group
end
end
end
end
| sysbot/chef | spec/unit/provider/group/groupmod_spec.rb | Ruby | apache-2.0 | 5,123 |
/*
* Copyright 2017 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import static com.google.common.base.Predicates.not;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.truth.Truth.assertThat;
import static com.google.common.truth.Truth.assertWithMessage;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import com.google.javascript.rhino.jstype.FunctionType;
import com.google.javascript.rhino.jstype.JSType;
import com.google.javascript.rhino.jstype.NamedType;
import com.google.javascript.rhino.jstype.NoType;
import com.google.javascript.rhino.jstype.ObjectType;
import com.google.javascript.rhino.jstype.UnionType;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
* Tests exercising {@link CompilerOptions#assumeForwardDeclaredForMissingTypes} and {@link
* DiagnosticGroups#MISSING_SOURCES_WARNINGS}.
*/
@RunWith(JUnit4.class)
public class PartialCompilationTest {
private Compiler compiler;
/**
* Asserts that the given lines of code compile and only give errors matching the {@link
* DiagnosticGroups#MISSING_SOURCES_WARNINGS} category.
*/
private void assertPartialCompilationSucceeds(String... code) throws Exception {
compiler = new Compiler();
compiler.setErrorManager(
new BasicErrorManager() {
@Override
public void report(CheckLevel level, JSError error) {
super.report(CheckLevel.ERROR, error);
}
@Override
public void println(CheckLevel level, JSError error) {
/* no-op */
}
@Override
protected void printSummary() {
/* no-op */
}
});
CompilerOptions options = new CompilerOptions();
options.setAssumeForwardDeclaredForMissingTypes(true);
options.setStrictModeInput(true);
options.setPreserveDetailedSourceInfo(true);
CompilationLevel.ADVANCED_OPTIMIZATIONS.setOptionsForCompilationLevel(options);
compiler.init(
ImmutableList.of(),
Collections.singletonList(SourceFile.fromCode("input.js", Joiner.on('\n').join(code))),
options);
compiler.parse();
compiler.check();
ImmutableList<JSError> sourcesErrors =
compiler.getErrors().stream()
.filter(not(DiagnosticGroups.MISSING_SOURCES_WARNINGS::matches))
.collect(toImmutableList());
assertThat(sourcesErrors).isEmpty();
}
@Test
public void testUsesMissingCode() throws Exception {
assertPartialCompilationSucceeds(
"goog.provide('missing_code_user');",
"goog.require('some.thing.Missing');",
"missing_code_user.fnUsesMissingNs = function() {",
" missing_code_user.missingNamespace.foo();",
" missingTopLevelNamespace.bar();",
"};");
}
@Test
public void testMissingType_variable() throws Exception {
assertPartialCompilationSucceeds("/** @type {!some.thing.Missing} */ var foo;");
}
@Test
public void testMissingType_assignment() throws Exception {
assertPartialCompilationSucceeds(
"/** @type {!some.thing.Missing} */ var foo;", // line break
"/** @type {number} */ var bar = foo;");
}
@Test
public void testMissingRequire() throws Exception {
assertPartialCompilationSucceeds(
"goog.provide('missing_extends');", // line break
"goog.require('some.thing.Missing');");
}
@Test
public void testMissingExtends() throws Exception {
assertPartialCompilationSucceeds(
"goog.provide('missing_extends');",
"/** @constructor @extends {some.thing.Missing} */",
"missing_extends.Extends = function() {}");
}
@Test
public void testMissingExtends_template() throws Exception {
assertPartialCompilationSucceeds(
"goog.provide('missing_extends');",
"/** @constructor @extends {some.thing.Missing<string>} x */",
"missing_extends.Extends = function() {}");
}
@Test
public void testMissingType_typedefAlias() throws Exception {
assertPartialCompilationSucceeds("/** @typedef {string} */ var typedef;");
}
@Test
public void testMissingType_typedefField() throws Exception {
assertPartialCompilationSucceeds("/** @typedef {some.thing.Missing} */ var typedef;");
}
@Test
public void testMissingEs6Externs() throws Exception {
assertPartialCompilationSucceeds("let foo = {a, b};");
}
@Test
public void testUnresolvedGenerics() throws Exception {
assertPartialCompilationSucceeds(
"/** @type {!some.thing.Missing<string, !AlsoMissing<!More>>} */", "var x;");
TypedVar x = compiler.getTopScope().getSlot("x");
assertWithMessage("type %s", x.getType()).that(x.getType().isNoResolvedType()).isTrue();
NoType templatizedType = (NoType) x.getType();
assertThat(templatizedType.getReferenceName()).isEqualTo("some.thing.Missing");
ImmutableList<JSType> templateTypes = templatizedType.getTemplateTypes();
assertThat(templateTypes.get(0).isString()).isTrue();
assertThat(templateTypes.get(1).isObject()).isTrue();
ObjectType alsoMissing = (ObjectType) templateTypes.get(1);
assertThat(alsoMissing.getReferenceName()).isEqualTo("AlsoMissing");
assertThat(alsoMissing.getTemplateTypes()).hasSize(1);
ObjectType more = (ObjectType) alsoMissing.getTemplateTypes().get(0);
assertThat(more.getReferenceName()).isEqualTo("More");
}
@Test
public void testUnresolvedUnions() throws Exception {
assertPartialCompilationSucceeds("/** @type {some.thing.Foo|some.thing.Bar} */", "var x;");
TypedVar x = compiler.getTopScope().getSlot("x");
assertWithMessage("type %s", x.getType()).that(x.getType().isUnionType()).isTrue();
UnionType unionType = (UnionType) x.getType();
Collection<JSType> alternatives = unionType.getAlternates();
assertThat(alternatives).hasSize(3);
int nullTypeCount = 0;
List<String> namedTypes = new ArrayList<>();
for (JSType alternative : alternatives) {
assertThat(alternative.isNamedType() || alternative.isNullType()).isTrue();
if (alternative.isNamedType()) {
assertThat(alternative.isNoResolvedType()).isTrue();
namedTypes.add(((NamedType) alternative).getReferenceName());
}
if (alternative.isNullType()) {
nullTypeCount++;
}
}
assertThat(nullTypeCount).isEqualTo(1);
assertThat(namedTypes).containsExactly("some.thing.Foo", "some.thing.Bar");
}
@Test
public void testUnresolvedGenerics_defined() throws Exception {
assertPartialCompilationSucceeds(
"/** @param {!some.thing.Missing<string>} x */",
"function useMissing(x) {}",
"/** @const {!some.thing.Missing<string>} */",
"var x;",
"/** @constructor @template T */",
"some.thing.Missing = function () {}",
"function missingInside() {",
" useMissing(new some.thing.Missing());",
"}");
}
@Test
public void testUnresolvedBaseClassDoesNotHideFields() throws Exception {
assertPartialCompilationSucceeds(
"/** @constructor @extends {MissingBase} */",
"var Klass = function () {",
" /** @type {string} */",
" this.foo;",
"};");
TypedVar x = compiler.getTopScope().getSlot("Klass");
JSType type = x.getType();
assertThat(type.isFunctionType()).isTrue();
FunctionType fType = (FunctionType) type;
assertThat(fType.getTypeOfThis().hasProperty("foo")).isTrue();
}
}
| GoogleChromeLabs/chromeos_smart_card_connector | third_party/closure-compiler/src/test/com/google/javascript/jscomp/PartialCompilationTest.java | Java | apache-2.0 | 8,229 |
#
# Author:: Prajakta Purohit (prajakta@opscode.com)
# Copyright:: Copyright (c) 2012 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'spec_helper'
describe Chef::Provider::RegistryKey do
let(:value1) { { :name => "one", :type => :string, :data => "1" } }
let(:key_path) { 'HKCU\Software\OpscodeNumbers' }
let(:key) { 'Software\OpscodeNumbers' }
let(:key_parent) { 'Software' }
let(:key_to_delete) { 'OpscodeNumbers' }
let(:sub_key) {'OpscodePrimes'}
let(:missing_key_path) {'HKCU\Software'}
before(:each) do
Chef::Win32::Registry.any_instance.stub(:machine_architecture).and_return(:x86_64)
@registry = Chef::Win32::Registry.new()
#Making the values for registry constants available on unix
Object.send(:remove_const, 'Win32') if defined?(Win32)
Win32 = Module.new
Win32::Registry = Class.new
Win32::Registry::KEY_SET_VALUE = 0x0002
Win32::Registry::KEY_QUERY_VALUE = 0x0001
Win32::Registry::KEY_WRITE = 0x00020000 | 0x0002 | 0x0004
Win32::Registry::KEY_READ = 0x00020000 | 0x0001 | 0x0008 | 0x0010
Win32::Registry::Error = Class.new(RuntimeError)
@hive_mock = double("::Win32::Registry::HKEY_CURRENT_USER")
@reg_mock = double("reg")
end
describe "get_values" do
it "gets all values for a key if the key exists" do
@registry.should_receive(:get_hive_and_key).with(key_path).and_return([@hive_mock, key])
@registry.should_receive(:key_exists!).with(key_path).and_return(true)
@hive_mock.should_receive(:open).with(key, ::Win32::Registry::KEY_READ | @registry.registry_system_architecture).and_yield(@reg_mock)
@reg_mock.should_receive(:map)
@registry.get_values(key_path)
end
it "throws an exception if key does not exist" do
@registry.should_receive(:get_hive_and_key).with(key_path).and_return([@hive_mock, key])
@registry.should_receive(:key_exists!).with(key_path).and_raise(Chef::Exceptions::Win32RegKeyMissing)
lambda{@registry.get_values(key_path)}.should raise_error(Chef::Exceptions::Win32RegKeyMissing)
end
end
describe "set_value" do
it "does nothing if key and hive and value exist" do
@registry.should_receive(:key_exists!).with(key_path).and_return(true)
@registry.should_receive(:get_hive_and_key).with(key_path).and_return([@hive_mock, key])
@registry.should_receive(:value_exists?).with(key_path, value1).and_return(true)
@registry.should_receive(:data_exists?).with(key_path, value1).and_return(true)
@registry.set_value(key_path, value1)
end
it "updates value if key and hive and value exist, but data is different" do
@registry.should_receive(:key_exists!).with(key_path).and_return(true)
@registry.should_receive(:get_hive_and_key).with(key_path).and_return([@hive_mock, key])
@registry.should_receive(:value_exists?).with(key_path, value1).and_return(true)
@registry.should_receive(:data_exists?).with(key_path, value1).and_return(false)
@hive_mock.should_receive(:open).with(key, Win32::Registry::KEY_SET_VALUE | ::Win32::Registry::KEY_QUERY_VALUE | @registry.registry_system_architecture).and_yield(@reg_mock)
@registry.should_receive(:get_type_from_name).with(:string).and_return(1)
@reg_mock.should_receive(:write).with("one", 1, "1")
@registry.set_value(key_path, value1)
end
it "creates value if the key exists and the value does not exist" do
@registry.should_receive(:key_exists!).with(key_path).and_return(true)
@registry.should_receive(:get_hive_and_key).with(key_path).and_return([@hive_mock, key])
@registry.should_receive(:value_exists?).with(key_path, value1).and_return(false)
@hive_mock.should_receive(:open).with(key, ::Win32::Registry::KEY_SET_VALUE | ::Win32::Registry::KEY_QUERY_VALUE | @registry.registry_system_architecture).and_yield(@reg_mock)
@registry.should_receive(:get_type_from_name).with(:string).and_return(1)
@reg_mock.should_receive(:write).with("one", 1, "1")
@registry.set_value(key_path, value1)
end
it "should raise an exception if the key does not exist" do
@registry.should_receive(:key_exists!).with(key_path).and_raise(Chef::Exceptions::Win32RegKeyMissing)
lambda {@registry.set_value(key_path, value1)}.should raise_error(Chef::Exceptions::Win32RegKeyMissing)
end
end
describe "delete_value" do
it "deletes value if value exists" do
@registry.should_receive(:value_exists?).with(key_path, value1).and_return(true)
@registry.should_receive(:get_hive_and_key).with(key_path).and_return([@hive_mock, key])
@hive_mock.should_receive(:open).with(key, ::Win32::Registry::KEY_SET_VALUE | @registry.registry_system_architecture).and_yield(@reg_mock)
@reg_mock.should_receive(:delete_value).with("one").and_return(true)
@registry.delete_value(key_path, value1)
end
it "raises an exception if the key does not exist" do
@registry.should_receive(:value_exists?).with(key_path, value1).and_return(true)
@registry.should_receive(:get_hive_and_key).with(key_path).and_raise(Chef::Exceptions::Win32RegKeyMissing)
@registry.delete_value(key_path, value1)
end
it "does nothing if the value does not exist" do
@registry.should_receive(:value_exists?).with(key_path, value1).and_return(false)
@registry.delete_value(key_path, value1)
end
end
describe "create_key" do
it "creates key if intermediate keys are missing and recursive is set to true" do
@registry.should_receive(:keys_missing?).with(key_path).and_return(true)
@registry.should_receive(:create_missing).with(key_path)
@registry.should_receive(:key_exists?).with(key_path).and_return(false)
@registry.should_receive(:get_hive_and_key).with(key_path).and_return([@hive_mock, key])
@hive_mock.should_receive(:create).with(key, ::Win32::Registry::KEY_WRITE | @registry.registry_system_architecture)
@registry.create_key(key_path, true)
end
it "raises an exception if intermediate keys are missing and recursive is set to false" do
@registry.should_receive(:keys_missing?).with(key_path).and_return(true)
lambda{@registry.create_key(key_path, false)}.should raise_error(Chef::Exceptions::Win32RegNoRecursive)
end
it "does nothing if the key exists" do
@registry.should_receive(:keys_missing?).with(key_path).and_return(true)
@registry.should_receive(:create_missing).with(key_path)
@registry.should_receive(:key_exists?).with(key_path).and_return(true)
@registry.create_key(key_path, true)
end
it "create key if intermediate keys not missing and recursive is set to false" do
@registry.should_receive(:keys_missing?).with(key_path).and_return(false)
@registry.should_receive(:key_exists?).with(key_path).and_return(false)
@registry.should_receive(:get_hive_and_key).with(key_path).and_return([@hive_mock, key])
@hive_mock.should_receive(:create).with(key, ::Win32::Registry::KEY_WRITE | @registry.registry_system_architecture)
@registry.create_key(key_path, false)
end
it "create key if intermediate keys not missing and recursive is set to true" do
@registry.should_receive(:keys_missing?).with(key_path).and_return(false)
@registry.should_receive(:key_exists?).with(key_path).and_return(false)
@registry.should_receive(:get_hive_and_key).with(key_path).and_return([@hive_mock, key])
@hive_mock.should_receive(:create).with(key, ::Win32::Registry::KEY_WRITE | @registry.registry_system_architecture)
@registry.create_key(key_path, true)
end
end
describe "delete_key", :windows_only do
it "deletes key if it has subkeys and recursive is set to true" do
@registry.should_receive(:key_exists?).with(key_path).and_return(true)
@registry.should_receive(:get_hive_and_key).with(key_path).and_return([@hive_mock, key])
@registry.should_receive(:has_subkeys?).with(key_path).and_return(true)
@registry.should_receive(:get_subkeys).with(key_path).and_return([sub_key])
@registry.should_receive(:key_exists?).with(key_path+"\\"+sub_key).and_return(true)
@registry.should_receive(:get_hive_and_key).with(key_path+"\\"+sub_key).and_return([@hive_mock, key+"\\"+sub_key])
@registry.should_receive(:has_subkeys?).with(key_path+"\\"+sub_key).and_return(false)
@registry.should_receive(:delete_key_ex).twice
@registry.delete_key(key_path, true)
end
it "raises an exception if it has subkeys but recursive is set to false" do
@registry.should_receive(:key_exists?).with(key_path).and_return(true)
@registry.should_receive(:get_hive_and_key).with(key_path).and_return([@hive_mock, key])
@registry.should_receive(:has_subkeys?).with(key_path).and_return(true)
lambda{@registry.delete_key(key_path, false)}.should raise_error(Chef::Exceptions::Win32RegNoRecursive)
end
it "deletes key if the key exists and has no subkeys" do
@registry.should_receive(:key_exists?).with(key_path).and_return(true)
@registry.should_receive(:get_hive_and_key).with(key_path).and_return([@hive_mock, key])
@registry.should_receive(:has_subkeys?).with(key_path).and_return(false)
@registry.should_receive(:delete_key_ex)
@registry.delete_key(key_path, true)
end
end
describe "key_exists?" do
it "returns true if key_exists" do
@registry.should_receive(:get_hive_and_key).with(key_path).and_return([@hive_mock, key])
@hive_mock.should_receive(:open).with(key, ::Win32::Registry::KEY_READ | @registry.registry_system_architecture).and_yield(@reg_mock)
@registry.key_exists?(key_path).should == true
end
it "returns false if key does not exist" do
@registry.should_receive(:get_hive_and_key).with(key_path).and_return([@hive_mock, key])
@hive_mock.should_receive(:open).with(key, ::Win32::Registry::KEY_READ | @registry.registry_system_architecture).and_raise(::Win32::Registry::Error)
@registry.key_exists?(key_path).should == false
end
end
describe "key_exists!" do
it "throws an exception if the key_parent does not exist" do
@registry.should_receive(:key_exists?).with(key_path).and_return(false)
lambda{@registry.key_exists!(key_path)}.should raise_error(Chef::Exceptions::Win32RegKeyMissing)
end
end
describe "hive_exists?" do
it "returns true if the hive exists" do
@registry.should_receive(:get_hive_and_key).with(key_path).and_return([@hive_mock, key])
@registry.hive_exists?(key_path) == true
end
it "returns false if the hive does not exist" do
@registry.should_receive(:get_hive_and_key).with(key_path).and_raise(Chef::Exceptions::Win32RegHiveMissing)
@registry.hive_exists?(key_path) == false
end
end
describe "has_subkeys?" do
it "returns true if the key has subkeys" do
@registry.should_receive(:key_exists!).with(key_path).and_return(true)
@registry.should_receive(:get_hive_and_key).with(key_path).and_return([@hive_mock, key])
@hive_mock.should_receive(:open).with(key, ::Win32::Registry::KEY_READ | @registry.registry_system_architecture).and_yield(@reg_mock)
@reg_mock.should_receive(:each_key).and_yield(key)
@registry.has_subkeys?(key_path) == true
end
it "returns false if the key does not have subkeys" do
@registry.should_receive(:key_exists!).with(key_path).and_return(true)
@registry.should_receive(:get_hive_and_key).with(key_path).and_return([@hive_mock, key])
@hive_mock.should_receive(:open).with(key, ::Win32::Registry::KEY_READ | @registry.registry_system_architecture).and_yield(@reg_mock)
@reg_mock.should_receive(:each_key).and_return(no_args())
@registry.has_subkeys?(key_path).should == false
end
it "throws an exception if the key does not exist" do
@registry.should_receive(:key_exists!).with(key_path).and_raise(Chef::Exceptions::Win32RegKeyMissing)
lambda {@registry.set_value(key_path, value1)}.should raise_error(Chef::Exceptions::Win32RegKeyMissing)
end
end
describe "get_subkeys" do
it "returns the subkeys if they exist" do
@registry.should_receive(:key_exists!).with(key_path).and_return(true)
@registry.should_receive(:get_hive_and_key).with(key_path).and_return([@hive_mock, key])
@hive_mock.should_receive(:open).with(key, ::Win32::Registry::KEY_READ | @registry.registry_system_architecture).and_yield(@reg_mock)
@reg_mock.should_receive(:each_key).and_yield(sub_key)
@registry.get_subkeys(key_path)
end
end
describe "value_exists?" do
it "throws an exception if the key does not exist" do
@registry.should_receive(:key_exists!).with(key_path).and_raise(Chef::Exceptions::Win32RegKeyMissing)
lambda {@registry.value_exists?(key_path, value1)}.should raise_error(Chef::Exceptions::Win32RegKeyMissing)
end
it "returns true if the value exists" do
@registry.should_receive(:key_exists!).with(key_path).and_return(true)
@registry.should_receive(:get_hive_and_key).with(key_path).and_return([@hive_mock, key])
@hive_mock.should_receive(:open).with(key, ::Win32::Registry::KEY_READ | @registry.registry_system_architecture).and_yield(@reg_mock)
@reg_mock.should_receive(:any?).and_yield("one")
@registry.value_exists?(key_path, value1) == true
end
it "returns false if the value does not exist" do
@registry.should_receive(:key_exists!).with(key_path).and_return(true)
@registry.should_receive(:get_hive_and_key).with(key_path).and_return([@hive_mock, key])
@hive_mock.should_receive(:open).with(key, ::Win32::Registry::KEY_READ | @registry.registry_system_architecture).and_yield(@reg_mock)
@reg_mock.should_receive(:any?).and_yield(no_args())
@registry.value_exists?(key_path, value1) == false
end
end
describe "data_exists?" do
it "throws an exception if the key does not exist" do
@registry.should_receive(:key_exists!).with(key_path).and_raise(Chef::Exceptions::Win32RegKeyMissing)
lambda {@registry.data_exists?(key_path, value1)}.should raise_error(Chef::Exceptions::Win32RegKeyMissing)
end
it "returns true if the data exists" do
@registry.should_receive(:key_exists!).with(key_path).and_return(true)
@registry.should_receive(:get_hive_and_key).with(key_path).and_return([@hive_mock, key])
@registry.should_receive(:get_type_from_name).with(:string).and_return(1)
@reg_mock.should_receive(:each).with(no_args()).and_yield("one", 1, "1")
@hive_mock.should_receive(:open).with(key, ::Win32::Registry::KEY_READ | @registry.registry_system_architecture).and_yield(@reg_mock)
@registry.data_exists?(key_path, value1).should == true
end
it "returns false if the data does not exist" do
@registry.should_receive(:key_exists!).with(key_path).and_return(true)
@registry.should_receive(:get_hive_and_key).with(key_path).and_return([@hive_mock, key])
@hive_mock.should_receive(:open).with(key, ::Win32::Registry::KEY_READ | @registry.registry_system_architecture).and_yield(@reg_mock)
@registry.should_receive(:get_type_from_name).with(:string).and_return(1)
@reg_mock.should_receive(:each).with(no_args()).and_yield("one", 1, "2")
@registry.data_exists?(key_path, value1).should == false
end
end
describe "value_exists!" do
it "does nothing if the value exists" do
@registry.should_receive(:value_exists?).with(key_path, value1).and_return(true)
@registry.value_exists!(key_path, value1)
end
it "throws an exception if the value does not exist" do
@registry.should_receive(:value_exists?).with(key_path, value1).and_return(false)
lambda{@registry.value_exists!(key_path, value1)}.should raise_error(Chef::Exceptions::Win32RegValueMissing)
end
end
describe "data_exists!" do
it "does nothing if the data exists" do
@registry.should_receive(:data_exists?).with(key_path, value1).and_return(true)
@registry.data_exists!(key_path, value1)
end
it "throws an exception if the data does not exist" do
@registry.should_receive(:data_exists?).with(key_path, value1).and_return(false)
lambda{@registry.data_exists!(key_path, value1)}.should raise_error(Chef::Exceptions::Win32RegDataMissing)
end
end
describe "type_matches?" do
it "returns true if type matches" do
@registry.should_receive(:value_exists!).with(key_path, value1).and_return(true)
@registry.should_receive(:get_hive_and_key).with(key_path).and_return([@hive_mock, key])
@hive_mock.should_receive(:open).with(key, ::Win32::Registry::KEY_READ | @registry.registry_system_architecture).and_yield(@reg_mock)
@registry.should_receive(:get_type_from_name).with(:string).and_return(1)
@reg_mock.should_receive(:each).and_yield("one", 1)
@registry.type_matches?(key_path, value1).should == true
end
it "returns false if type does not match" do
@registry.should_receive(:value_exists!).with(key_path, value1).and_return(true)
@registry.should_receive(:get_hive_and_key).with(key_path).and_return([@hive_mock, key])
@hive_mock.should_receive(:open).with(key, ::Win32::Registry::KEY_READ | @registry.registry_system_architecture).and_yield(@reg_mock)
@reg_mock.should_receive(:each).and_yield("two", 2)
@registry.type_matches?(key_path, value1).should == false
end
it "throws an exception if value does not exist" do
@registry.should_receive(:value_exists?).with(key_path, value1).and_return(false)
lambda{@registry.type_matches?(key_path, value1)}.should raise_error(Chef::Exceptions::Win32RegValueMissing)
end
end
describe "type_matches!" do
it "does nothing if the type_matches" do
@registry.should_receive(:type_matches?).with(key_path, value1).and_return(true)
@registry.type_matches!(key_path, value1)
end
it "throws an exception if the type does not match" do
@registry.should_receive(:type_matches?).with(key_path, value1).and_return(false)
lambda{@registry.type_matches!(key_path, value1)}.should raise_error(Chef::Exceptions::Win32RegTypesMismatch)
end
end
describe "keys_missing?" do
it "returns true if the keys are missing" do
@registry.should_receive(:key_exists?).with(missing_key_path).and_return(false)
@registry.keys_missing?(key_path).should == true
end
it "returns false if no keys in the path are missing" do
@registry.should_receive(:key_exists?).with(missing_key_path).and_return(true)
@registry.keys_missing?(key_path).should == false
end
end
end
| ccope/chef | spec/unit/registry_helper_spec.rb | Ruby | apache-2.0 | 19,296 |
#!/usr/bin/env jruby
require 'jrubyfx'
##
# Example showing a tree_view and also how extensively you can tailor the
# behavior of a tree_view. In this example we allow you to drag and drop
# tree_items around. There are three things you can see in d&d here:
# 1. drag onto other tree item will put it as a child in that tree item
# 2. drag and drop to rearrange items
# 3. drag and drop into another window...it should paste the contents there
#
# This example also allows you to edit your tree and change the text of
# existing contents. Just double click the item to edit it and hit escape
# to cancel or return to save the new name.
#
# TreeCell subclass supporting in-place editing and drag-and-drop of tree
# items. The vertical position of the cursor over a cell decides the drop
# action: the middle band drops INTO the item, the top/bottom edges
# rearrange the dragged item next to it.
class DraggableTreeCell < Java::javafx::scene::control::TreeCell
include JRubyFX::DSL
# Fraction of the cell height (top and bottom) treated as the "rearrange"
# zone; the middle 50% counts as "drop into this item".
SELECTION_PERCENT = 0.25
# Drag state (item being dragged, pending drop action :drop_into or
# :rearrange) is shared across all cells via class-level accessors.
class << self
attr_accessor :drag_item, :drop_type
end
def initialize()
super
# While a drag hovers over this cell: show an inner shadow for a
# drop-into target, record the pending action, and accept MOVE transfers
# (unless dropping onto the dragged item's own subtree or parent).
set_on_drag_over do |event|
if !child_of_target? && !over_self?
if drop_into_range? y_percentage(event)
set_effect inner_shadow(offset_x: 1.0)
self.class.drop_type = :drop_into
else
set_effect nil
self.class.drop_type = :rearrange
end
event.accept_transfer_modes :move
end
end
# Start of a drag: put the item's text on the dragboard so it can also be
# pasted into other windows/applications, and remember the dragged item.
set_on_drag_detected do |event|
drag_item = tree_item
if drag_item
content = clipboard_content { put_string drag_item.value }
tree_view.start_drag_and_drop(TransferMode::MOVE).set_content content
self.class.drag_item = drag_item
end
event.consume
end
# Completion: perform whichever action the last drag-over computed, then
# clear the shared drag state.
set_on_drag_dropped do |event|
if drag_item && tree_item
drop_into if drop_type == :drop_into
rearrange(event) if drop_type == :rearrange
self.class.drag_item = nil
event.drop_completed = true
end
event.consume
end
# Remove the drop-into highlight when the drag leaves the cell.
set_on_drag_exited do |event|
set_effect nil
end
end
# Vertical position of the event within this cell as a 0..1 fraction.
def y_percentage(event)
y = event.scene_y - local_to_scene(0, 0).y
y == 0 ? 0 : y / height
end
# True when +parent+ (default: this cell's item) is the dragged item or
# one of its descendants -- prevents dropping an item into its own subtree.
def child_of_target?(parent = tree_item)
return true if drag_item == parent
return false if !parent || !parent.parent
child_of_target?(parent.parent)
end
# Re-parent the dragged item as a child of this cell's item and expand it
# so the drop result is visible.
def drop_into
if !child_of_target? && !over_self?
drag_item.parent.children.remove(drag_item)
tree_item.children.add(drag_item)
tree_item.expanded = true
end
end
# Middle band of the cell counts as "drop into".
def drop_into_range?(percent)
percent >= SELECTION_PERCENT && percent <= (1-SELECTION_PERCENT)
end
# True when this cell's item is the dragged item's current parent.
def over_self?
drag_item.parent == tree_item
end
# Standard TreeCell refresh hook: show the editor while editing, otherwise
# the item's text and graphic (nothing for an empty cell).
def updateItem(item, empty)
super(item, empty);
if empty
set_text nil
set_graphic nil
else
if editing?
@text_field.text = get_string if @text_field
set_text nil
set_graphic @text_field
else
set_text get_string
set_graphic tree_item.graphic
end
end
end
# Instance-side readers for the class-level drag state.
def drag_item
self.class.drag_item
end
def drop_type
self.class.drop_type
end
# Insert the dragged item before/after this cell's item under the same
# parent; a drop on the tree root inserts at position 0.
def rearrange(event)
parent = tree_item.parent
unless parent # root of tree view
parent = tree_item
where = 0
end
drag_item.parent.children.remove(drag_item)
saved_items = parent.children.to_a
unless where # where already deduced from root being view_item
where = saved_items.find_index { |e| e == tree_item }
# Below the top band means "insert after" the target item.
where += 1 if y_percentage(event) > SELECTION_PERCENT
end
if (where >= saved_items.size)
parent.children.add(drag_item)
else
# NOTE(review): overwrite-and-shift re-inserts saved_items one slot
# down from +where+ -- presumably equivalent to an insert-at(where);
# verify the edge cases.
parent.children.set(where, drag_item)
where.upto(saved_items.size - 2) do |i|
parent.children.set(i+1, saved_items[i])
end
parent.children.add(saved_items[saved_items.size - 1])
end
end
#### These methods are part of the code to make the tree editable
# Swap the cell's text for a lazily-created TextField and select its text.
def startEdit
super
create_text_field unless @text_field
set_text nil
set_graphic @text_field
@text_field.select_all
end
# Restore the original text/graphic when editing is abandoned (ESC).
def cancelEdit
super
set_text get_item
set_graphic tree_item.graphic
end
def get_string
get_item ? get_item.to_s : ''
end
# Build the in-place editor: ENTER commits the new name, ESCAPE cancels.
def create_text_field
@text_field = TextField.new(get_string)
@text_field.set_on_key_released do |event|
if event.code == KeyCode::ENTER
commitEdit(@text_field.text)
elsif event.code == KeyCode::ESCAPE
cancelEdit
end
end
end
end
# Minimal JRubyFX application: a 300x300 stage containing an editable tree
# view whose cells come from DraggableTreeCell.
class SimpleTreeView < JRubyFX::Application
def start(stage)
with(stage, width: 300, height: 300, title: 'Simple Tree View') do
layout_scene(:blue) do
stack_pane(padding: insets(30)) do
# cell_factory is invoked per cell, so it must return a fresh instance.
tree_view(editable: true, cell_factory: proc { DraggableTreeCell.new}) do
tree_item("Root") do
5.times {|i| tree_item "File #{i}" }
end
end
end
end
show
end
end
end
# Start the JavaFX application (blocks until the window closes).
SimpleTreeView.launch
| monkstone/jrubyfx | samples/javafx/tree_view.rb | Ruby | apache-2.0 | 4,812 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.core.datastore.page.encoding.dimension.legacy;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.apache.carbondata.core.datastore.columnar.BlockIndexerStorage;
import org.apache.carbondata.core.datastore.columnar.BlockIndexerStorageForNoInvertedIndexForShort;
import org.apache.carbondata.core.datastore.columnar.BlockIndexerStorageForShort;
import org.apache.carbondata.core.datastore.compression.Compressor;
import org.apache.carbondata.core.datastore.compression.CompressorFactory;
import org.apache.carbondata.core.datastore.page.ColumnPage;
import org.apache.carbondata.core.datastore.page.encoding.ColumnPageEncoder;
import org.apache.carbondata.core.util.ByteUtil;
import org.apache.carbondata.format.Encoding;
/**
 * Codec for a high-cardinality dictionary dimension column page. The page is
 * run through a block indexer (row-id sorted when an inverted index is
 * requested), flattened, and compressed with the column's configured
 * compressor.
 */
public class HighCardDictDimensionIndexCodec extends IndexStorageCodec {

  /** True when this column is of varchar (long string) data type. */
  private final boolean isVarcharType;

  public HighCardDictDimensionIndexCodec(boolean isSort, boolean isInvertedIndex,
      boolean isVarcharType) {
    super(isSort, isInvertedIndex);
    this.isVarcharType = isVarcharType;
  }

  @Override
  public String getName() {
    return "HighCardDictDimensionIndexCodec";
  }

  @Override
  public ColumnPageEncoder createEncoder(Map<String, String> parameter) {
    return new IndexStorageEncoder() {

      @Override
      protected void encodeIndexStorage(ColumnPage input) {
        byte[][] bytePages = input.getByteArrayPage();
        boolean dictPage = input.isLocalDictGeneratedPage();
        // With an inverted index the storage also produces a row-id page;
        // otherwise only the plain (optionally RLE'd) data page is built.
        BlockIndexerStorage<byte[][]> storage = isInvertedIndex
            ? new BlockIndexerStorageForShort(bytePages, dictPage, !dictPage, isSort)
            : new BlockIndexerStorageForNoInvertedIndexForShort(bytePages, dictPage);
        byte[] flattenedPage = ByteUtil.flatten(storage.getDataPage());
        Compressor compressor =
            CompressorFactory.getInstance().getCompressor(input.getColumnCompressorName());
        super.compressedDataPage = compressor.compressByte(flattenedPage);
        super.indexStorage = storage;
      }

      @Override
      protected List<Encoding> getEncodingList() {
        List<Encoding> encodings = new ArrayList<>();
        // Varchar columns advertise the long-string encoding; otherwise an
        // inverted index is advertised only when a row-id page was produced.
        if (isVarcharType) {
          encodings.add(Encoding.DIRECT_COMPRESS_VARCHAR);
        } else if (indexStorage.getRowIdPageLengthInBytes() > 0) {
          encodings.add(Encoding.INVERTED_INDEX);
        }
        if (indexStorage.getDataRlePageLengthInBytes() > 0) {
          encodings.add(Encoding.RLE);
        }
        return encodings;
      }
    };
  }
}
| ravipesala/incubator-carbondata | core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/dimension/legacy/HighCardDictDimensionIndexCodec.java | Java | apache-2.0 | 3,503 |
package org.zstack.sdk;
import org.zstack.sdk.ImageInventory;
/**
 * Result payload of the backup-storage image migration API call, carrying the
 * inventory of the migrated image.
 */
public class BackupStorageMigrateImageResult {
    /** Inventory of the migrated image; populated by the API layer. */
    public ImageInventory inventory;

    public ImageInventory getInventory() {
        return inventory;
    }

    public void setInventory(ImageInventory inventory) {
        this.inventory = inventory;
    }
}
| zstackorg/zstack | sdk/src/main/java/org/zstack/sdk/BackupStorageMigrateImageResult.java | Java | apache-2.0 | 330 |
/*
*
* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is Rhino code, released
* May 6, 1999.
*
* The Initial Developer of the Original Code is
* Netscape Communications Corporation.
* Portions created by the Initial Developer are Copyright (C) 1997-1999
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
* Bob Jervis
* Google Inc.
*
* Alternatively, the contents of this file may be used under the terms of
* the GNU General Public License Version 2 or later (the "GPL"), in which
* case the provisions of the GPL are applicable instead of those above. If
* you wish to allow use of your version of this file only under the terms of
* the GPL and not to allow others to use your version of this file under the
* MPL, indicate your decision by deleting the provisions above and replacing
* them with the notice and other provisions required by the GPL. If you do
* not delete the provisions above, a recipient may use your version of this
* file under either the MPL or the GPL.
*
* ***** END LICENSE BLOCK ***** */
package com.google.javascript.rhino;
import java.util.ArrayList;
import java.util.List;
/**
 * A simple {@link ErrorReporter} that accumulates warnings and errors and
 * exposes them afterwards via {@link #errors()} and {@link #warnings()}.
 * Both lists are created lazily, so a reporter that saw no diagnostics
 * returns {@code null} from the accessors.
 */
public class SimpleErrorReporter implements ErrorReporter {
    private List<String> warnings = null;
    private List<String> errors = null;

    /** Records a formatted warning message. */
    public void warning(String message, String sourceName, int line,
                        String lineSource, int lineOffset)
    {
        warnings = append(warnings,
            formatDetailedMessage(message, sourceName, line, lineSource, lineOffset));
    }

    /** Records a formatted error message. */
    public void error(String message, String sourceName, int line,
                      String lineSource, int lineOffset)
    {
        errors = append(errors,
            formatDetailedMessage(message, sourceName, line, lineSource, lineOffset));
    }

    /** Wraps the diagnostic in an {@link EvaluatorException} for the caller to throw. */
    public EvaluatorException runtimeError(
        String message, String sourceName, int line, String lineSource,
        int lineOffset)
    {
        return new EvaluatorException(
            message, sourceName, line, lineSource, lineOffset);
    }

    /**
     * Returns the list of errors, or {@code null} if there were none.
     */
    public List<String> errors()
    {
        return errors;
    }

    /**
     * Returns the list of warnings, or {@code null} if there were none.
     */
    public List<String> warnings()
    {
        return warnings;
    }

    /** Adds an entry to the list, creating the list on first use. */
    private static List<String> append(List<String> list, String entry)
    {
        if (list == null) {
            list = new ArrayList<String>();
        }
        list.add(entry);
        return list;
    }

    /**
     * Builds the human-readable message via {@link RhinoException}, attaching
     * only the location details that were actually supplied.
     */
    private String formatDetailedMessage(
        String message, String sourceName, int line, String lineSource,
        int lineOffset)
    {
        RhinoException e = new RhinoException(message);
        if (sourceName != null) {
            e.initSourceName(sourceName);
        }
        if (lineSource != null) {
            e.initLineSource(lineSource);
        }
        if (line > 0) {
            e.initLineNumber(line);
        }
        if (lineOffset > 0) {
            e.initColumnNumber(lineOffset);
        }
        return e.getMessage();
    }
}
| ehsan/js-symbolic-executor | closure-compiler/src/com/google/javascript/rhino/SimpleErrorReporter.java | Java | apache-2.0 | 3,775 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.namenode;
import static org.apache.hadoop.fs.permission.AclEntryScope.*;
import static org.apache.hadoop.fs.permission.AclEntryType.*;
import static org.apache.hadoop.fs.permission.FsAction.*;
import static org.apache.hadoop.hdfs.server.namenode.AclTestHelpers.*;
import static org.apache.hadoop.test.MetricsAsserts.assertCounter;
import static org.apache.hadoop.test.MetricsAsserts.getMetrics;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.InputStream;
import java.io.RandomAccessFile;
import java.net.URI;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Random;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.io.FileUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.ChecksumException;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.AclEntry;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.fs.permission.PermissionStatus;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSInotifyEventInputStream;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfo;
import org.apache.hadoop.hdfs.server.common.HdfsServerConstants;
import org.apache.hadoop.hdfs.server.common.Storage.StorageDirectory;
import org.apache.hadoop.hdfs.server.namenode.NNStorage.NameNodeDirType;
import org.apache.hadoop.hdfs.server.namenode.metrics.NameNodeMetrics;
import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo;
import org.apache.hadoop.hdfs.util.XMLUtils.InvalidXmlException;
import org.apache.hadoop.hdfs.util.XMLUtils.Stanza;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.test.PathUtils;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Time;
import org.apache.log4j.Level;
import org.apache.log4j.AppenderSkeleton;
import org.apache.log4j.LogManager;
import org.apache.log4j.spi.LoggingEvent;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import org.mockito.Mockito;
import org.xml.sax.ContentHandler;
import org.xml.sax.SAXException;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
/**
* This class tests the creation and validation of a checkpoint.
*/
@RunWith(Parameterized.class)
public class TestEditLog {
static {
GenericTestUtils.setLogLevel(FSEditLog.LOG, Level.ALL);
}
@Parameters
public static Collection<Object[]> data() {
    // Every test case runs twice: once with synchronous edit logging
    // (false) and once with asynchronous logging (true).
    List<Object[]> params = new ArrayList<Object[]>();
    for (Boolean async : new Boolean[] { Boolean.FALSE, Boolean.TRUE }) {
        params.add(new Object[] { async });
    }
    return params;
}
// Parameterized flag: whether the edit log under test uses async logging.
// NOTE(review): a static field assigned from the instance constructor --
// relies on the Parameterized runner constructing the fixture before its
// tests run; confirm tests are not run concurrently.
private static boolean useAsyncEditLog;
public TestEditLog(Boolean async) {
useAsyncEditLog = async;
}
/**
 * Builds a namenode configuration with the async-edit-logging flag set to
 * the current parameterized value.
 */
public static Configuration getConf() {
Configuration conf = new HdfsConfiguration();
conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_EDITS_ASYNC_LOGGING,
useAsyncEditLog);
return conf;
}
/**
 * A garbage mkdir op which is used for testing
 * {@link EditLogFileInputStream#scanEditLog(File, long, boolean)}.
 * It serializes 1-16 random bytes as its payload, always fails to be
 * decoded, and does not support XML round-tripping.
 */
public static class GarbageMkdirOp extends FSEditLogOp {
public GarbageMkdirOp() {
super(FSEditLogOpCodes.OP_MKDIR);
}
@Override
void resetSubFields() {
// nop
}
// Deliberately undecodable: readers hitting this op must fail.
@Override
void readFields(DataInputStream in, int logVersion) throws IOException {
throw new IOException("cannot decode GarbageMkdirOp");
}
@Override
public void writeFields(DataOutputStream out) throws IOException {
// write in some garbage content
Random random = new Random();
byte[] content = new byte[random.nextInt(16) + 1];
random.nextBytes(content);
out.write(content);
}
@Override
protected void toXml(ContentHandler contentHandler) throws SAXException {
throw new UnsupportedOperationException(
"Not supported for GarbageMkdirOp");
}
@Override
void fromXml(Stanza st) throws InvalidXmlException {
throw new UnsupportedOperationException(
"Not supported for GarbageMkdirOp");
}
}
static final Log LOG = LogFactory.getLog(TestEditLog.class);
// Tests here run without datanodes; only namenode edit-log behavior matters.
static final int NUM_DATA_NODES = 0;
// This test creates NUM_THREADS threads and each thread does
// 2 * NUM_TRANSACTIONS Transactions concurrently.
static final int NUM_TRANSACTIONS = 100;
static final int NUM_THREADS = 100;
static final File TEST_DIR = PathUtils.getTestDir(TestEditLog.class);
/** An edits log with 3 edits from 0.20 - the result of
* a fresh namesystem followed by hadoop fs -touchz /myfile */
static final byte[] HADOOP20_SOME_EDITS =
StringUtils.hexStringToByte((
"ffff ffed 0a00 0000 0000 03fa e100 0000" +
"0005 0007 2f6d 7966 696c 6500 0133 000d" +
"3132 3932 3331 3634 3034 3138 3400 0d31" +
"3239 3233 3136 3430 3431 3834 0009 3133" +
"3432 3137 3732 3800 0000 0004 746f 6464" +
"0a73 7570 6572 6772 6f75 7001 a400 1544" +
"4653 436c 6965 6e74 5f2d 3136 3136 3535" +
"3738 3931 000b 3137 322e 3239 2e35 2e33" +
"3209 0000 0005 0007 2f6d 7966 696c 6500" +
"0133 000d 3132 3932 3331 3634 3034 3138" +
"3400 0d31 3239 3233 3136 3430 3431 3834" +
"0009 3133 3432 3137 3732 3800 0000 0004" +
"746f 6464 0a73 7570 6572 6772 6f75 7001" +
"a4ff 0000 0000 0000 0000 0000 0000 0000"
).replace(" ",""));
static {
// No need to fsync for the purposes of tests. This makes
// the tests run much faster.
EditLogFileOutputStream.setShouldSkipFsyncForTesting(true);
}
// OP_INVALID marks the logical end of an edit-log segment.
static final byte TRAILER_BYTE = FSEditLogOpCodes.OP_INVALID.getOpCode();
private static final int CHECKPOINT_ON_STARTUP_MIN_TXNS = 100;
//
// an object that does a bunch of transactions
//
static class Transactions implements Runnable {
  final FSNamesystem namesystem;
  final int numTransactions;
  final short replication = 3;
  final long blockSize = 64;
  final int startIndex;

  Transactions(FSNamesystem ns, int numTx, int startIdx) {
    namesystem = ns;
    numTransactions = numTx;
    startIndex = startIdx;
  }

  /** Log an open/close transaction pair (plus a sync) for each file. */
  @Override
  public void run() {
    final FSEditLog editLog = namesystem.getEditLog();
    final PermissionStatus perms = namesystem.createFsOwnerPermissions(
        new FsPermission((short)0777));
    for (int txn = 0; txn < numTransactions; txn++) {
      String path = "/filename" + (startIndex + txn);
      INodeFile inode = new INodeFile(namesystem.dir.allocateNewInodeId(), null,
          perms, 0L, 0L, BlockInfo.EMPTY_ARRAY, replication, blockSize);
      inode.toUnderConstruction("", "");

      editLog.logOpenFile(path, inode, false, false);
      editLog.logCloseFile(path, inode);
      editLog.logSync();
    }
  }
}
/**
 * Construct FSEditLog with default configuration, taking editDirs from NNStorage
 *
 * @param storage Storage object used by namenode
 */
private static FSEditLog getFSEditLog(NNStorage storage) throws IOException {
  // Make sure the edits dirs are set in the provided configuration object.
  Configuration conf = getConf();
  conf.set(DFSConfigKeys.DFS_NAMENODE_EDITS_DIR_KEY,
      StringUtils.join(",", storage.getEditsDirectories()));
  return FSEditLog.newInstance(
      conf, storage, FSNamesystem.getNamespaceEditsDirs(conf));
}
/**
 * Test case for an empty edit log from a prior version of Hadoop.
 */
@Test
public void testPreTxIdEditLogNoEdits() throws Exception {
  FSNamesystem namesys = Mockito.mock(FSNamesystem.class);
  namesys.dir = Mockito.mock(FSDirectory.class);
  // A pre-transactional log containing only the layout version number
  // should load cleanly and yield zero edits.
  byte[] versionOnly = StringUtils.hexStringToByte("ffffffed");
  assertEquals(0, testLoad(versionOnly, namesys));
}
/**
 * Test case for loading a very simple edit log from a format
 * prior to the inclusion of edit transaction IDs in the log.
 */
@Test
public void testPreTxidEditLogWithEdits() throws Exception {
  Configuration conf = getConf();
  MiniDFSCluster cluster = null;

  try {
    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(0).build();
    cluster.waitActive();
    final FSNamesystem namesystem = cluster.getNamesystem();

    // HADOOP20_SOME_EDITS holds exactly 3 serialized 0.20-era edits.
    long numEdits = testLoad(HADOOP20_SOME_EDITS, namesystem);
    assertEquals(3, numEdits);
    // Sanity check the edit
    HdfsFileStatus fileInfo =
        namesystem.getFileInfo("/myfile", false, false, false);
    assertEquals("supergroup", fileInfo.getGroup());
    assertEquals(3, fileInfo.getReplication());
  } finally {
    if (cluster != null) { cluster.shutdown(); }
  }
}
/** Load the given serialized edits bytes and return how many ops were read. */
private long testLoad(byte[] data, FSNamesystem namesys) throws IOException {
  EditLogInputStream in = new EditLogByteInputStream(data);
  return new FSEditLogLoader(namesys, 0).loadFSEdits(in, 1);
}
/**
 * Simple test for writing to and rolling the edit log.
 */
@Test
public void testSimpleEditLog() throws IOException {
  // start a cluster
  Configuration conf = getConf();
  MiniDFSCluster cluster = null;
  FileSystem fileSys = null;

  try {
    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(NUM_DATA_NODES).build();
    cluster.waitActive();
    fileSys = cluster.getFileSystem();
    final FSNamesystem namesystem = cluster.getNamesystem();
    FSImage fsimage = namesystem.getFSImage();
    final FSEditLog editLog = fsimage.getEditLog();

    // A fresh cluster starts writing to edits_inprogress_1.
    assertExistsInStorageDirs(
        cluster, NameNodeDirType.EDITS,
        NNStorage.getInProgressEditsFileName(1));

    editLog.logSetReplication("fakefile", (short) 1);
    editLog.logSync();

    // Rolling finalizes txns 1-3 and opens a new segment at txid 4.
    editLog.rollEditLog(NameNodeLayoutVersion.CURRENT_LAYOUT_VERSION);

    assertExistsInStorageDirs(
        cluster, NameNodeDirType.EDITS,
        NNStorage.getFinalizedEditsFileName(1,3));
    assertExistsInStorageDirs(
        cluster, NameNodeDirType.EDITS,
        NNStorage.getInProgressEditsFileName(4));

    editLog.logSetReplication("fakefile", (short) 2);
    editLog.logSync();

    editLog.close();
  } finally {
    if(fileSys != null) fileSys.close();
    if(cluster != null) cluster.shutdown();
  }
}
/**
 * Tests transaction logging in dfs.
 */
@Test
public void testMultiThreadedEditLog() throws IOException {
  // First with a comfortable buffer, then with a 1-byte buffer that
  // forces the edit log to sync automatically on every logged entry.
  testEditLog(2048);
  testEditLog(1);
}
/** Assert that a file with the given name exists in every storage dir of the given type. */
private void assertExistsInStorageDirs(MiniDFSCluster cluster,
    NameNodeDirType dirType,
    String filename) {
  NNStorage storage = cluster.getNamesystem().getFSImage().getStorage();
  for (StorageDirectory sd : storage.dirIterable(dirType)) {
    File expected = new File(sd.getCurrentDir(), filename);
    assertTrue("Expect that " + expected + " exists", expected.exists());
  }
}
/**
 * Test edit log with different initial buffer size
 *
 * @param initialSize initial edit log buffer size
 * @throws IOException
 */
private void testEditLog(int initialSize) throws IOException {

  // start a cluster
  Configuration conf = getConf();
  MiniDFSCluster cluster = null;
  FileSystem fileSys = null;

  try {
    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(NUM_DATA_NODES).build();
    cluster.waitActive();
    fileSys = cluster.getFileSystem();
    final FSNamesystem namesystem = cluster.getNamesystem();

    for (Iterator<URI> it = cluster.getNameDirs(0).iterator(); it.hasNext(); ) {
      File dir = new File(it.next().getPath());
      System.out.println(dir);
    }

    FSImage fsimage = namesystem.getFSImage();
    FSEditLog editLog = fsimage.getEditLog();

    // set small size of flush buffer
    editLog.setOutputBufferCapacity(initialSize);

    // Roll log so new output buffer size takes effect
    // we should now be writing to edits_inprogress_3
    fsimage.rollEditLog(NameNodeLayoutVersion.CURRENT_LAYOUT_VERSION);

    // Remember the current lastInodeId and will reset it back to test
    // loading editlog segments.The transactions in the following allocate new
    // inode id to write to editlogs but doesn't create ionde in namespace
    long originalLastInodeId = namesystem.dir.getLastInodeId();

    // Create threads and make them run transactions concurrently.
    Thread threadId[] = new Thread[NUM_THREADS];
    for (int i = 0; i < NUM_THREADS; i++) {
      Transactions trans =
        new Transactions(namesystem, NUM_TRANSACTIONS, i*NUM_TRANSACTIONS);
      threadId[i] = new Thread(trans, "TransactionThread-" + i);
      threadId[i].start();
    }

    // wait for all transactions to get over
    for (int i = 0; i < NUM_THREADS; i++) {
      try {
        threadId[i].join();
      } catch (InterruptedException e) {
        // If the join was interrupted, retry joining the same thread.
        i--; // retry
      }
    }

    // Reopen some files as for append
    Transactions trans =
      new Transactions(namesystem, NUM_TRANSACTIONS, NUM_TRANSACTIONS / 2);
    trans.run();

    // Roll another time to finalize edits_inprogress_3
    fsimage.rollEditLog(NameNodeLayoutVersion.CURRENT_LAYOUT_VERSION);

    // Each Transactions run logs 2 ops per transaction; the extra run above
    // accounts for the "+1" thread. +2 for start/end txns.
    long expectedTxns = ((NUM_THREADS+1) * 2 * NUM_TRANSACTIONS) + 2; // +2 for start/end txns

    // Verify that we can read in all the transactions that we have written.
    // If there were any corruptions, it is likely that the reading in
    // of these transactions will throw an exception.
    //
    namesystem.dir.resetLastInodeIdWithoutChecking(originalLastInodeId);
    for (Iterator<StorageDirectory> it =
            fsimage.getStorage().dirIterator(NameNodeDirType.EDITS); it.hasNext();) {
      FSEditLogLoader loader = new FSEditLogLoader(namesystem, 0);

      File editFile = NNStorage.getFinalizedEditsFile(it.next(), 3,
          3 + expectedTxns - 1);
      assertTrue("Expect " + editFile + " exists", editFile.exists());

      System.out.println("Verifying file: " + editFile);
      long numEdits = loader.loadFSEdits(
          new EditLogFileInputStream(editFile), 3);
      int numLeases = namesystem.leaseManager.countLease();
      System.out.println("Number of outstanding leases " + numLeases);
      assertEquals(0, numLeases);
      assertTrue("Verification for " + editFile + " failed. " +
                 "Expected " + expectedTxns + " transactions. "+
                 "Found " + numEdits + " transactions.",
                 numEdits == expectedTxns);
    }
  } finally {
    try {
      if(fileSys != null) fileSys.close();
      if(cluster != null) cluster.shutdown();
    } catch (Throwable t) {
      LOG.error("Couldn't shut down cleanly", t);
    }
  }
}
/** Log a single setReplication edit on the given executor and wait for it. */
private void doLogEdit(ExecutorService exec, final FSEditLog log,
    final String filename) throws Exception
{
  Callable<Void> edit = new Callable<Void>() {
    @Override
    public Void call() {
      log.logSetReplication(filename, (short)1);
      return null;
    }
  };
  exec.submit(edit).get();
}

/** Call logSync on the given executor and wait for it. */
private void doCallLogSync(ExecutorService exec, final FSEditLog log)
    throws Exception
{
  Callable<Void> sync = new Callable<Void>() {
    @Override
    public Void call() {
      log.logSync();
      return null;
    }
  };
  exec.submit(sync).get();
}

/** Call logSyncAll on the given executor and wait for it. */
private void doCallLogSyncAll(ExecutorService exec, final FSEditLog log)
    throws Exception
{
  Callable<Void> syncAll = new Callable<Void>() {
    @Override
    public Void call() throws Exception {
      log.logSyncAll();
      return null;
    }
  };
  exec.submit(syncAll).get();
}
@Test
public void testSyncBatching() throws Exception {
  if (useAsyncEditLog) {
    // semantics are completely differently since edits will be auto-synced
    return;
  }
  // start a cluster
  Configuration conf = getConf();
  MiniDFSCluster cluster = null;
  FileSystem fileSys = null;
  // Two executors so edits originate from two distinct threads.
  ExecutorService threadA = Executors.newSingleThreadExecutor();
  ExecutorService threadB = Executors.newSingleThreadExecutor();
  try {
    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(NUM_DATA_NODES).build();
    cluster.waitActive();
    fileSys = cluster.getFileSystem();
    final FSNamesystem namesystem = cluster.getNamesystem();
    FSImage fsimage = namesystem.getFSImage();
    final FSEditLog editLog = fsimage.getEditLog();

    assertEquals("should start with only the BEGIN_LOG_SEGMENT txn synced",
      1, editLog.getSyncTxId());

    // Log an edit from thread A
    doLogEdit(threadA, editLog, "thread-a 1");
    assertEquals("logging edit without syncing should do not affect txid",
      1, editLog.getSyncTxId());

    // Log an edit from thread B
    doLogEdit(threadB, editLog, "thread-b 1");
    assertEquals("logging edit without syncing should do not affect txid",
      1, editLog.getSyncTxId());

    // Now ask to sync edit from B, which should sync both edits.
    doCallLogSync(threadB, editLog);
    assertEquals("logSync from second thread should bump txid up to 3",
      3, editLog.getSyncTxId());

    // Now ask to sync edit from A, which was already batched in - thus
    // it should increment the batch count metric
    doCallLogSync(threadA, editLog);
    assertEquals("logSync from first thread shouldn't change txid",
      3, editLog.getSyncTxId());

    //Should have incremented the batch count exactly once
    assertCounter("TransactionsBatchedInSync", 1L,
      getMetrics("NameNodeActivity"));
  } finally {
    threadA.shutdown();
    threadB.shutdown();
    if(fileSys != null) fileSys.close();
    if(cluster != null) cluster.shutdown();
  }
}
/**
 * Test what happens with the following sequence:
 *
 *  Thread A writes edit
 *  Thread B calls logSyncAll
 *           calls close() on stream
 *  Thread A calls logSync
 *
 * This sequence is legal and can occur if enterSafeMode() is closely
 * followed by saveNamespace.
 */
@Test
public void testBatchedSyncWithClosedLogs() throws Exception {
  // start a cluster
  Configuration conf = getConf();
  MiniDFSCluster cluster = null;
  FileSystem fileSys = null;
  ExecutorService threadA = Executors.newSingleThreadExecutor();
  ExecutorService threadB = Executors.newSingleThreadExecutor();
  try {
    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(NUM_DATA_NODES).build();
    cluster.waitActive();
    fileSys = cluster.getFileSystem();
    final FSNamesystem namesystem = cluster.getNamesystem();
    FSImage fsimage = namesystem.getFSImage();
    final FSEditLog editLog = fsimage.getEditLog();

    // Log an edit from thread A
    doLogEdit(threadA, editLog, "thread-a 1");

    // async log is doing batched syncs in background.  logSync just ensures
    // the edit is durable, so the txid may increase prior to sync
    if (!useAsyncEditLog) {
      assertEquals("logging edit without syncing should do not affect txid",
          1, editLog.getSyncTxId());
    }
    // logSyncAll in Thread B
    doCallLogSyncAll(threadB, editLog);
    assertEquals("logSyncAll should sync thread A's transaction",
      2, editLog.getSyncTxId());

    // Close edit log
    editLog.close();

    // Ask thread A to finish sync (which should be a no-op)
    doCallLogSync(threadA, editLog);
  } finally {
    threadA.shutdown();
    threadB.shutdown();
    if(fileSys != null) fileSys.close();
    if(cluster != null) cluster.shutdown();
  }
}
/**
 * Test that a flipped bit in a finalized edit log's checksum prevents
 * the NN from restarting, failing with a ChecksumException cause.
 */
@Test
public void testEditChecksum() throws Exception {
  // start a cluster
  Configuration conf = getConf();
  MiniDFSCluster cluster = null;
  FileSystem fileSys = null;
  cluster = new MiniDFSCluster.Builder(conf).numDataNodes(NUM_DATA_NODES).build();
  cluster.waitActive();
  fileSys = cluster.getFileSystem();
  final FSNamesystem namesystem = cluster.getNamesystem();

  FSImage fsimage = namesystem.getFSImage();
  final FSEditLog editLog = fsimage.getEditLog();
  fileSys.mkdirs(new Path("/tmp"));

  // Remember the edits dirs before shutting down so we can corrupt the
  // finalized log files afterwards.
  Iterator<StorageDirectory> iter = fsimage.getStorage().
    dirIterator(NameNodeDirType.EDITS);
  LinkedList<StorageDirectory> sds = new LinkedList<StorageDirectory>();
  while (iter.hasNext()) {
    sds.add(iter.next());
  }
  editLog.close();
  cluster.shutdown();

  // Flip a bit in the checksum field of each copy of the finalized log.
  for (StorageDirectory sd : sds) {
    File editFile = NNStorage.getFinalizedEditsFile(sd, 1, 3);
    assertTrue(editFile.exists());

    long fileLen = editFile.length();
    LOG.debug("Corrupting Log File: " + editFile + " len: " + fileLen);
    RandomAccessFile rwf = new RandomAccessFile(editFile, "rw");
    try {
      rwf.seek(fileLen-4); // seek to checksum bytes
      int b = rwf.readInt();
      rwf.seek(fileLen-4);
      rwf.writeInt(b+1);
    } finally {
      rwf.close();
    }
  }

  // Restarting on the corrupt logs must fail with a ChecksumException cause.
  // Reset the reference first so the finally block below only shuts down a
  // cluster that actually started (the original leaked it when fail() threw).
  cluster = null;
  try {
    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(NUM_DATA_NODES).format(false).build();
    fail("should not be able to start");
  } catch (IOException e) {
    // expected
    assertNotNull("Cause of exception should be ChecksumException", e.getCause());
    assertEquals("Cause of exception should be ChecksumException",
        ChecksumException.class, e.getCause().getClass());
  } finally {
    if (cluster != null) {
      cluster.shutdown();
    }
  }
}
/**
 * Test what happens if the NN crashes when it has has started but
 * had no transactions written.
 */
@Test
public void testCrashRecoveryNoTransactions() throws Exception {
  testCrashRecovery(0);
}

/**
 * Test what happens if the NN crashes when it has has started and
 * had a few transactions written
 */
@Test
public void testCrashRecoveryWithTransactions() throws Exception {
  // 150 > CHECKPOINT_ON_STARTUP_MIN_TXNS, so recovery should checkpoint.
  testCrashRecovery(150);
}
/**
 * Do a test to make sure the edit log can recover edits even after
 * a non-clean shutdown. This does a simulated crash by copying over
 * the edits directory while the NN is still running, then shutting it
 * down, and restoring that edits directory.
 */
private void testCrashRecovery(int numTransactions) throws Exception {
  MiniDFSCluster cluster = null;
  Configuration conf = getConf();
  conf.setInt(DFSConfigKeys.DFS_NAMENODE_CHECKPOINT_TXNS_KEY,
      CHECKPOINT_ON_STARTUP_MIN_TXNS);

  try {
    LOG.info("\n===========================================\n" +
             "Starting empty cluster");

    cluster = new MiniDFSCluster.Builder(conf)
      .numDataNodes(NUM_DATA_NODES)
      .format(true)
      .build();
    cluster.waitActive();

    FileSystem fs = cluster.getFileSystem();
    for (int i = 0; i < numTransactions; i++) {
      fs.mkdirs(new Path("/test" + i));
    }

    // Directory layout looks like:
    // test/data/dfs/nameN/current/{fsimage_N,edits_...}
    File nameDir = new File(cluster.getNameDirs(0).iterator().next().getPath());
    File dfsDir = nameDir.getParentFile();
    assertEquals(dfsDir.getName(), "dfs"); // make sure we got right dir

    LOG.info("Copying data directory aside to a hot backup");
    File backupDir = new File(dfsDir.getParentFile(), "dfs.backup-while-running");
    FileUtils.copyDirectory(dfsDir, backupDir);

    LOG.info("Shutting down cluster #1");
    cluster.shutdown();
    cluster = null;

    // Now restore the backup
    FileUtil.fullyDeleteContents(dfsDir);
    dfsDir.delete();
    backupDir.renameTo(dfsDir);

    // Directory layout looks like:
    // test/data/dfs/nameN/current/{fsimage_N,edits_...}
    File currentDir = new File(nameDir, "current");

    // We should see the file as in-progress
    File editsFile = new File(currentDir,
        NNStorage.getInProgressEditsFileName(1));
    assertTrue("Edits file " + editsFile + " should exist", editsFile.exists());

    File imageFile = FSImageTestUtil.findNewestImageFile(
        currentDir.getAbsolutePath());
    assertNotNull("No image found in " + nameDir, imageFile);
    assertEquals(NNStorage.getImageFileName(0), imageFile.getName());

    // Try to start a new cluster
    LOG.info("\n===========================================\n" +
    "Starting same cluster after simulated crash");
    cluster = new MiniDFSCluster.Builder(conf)
      .numDataNodes(NUM_DATA_NODES)
      .format(false)
      .build();
    cluster.waitActive();

    // We should still have the files we wrote prior to the simulated crash
    fs = cluster.getFileSystem();
    for (int i = 0; i < numTransactions; i++) {
      assertTrue(fs.exists(new Path("/test" + i)));
    }

    long expectedTxId;
    if (numTransactions > CHECKPOINT_ON_STARTUP_MIN_TXNS) {
      // It should have saved a checkpoint on startup since there
      // were more unfinalized edits than configured
      expectedTxId = numTransactions + 1;
    } else {
      // otherwise, it shouldn't have made a checkpoint
      expectedTxId = 0;
    }
    imageFile = FSImageTestUtil.findNewestImageFile(
        currentDir.getAbsolutePath());
    assertNotNull("No image found in " + nameDir, imageFile);
    assertEquals(NNStorage.getImageFileName(expectedTxId),
        imageFile.getName());

    // Started successfully. Shut it down and make sure it can restart.
    cluster.shutdown();
    cluster = null;

    cluster = new MiniDFSCluster.Builder(conf)
      .numDataNodes(NUM_DATA_NODES)
      .format(false)
      .build();
    cluster.waitActive();
  } finally {
    if (cluster != null) {
      cluster.shutdown();
    }
  }
}
// should succeed - only one corrupt log dir
@Test
public void testCrashRecoveryEmptyLogOneDir() throws Exception {
  doTestCrashRecoveryEmptyLog(false, true, true);
}

// should fail - seen_txid updated to 3, but no log dir contains txid 3
@Test
public void testCrashRecoveryEmptyLogBothDirs() throws Exception {
  doTestCrashRecoveryEmptyLog(true, true, false);
}

// should succeed - only one corrupt log dir
@Test
public void testCrashRecoveryEmptyLogOneDirNoUpdateSeenTxId()
    throws Exception {
  doTestCrashRecoveryEmptyLog(false, false, true);
}

// should succeed - both log dirs corrupt, but seen_txid never updated
@Test
public void testCrashRecoveryEmptyLogBothDirsNoUpdateSeenTxId()
    throws Exception {
  doTestCrashRecoveryEmptyLog(true, false, true);
}
/**
 * Test that the NN handles the corruption properly
 * after it crashes just after creating an edit log
 * (ie before writing START_LOG_SEGMENT). In the case
 * that all logs have this problem, it should mark them
 * as corrupt instead of trying to finalize them.
 *
 * @param inBothDirs if true, there will be a truncated log in
 * both of the edits directories. If false, the truncated log
 * will only be in one of the directories. In both cases, the
 * NN should fail to start up, because it's aware that txid 3
 * was reached, but unable to find a non-corrupt log starting there.
 * @param updateTransactionIdFile if true update the seen_txid file.
 * If false, it will not be updated. This will simulate a case where
 * the NN crashed between creating the new segment and updating the
 * seen_txid file.
 * @param shouldSucceed true if the test is expected to succeed.
 */
private void doTestCrashRecoveryEmptyLog(boolean inBothDirs,
    boolean updateTransactionIdFile, boolean shouldSucceed)
    throws Exception {
  // start a cluster
  Configuration conf = getConf();
  MiniDFSCluster cluster = null;
  cluster = new MiniDFSCluster.Builder(conf)
    .numDataNodes(NUM_DATA_NODES).build();
  cluster.shutdown();

  Collection<URI> editsDirs = cluster.getNameEditsDirs(0);
  for (URI uri : editsDirs) {
    File dir = new File(uri.getPath());
    File currentDir = new File(dir, "current");
    // We should start with only the finalized edits_1-2
    GenericTestUtils.assertGlobEquals(currentDir, "edits_.*",
        NNStorage.getFinalizedEditsFileName(1, 2));
    // Make a truncated edits_3_inprogress
    File log = new File(currentDir,
        NNStorage.getInProgressEditsFileName(3));

    EditLogFileOutputStream stream = new EditLogFileOutputStream(conf, log, 1024);
    try {
      stream.create(NameNodeLayoutVersion.CURRENT_LAYOUT_VERSION);
      if (!inBothDirs) {
        // Only the first edits directory gets the truncated segment.
        break;
      }

      NNStorage storage = new NNStorage(conf,
          Collections.<URI>emptyList(),
          Lists.newArrayList(uri));

      if (updateTransactionIdFile) {
        storage.writeTransactionIdFileToStorage(3);
      }
      storage.close();
    } finally {
      stream.close();
    }
  }

  try {
    cluster = new MiniDFSCluster.Builder(conf)
      .numDataNodes(NUM_DATA_NODES).format(false).build();
    if (!shouldSucceed) {
      // Fixed typo in the failure message ("startin" -> "starting").
      fail("Should not have succeeded in starting cluster");
    }
  } catch (IOException ioe) {
    if (shouldSucceed) {
      LOG.info("Should have succeeded in starting cluster, but failed", ioe);
      throw ioe;
    } else {
      GenericTestUtils.assertExceptionContains(
          "Gap in transactions. Expected to be able to read up until " +
          "at least txid 3 but unable to find any edit logs containing " +
          "txid 3", ioe);
    }
  } finally {
    cluster.shutdown();
  }
}
/**
 * An EditLogInputStream backed by an in-memory byte array, used by
 * testLoad() to feed raw serialized edits to FSEditLogLoader.
 */
private static class EditLogByteInputStream extends EditLogInputStream {
  private final InputStream input;
  private final long len;
  private int version;
  private FSEditLogOp.Reader reader = null;
  private FSEditLogLoader.PositionTrackingInputStream tracker = null;

  public EditLogByteInputStream(byte[] data) throws IOException {
    len = data.length;
    input = new ByteArrayInputStream(data);

    BufferedInputStream bin = new BufferedInputStream(input);
    DataInputStream in = new DataInputStream(bin);
    // The layout version is read first, then the tracker wraps the
    // remainder so op positions are reported relative to the stream.
    version = EditLogFileInputStream.readLogVersion(in, true);
    tracker = new FSEditLogLoader.PositionTrackingInputStream(in);
    in = new DataInputStream(tracker);

    reader = FSEditLogOp.Reader.create(in, tracker, version);
  }

  // Txids are unknown for an anonymous byte stream.
  @Override
  public long getFirstTxId() {
    return HdfsServerConstants.INVALID_TXID;
  }

  @Override
  public long getLastTxId() {
    return HdfsServerConstants.INVALID_TXID;
  }

  @Override
  public long length() throws IOException {
    return len;
  }

  @Override
  public long getPosition() {
    return tracker.getPos();
  }

  @Override
  protected FSEditLogOp nextOp() throws IOException {
    return reader.readOp(false);
  }

  @Override
  public int getVersion(boolean verifyVersion) throws IOException {
    return version;
  }

  @Override
  public void close() throws IOException {
    input.close();
  }

  @Override
  public String getName() {
    return "AnonEditLogByteInputStream";
  }

  @Override
  public boolean isInProgress() {
    return true;
  }

  @Override
  public void setMaxOpSize(int maxOpSize) {
    reader.setMaxOpSize(maxOpSize);
  }

  @Override public boolean isLocalLog() {
    return true;
  }
}
/**
 * Test that opening the edit log for write fails cleanly when the
 * only journal directory is unwritable.
 */
@Test
public void testFailedOpen() throws Exception {
  File logDir = new File(TEST_DIR, "testFailedOpen");
  logDir.mkdirs();
  FSEditLog log = FSImageTestUtil.createStandaloneEditLog(logDir);
  try {
    FileUtil.setWritable(logDir, false);
    log.openForWrite(NameNodeLayoutVersion.CURRENT_LAYOUT_VERSION);
    // Fixed typo in the failure message ("Did no" -> "Did not").
    fail("Did not throw exception on only having a bad dir");
  } catch (IOException ioe) {
    GenericTestUtils.assertExceptionContains(
        "too few journals successfully started", ioe);
  } finally {
    // Restore permissions so the directory can be cleaned up later.
    FileUtil.setWritable(logDir, true);
    log.close();
  }
}
/**
 * Regression test for HDFS-1112/HDFS-3020. Ensures that, even if
 * logSync isn't called periodically, the edit log will sync itself.
 */
@Test
public void testAutoSync() throws Exception {
  File logDir = new File(TEST_DIR, "testAutoSync");
  logDir.mkdirs();
  FSEditLog log = FSImageTestUtil.createStandaloneEditLog(logDir);

  // 500 bytes hex-encoded -> a ~1KB string payload per op.
  String oneKB = StringUtils.byteToHexString(
      new byte[500]);

  try {
    log.openForWrite(NameNodeLayoutVersion.CURRENT_LAYOUT_VERSION);
    NameNodeMetrics mockMetrics = Mockito.mock(NameNodeMetrics.class);
    log.setMetricsForTests(mockMetrics);

    for (int i = 0; i < 400; i++) {
      log.logDelete(oneKB, 1L, false);
    }
    // After ~400KB, we're still within the 512KB buffer size
    Mockito.verify(mockMetrics, Mockito.times(0)).addSync(Mockito.anyLong());

    // After ~400KB more, we should have done an automatic sync
    for (int i = 0; i < 400; i++) {
      log.logDelete(oneKB, 1L, false);
    }
    Mockito.verify(mockMetrics, Mockito.times(1)).addSync(Mockito.anyLong());
  } finally {
    log.close();
  }
}
/**
 * Tests the getEditLogManifest function using mock storage for a number
 * of different situations.
 */
@Test
public void testEditLogManifestMocks() throws IOException {
  NNStorage storage;
  FSEditLog log;

  // Simple case - different directories have the same
  // set of logs, with an in-progress one at end
  storage = mockStorageWithEdits(
      "[1,100]|[101,200]|[201,]",
      "[1,100]|[101,200]|[201,]");
  log = getFSEditLog(storage);
  log.initJournalsForWrite();
  assertEquals("[[1,100], [101,200]] CommittedTxId: 200",
      log.getEditLogManifest(1).toString());
  assertEquals("[[101,200]] CommittedTxId: 200",
      log.getEditLogManifest(101).toString());

  // Another simple case, different directories have different
  // sets of files
  storage = mockStorageWithEdits(
      "[1,100]|[101,200]",
      "[1,100]|[201,300]|[301,400]"); // nothing starting at 101
  log = getFSEditLog(storage);
  log.initJournalsForWrite();
  assertEquals("[[1,100], [101,200], [201,300], [301,400]]" +
      " CommittedTxId: 400", log.getEditLogManifest(1).toString());

  // Case where one directory has an earlier finalized log, followed
  // by a gap. The returned manifest should start after the gap.
  storage = mockStorageWithEdits(
      "[1,100]|[301,400]", // gap from 101 to 300
      "[301,400]|[401,500]");
  log = getFSEditLog(storage);
  log.initJournalsForWrite();
  assertEquals("[[301,400], [401,500]] CommittedTxId: 500",
      log.getEditLogManifest(1).toString());

  // Case where different directories have different length logs
  // starting at the same txid - should pick the longer one
  storage = mockStorageWithEdits(
      "[1,100]|[101,150]", // short log at 101
      "[1,50]|[101,200]"); // short log at 1
  log = getFSEditLog(storage);
  log.initJournalsForWrite();
  assertEquals("[[1,100], [101,200]] CommittedTxId: 200",
      log.getEditLogManifest(1).toString());
  assertEquals("[[101,200]] CommittedTxId: 200",
      log.getEditLogManifest(101).toString());

  // Case where the first storage has an inprogress while
  // the second has finalised that file (i.e. the first failed
  // recently)
  storage = mockStorageWithEdits(
      "[1,100]|[101,]",
      "[1,100]|[101,200]");
  log = getFSEditLog(storage);
  log.initJournalsForWrite();
  assertEquals("[[1,100], [101,200]] CommittedTxId: 200",
      log.getEditLogManifest(1).toString());
  assertEquals("[[101,200]] CommittedTxId: 200",
      log.getEditLogManifest(101).toString());
}
/**
 * Create a mock NNStorage object with several directories, each directory
 * holding edit logs according to a specification. Each directory
 * is specified by a pipe-separated string. For example:
 * <code>[1,100]|[101,200]</code> specifies a directory which
 * includes two finalized segments, one from 1-100, and one from 101-200.
 * The syntax <code>[1,]</code> specifies an in-progress log starting at
 * txid 1.
 */
private NNStorage mockStorageWithEdits(String... editsDirSpecs) throws IOException {
  List<StorageDirectory> sds = Lists.newArrayList();
  List<URI> uris = Lists.newArrayList();

  NNStorage storage = Mockito.mock(NNStorage.class);
  for (String dirSpec : editsDirSpecs) {
    List<String> files = Lists.newArrayList();
    String[] logSpecs = dirSpec.split("\\|");
    for (String logSpec : logSpecs) {
      // "[start,end]" -> finalized segment; "[start,]" -> in-progress.
      Matcher m = Pattern.compile("\\[(\\d+),(\\d+)?\\]").matcher(logSpec);
      assertTrue("bad spec: " + logSpec, m.matches());
      if (m.group(2) == null) {
        files.add(NNStorage.getInProgressEditsFileName(
            Long.parseLong(m.group(1))));
      } else {
        files.add(NNStorage.getFinalizedEditsFileName(
            Long.parseLong(m.group(1)),
            Long.parseLong(m.group(2))));
      }
    }

    StorageDirectory sd = FSImageTestUtil.mockStorageDirectory(
        NameNodeDirType.EDITS, false,
        files.toArray(new String[0]));
    sds.add(sd);
    // Random suffix keeps mock URIs distinct across directories.
    URI u = URI.create("file:///storage"+ Math.random());
    Mockito.doReturn(sd).when(storage).getStorageDirectory(u);
    uris.add(u);
  }

  Mockito.doReturn(sds).when(storage).dirIterable(NameNodeDirType.EDITS);
  Mockito.doReturn(uris).when(storage).getEditsDirectories();
  return storage;
}
/**
 * Specification for a failure during #setupEdits
 */
static class AbortSpec {
  // Roll count after which the abort is triggered.
  final int roll;
  // Index of the journal to abort.
  final int logindex;

  /**
   * Construct the failure specification.
   * @param roll number to fail after. e.g. 1 to fail after the first roll
   * @param logindex index of journal to fail.
   */
  AbortSpec(int roll, int logindex) {
    this.roll = roll;
    this.logindex = logindex;
  }
}
// Number of transactions written per completed roll of the edit log.
final static int TXNS_PER_ROLL = 10;
// Number of transactions present in a segment that was aborted mid-roll.
final static int TXNS_PER_FAIL = 2;

/**
 * Set up directories for tests.
 *
 * Each rolled file is 10 txns long.
 * A failed file is 2 txns long.
 *
 * @param editUris directories to create edit logs in
 * @param numrolls number of times to roll the edit log during setup
 * @param closeOnFinish whether to close the edit log after setup
 * @param abortAtRolls Specifications for when to fail, see AbortSpec
 */
public static NNStorage setupEdits(List<URI> editUris, int numrolls,
    boolean closeOnFinish, AbortSpec... abortAtRolls) throws IOException {
  List<AbortSpec> aborts = new ArrayList<AbortSpec>(Arrays.asList(abortAtRolls));
  NNStorage storage = new NNStorage(getConf(),
      Collections.<URI>emptyList(),
      editUris);
  storage.format(new NamespaceInfo());
  FSEditLog editlog = getFSEditLog(storage);
  // open the edit log and add two transactions
  // logGenerationStamp is used, simply because it doesn't
  // require complex arguments.
  editlog.initJournalsForWrite();
  editlog.openForWrite(NameNodeLayoutVersion.CURRENT_LAYOUT_VERSION);
  for (int i = 2; i < TXNS_PER_ROLL; i++) {
    editlog.logGenerationStamp((long) 0);
  }
  editlog.logSync();

  // Go into edit log rolling loop.
  // On each roll, the abortAtRolls abort specs are
  // checked to see if an abort is required. If so the
  // the specified journal is aborted. It will be brought
  // back into rotation automatically by rollEditLog
  for (int i = 0; i < numrolls; i++) {
    editlog.rollEditLog(NameNodeLayoutVersion.CURRENT_LAYOUT_VERSION);

    editlog.logGenerationStamp((long) i);
    editlog.logSync();

    while (aborts.size() > 0
           && aborts.get(0).roll == (i+1)) {
      AbortSpec spec = aborts.remove(0);
      editlog.getJournals().get(spec.logindex).abort();
    }

    for (int j = 3; j < TXNS_PER_ROLL; j++) {
      editlog.logGenerationStamp((long) i);
    }
    editlog.logSync();
  }

  if (closeOnFinish) {
    editlog.close();
  }
  FSImageTestUtil.logStorageContents(LOG, storage);
  return storage;
}
/**
 * Set up directories for tests.
 *
 * Each rolled file is 10 txns long.
 * A failed file is 2 txns long.
 *
 * @param editUris directories to create edit logs in
 * @param numrolls number of times to roll the edit log during setup
 * @param abortAtRolls Specifications for when to fail, see AbortSpec
 */
public static NNStorage setupEdits(List<URI> editUris, int numrolls,
    AbortSpec... abortAtRolls) throws IOException {
  // Convenience overload that always closes the edit log after setup.
  return setupEdits(editUris, numrolls, true, abortAtRolls);
}
/**
 * Test loading an editlog which has had both its storage fail
 * on alternating rolls. Two edit log directories are created.
 * The first one fails on odd rolls, the second on even. Test
 * that we are able to load the entire editlog regardless.
 */
@Test
public void testAlternatingJournalFailure() throws IOException {
  File f1 = new File(TEST_DIR + "/alternatingjournaltest0");
  File f2 = new File(TEST_DIR + "/alternatingjournaltest1");

  List<URI> editUris = ImmutableList.of(f1.toURI(), f2.toURI());

  NNStorage storage = setupEdits(editUris, 10,
                                 new AbortSpec(1, 0),
                                 new AbortSpec(2, 1),
                                 new AbortSpec(3, 0),
                                 new AbortSpec(4, 1),
                                 new AbortSpec(5, 0),
                                 new AbortSpec(6, 1),
                                 new AbortSpec(7, 0),
                                 new AbortSpec(8, 1),
                                 new AbortSpec(9, 0),
                                 new AbortSpec(10, 1));
  long totaltxnread = 0;
  FSEditLog editlog = getFSEditLog(storage);
  editlog.initJournalsForWrite();
  long startTxId = 1;
  // 10 rolls plus the initial segment -> 11 segments in total.
  Iterable<EditLogInputStream> editStreams = editlog.selectInputStreams(startTxId,
                                                                        TXNS_PER_ROLL*11);

  for (EditLogInputStream edits : editStreams) {
    FSEditLogLoader.EditLogValidation val =
        FSEditLogLoader.scanEditLog(edits, Long.MAX_VALUE);
    long read = (val.getEndTxId() - edits.getFirstTxId()) + 1;
    LOG.info("Loading edits " + edits + " read " + read);
    assertEquals(startTxId, edits.getFirstTxId());
    startTxId += read;
    totaltxnread += read;
  }

  editlog.close();
  storage.close();
  assertEquals(TXNS_PER_ROLL*11, totaltxnread);
}
/**
 * Test loading an editlog with gaps. A single editlog directory
 * is set up. On of the edit log files is deleted. This should
 * fail when selecting the input streams as it will not be able
 * to select enough streams to load up to 4*TXNS_PER_ROLL.
 * There should be 4*TXNS_PER_ROLL transactions as we rolled 3
 * times.
 */
@Test
public void testLoadingWithGaps() throws IOException {
  File f1 = new File(TEST_DIR + "/gaptest0");
  List<URI> editUris = ImmutableList.of(f1.toURI());
  NNStorage storage = setupEdits(editUris, 3);

  // Delete the second segment to create a gap in the log sequence.
  final long startGapTxId = 1*TXNS_PER_ROLL + 1;
  final long endGapTxId = 2*TXNS_PER_ROLL;
  File[] files = new File(f1, "current").listFiles(new FilenameFilter() {
      @Override
      public boolean accept(File dir, String name) {
        return name.startsWith(NNStorage.getFinalizedEditsFileName(startGapTxId,
            endGapTxId));
      }
    });
  assertEquals(1, files.length);
  assertTrue(files[0].delete());

  FSEditLog editlog = getFSEditLog(storage);
  editlog.initJournalsForWrite();
  long startTxId = 1;
  try {
    editlog.selectInputStreams(startTxId, 4*TXNS_PER_ROLL);
    fail("Should have thrown exception");
  } catch (IOException ioe) {
    GenericTestUtils.assertExceptionContains(
        "Gap in transactions. Expected to be able to read up until " +
        "at least txid 40 but unable to find any edit logs containing " +
        "txid 11", ioe);
  }
}
/**
 * Test that we can read an edit log containing arbitrary bytes without
 * crashing the JVM or throwing anything other than IOException.
 *
 * @param garbage raw bytes to write into a fresh edit log file
 * @throws IOException only from setting up/tearing down the log files;
 *         IOExceptions raised while reading the garbage are expected
 *         and deliberately swallowed
 */
static void validateNoCrash(byte garbage[]) throws IOException {
  final File TEST_LOG_NAME = new File(TEST_DIR, "test_edit_log");

  EditLogFileOutputStream elfos = null;
  EditLogFileInputStream elfis = null;
  try {
    // Write the garbage bytes into a freshly created edit log file.
    elfos = new EditLogFileOutputStream(getConf(), TEST_LOG_NAME, 0);
    elfos.create(NameNodeLayoutVersion.CURRENT_LAYOUT_VERSION);
    elfos.writeRaw(garbage, 0, garbage.length);
    elfos.setReadyToFlush();
    elfos.flushAndSync(true);
    elfos.close();
    // Null out so the finally block does not close it a second time.
    elfos = null;
    elfis = new EditLogFileInputStream(TEST_LOG_NAME);

    // verify that we can read everything without killing the JVM or
    // throwing an exception other than IOException
    try {
      while (true) {
        FSEditLogOp op = elfis.readOp();
        if (op == null)
          break;
      }
    } catch (IOException e) {
      // Expected: garbage input is allowed to fail with IOException.
    } catch (Throwable t) {
      fail("Caught non-IOException throwable " +
           StringUtils.stringifyException(t));
    }
  } finally {
    // Only close the output stream if it is still open (i.e. the happy
    // path above did not already close it).
    if ((elfos != null) && (elfos.isOpen()))
      elfos.close();
    if (elfis != null)
      elfis.close();
  }
}
// NOTE(review): appears unused in this file (and the name is misspelled);
// kept as-is since it is package-visible — confirm before removing.
static byte[][] invalidSequenecs = null;

/**
 * "Fuzz" test for the edit log: feed many randomly generated byte
 * sequences through the edit log reader and verify that none of them
 * crashes the JVM or raises an unchecked exception.
 */
@Test
public void testFuzzSequences() throws IOException {
  final int MAX_GARBAGE_LENGTH = 512;
  final int MAX_INVALID_SEQ = 5000;
  // A fixed seed makes java.util.Random produce the same byte sequences
  // every run, so this test is deterministic and repeatable anywhere.
  final int RANDOM_SEED = 123;
  Random random = new Random(RANDOM_SEED);

  for (int i = 0; i < MAX_INVALID_SEQ; i++) {
    byte[] garbage = new byte[random.nextInt(MAX_GARBAGE_LENGTH)];
    random.nextBytes(garbage);
    validateNoCrash(garbage);
  }
}
/**
 * Drain every op from each stream, checking that transaction IDs are
 * strictly consecutive starting at startTxId.
 *
 * @return the total number of transactions read across all streams
 * @throws IOException if a stream fails or a transaction ID is out of order
 */
private static long readAllEdits(Collection<EditLogInputStream> streams,
    long startTxId) throws IOException {
  long expectedTxId = startTxId;
  long numTx = 0;
  for (EditLogInputStream stream : streams) {
    FSEditLogOp op;
    while ((op = stream.readOp()) != null) {
      if (op.getTransactionId() != expectedTxId) {
        throw new IOException("out of order transaction ID! expected " +
            expectedTxId + " but got " + op.getTransactionId() + " when " +
            "reading " + stream.getName());
      }
      numTx++;
      expectedTxId = op.getTransactionId() + 1;
    }
  }
  return numTx;
}
/**
 * Test edit log failover. If a single edit log segment is missing from
 * one directory, the corresponding segment from the other directory
 * should be used instead and all edits should still be readable.
 */
@Test
public void testEditLogFailOverFromMissing() throws IOException {
  File f1 = new File(TEST_DIR + "/failover0");
  File f2 = new File(TEST_DIR + "/failover1");
  List<URI> editUris = ImmutableList.of(f1.toURI(), f2.toURI());

  NNStorage storage = setupEdits(editUris, 3);

  // Delete the second-roll segment from the first directory only; the
  // copy in the second directory must cover the hole.
  final long startErrorTxId = 1*TXNS_PER_ROLL + 1;
  final long endErrorTxId = 2*TXNS_PER_ROLL;

  File[] files = new File(f1, "current").listFiles(new FilenameFilter() {
      @Override
      public boolean accept(File dir, String name) {
        if (name.startsWith(NNStorage.getFinalizedEditsFileName(startErrorTxId,
            endErrorTxId))) {
          return true;
        }
        return false;
      }
    });
  assertEquals(1, files.length);
  assertTrue(files[0].delete());

  FSEditLog editlog = getFSEditLog(storage);
  editlog.initJournalsForWrite();
  long startTxId = 1;
  Collection<EditLogInputStream> streams = null;
  try {
    streams = editlog.selectInputStreams(startTxId, 4*TXNS_PER_ROLL);
    readAllEdits(streams, startTxId);
  } catch (IOException e) {
    LOG.error("edit log failover didn't work", e);
    fail("Edit log failover didn't work");
  } finally {
    // BUGFIX: selectInputStreams may throw before 'streams' is assigned;
    // without this null check the finally block would raise a
    // NullPointerException that masks the real failure.
    if (streams != null) {
      IOUtils.cleanup(null, streams.toArray(new EditLogInputStream[0]));
    }
  }
}
/**
 * Test edit log failover from a corrupt edit log: flip the trailing
 * checksum of one segment in the first directory and verify that the
 * intact copy in the second directory is used instead.
 */
@Test
public void testEditLogFailOverFromCorrupt() throws IOException {
  File f1 = new File(TEST_DIR + "/failover0");
  File f2 = new File(TEST_DIR + "/failover1");
  List<URI> editUris = ImmutableList.of(f1.toURI(), f2.toURI());

  NNStorage storage = setupEdits(editUris, 3);

  // Locate the second-roll segment in the first directory.
  final long startErrorTxId = 1*TXNS_PER_ROLL + 1;
  final long endErrorTxId = 2*TXNS_PER_ROLL;

  File[] files = new File(f1, "current").listFiles(new FilenameFilter() {
      @Override
      public boolean accept(File dir, String name) {
        if (name.startsWith(NNStorage.getFinalizedEditsFileName(startErrorTxId,
            endErrorTxId))) {
          return true;
        }
        return false;
      }
    });
  assertEquals(1, files.length);

  // Corrupt the segment by incrementing its final checksum word.
  long fileLen = files[0].length();
  LOG.debug("Corrupting Log File: " + files[0] + " len: " + fileLen);
  RandomAccessFile rwf = new RandomAccessFile(files[0], "rw");
  rwf.seek(fileLen-4); // seek to checksum bytes
  int b = rwf.readInt();
  rwf.seek(fileLen-4);
  rwf.writeInt(b+1);
  rwf.close();

  FSEditLog editlog = getFSEditLog(storage);
  editlog.initJournalsForWrite();
  long startTxId = 1;
  Collection<EditLogInputStream> streams = null;
  try {
    streams = editlog.selectInputStreams(startTxId, 4*TXNS_PER_ROLL);
    readAllEdits(streams, startTxId);
  } catch (IOException e) {
    LOG.error("edit log failover didn't work", e);
    fail("Edit log failover didn't work");
  } finally {
    // BUGFIX: selectInputStreams may throw before 'streams' is assigned;
    // guard against a NullPointerException that would mask the failure.
    if (streams != null) {
      IOUtils.cleanup(null, streams.toArray(new EditLogInputStream[0]));
    }
  }
}
/**
 * Test creating a directory with lots and lots of edit log segments
 * (1000 rolls), then time how long a restarting NameNode takes to load
 * them all. The elapsed time is only logged, not asserted on.
 */
@Test
public void testManyEditLogSegments() throws IOException {
  final int NUM_EDIT_LOG_ROLLS = 1000;
  // start a cluster
  Configuration conf = getConf();
  MiniDFSCluster cluster = null;
  FileSystem fileSys = null;
  try {
    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(NUM_DATA_NODES).build();
    cluster.waitActive();
    fileSys = cluster.getFileSystem();
    final FSNamesystem namesystem = cluster.getNamesystem();
    FSImage fsimage = namesystem.getFSImage();
    final FSEditLog editLog = fsimage.getEditLog();
    for (int i = 0; i < NUM_EDIT_LOG_ROLLS; i++){
      // Each iteration logs one op and rolls; the assertions below show
      // each finalized segment spans txids (i*3)+1 .. (i*3)+3.
      editLog.logSetReplication("fakefile" + i, (short)(i % 3));
      assertExistsInStorageDirs(
          cluster, NameNodeDirType.EDITS,
          NNStorage.getInProgressEditsFileName((i * 3) + 1));
      editLog.logSync();
      editLog.rollEditLog(NameNodeLayoutVersion.CURRENT_LAYOUT_VERSION);
      assertExistsInStorageDirs(
          cluster, NameNodeDirType.EDITS,
          NNStorage.getFinalizedEditsFileName((i * 3) + 1, (i * 3) + 3));
    }
    editLog.close();
  } finally {
    if(fileSys != null) fileSys.close();
    if(cluster != null) cluster.shutdown();
  }

  // How long does it take to read through all these edit logs?
  long startTime = Time.now();
  try {
    // Restarting the cluster forces the NameNode to load every segment
    // written above.
    cluster = new MiniDFSCluster.Builder(conf).
        numDataNodes(NUM_DATA_NODES).build();
    cluster.waitActive();
  } finally {
    if (cluster != null) {
      cluster.shutdown();
    }
  }
  long endTime = Time.now();
  double delta = ((float)(endTime - startTime)) / 1000.0;
  LOG.info(String.format("loaded %d edit log segments in %.2f seconds",
      NUM_EDIT_LOG_ROLLS, delta));
}
/**
 * Edit log op instances are cached internally using thread-local storage.
 * This test checks that the cached instances are reset in between different
 * transactions processed on the same thread, so that we don't accidentally
 * apply incorrect attributes to an inode: inodes created outside /dir1
 * must NOT inherit /dir1's default ACL after an edit-log replay.
 *
 * @throws IOException if there is an I/O error
 */
@Test
public void testResetThreadLocalCachedOps() throws IOException {
  Configuration conf = new HdfsConfiguration();
  conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_ACLS_ENABLED_KEY, true);
  // Set single handler thread, so all transactions hit same thread-local ops.
  conf.setInt(DFSConfigKeys.DFS_NAMENODE_HANDLER_COUNT_KEY, 1);
  MiniDFSCluster cluster = null;
  FileSystem fileSys = null;
  try {
    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
    cluster.waitActive();
    fileSys = cluster.getFileSystem();

    // Create /dir1 with a default ACL.
    Path dir1 = new Path("/dir1");
    fileSys.mkdirs(dir1);
    List<AclEntry> aclSpec = Lists.newArrayList(
        aclEntry(DEFAULT, USER, "foo", READ_EXECUTE));
    fileSys.modifyAclEntries(dir1, aclSpec);

    // /dir1/dir2 is expected to clone the default ACL.
    Path dir2 = new Path("/dir1/dir2");
    fileSys.mkdirs(dir2);

    // /dir1/file1 is expected to clone the default ACL.
    Path file1 = new Path("/dir1/file1");
    fileSys.create(file1).close();

    // /dir3 is not a child of /dir1, so must not clone the default ACL.
    Path dir3 = new Path("/dir3");
    fileSys.mkdirs(dir3);

    // /file2 is not a child of /dir1, so must not clone the default ACL.
    Path file2 = new Path("/file2");
    fileSys.create(file2).close();

    // Restart (replaying the edit log) and assert the above stated
    // expectations; a stale thread-local op would leak ACL entries onto
    // /dir3 or /file2.
    IOUtils.cleanup(LOG, fileSys);
    cluster.restartNameNode();
    fileSys = cluster.getFileSystem();
    assertFalse(fileSys.getAclStatus(dir1).getEntries().isEmpty());
    assertFalse(fileSys.getAclStatus(dir2).getEntries().isEmpty());
    assertFalse(fileSys.getAclStatus(file1).getEntries().isEmpty());
    assertTrue(fileSys.getAclStatus(dir3).getEntries().isEmpty());
    assertTrue(fileSys.getAclStatus(file2).getEntries().isEmpty());
  } finally {
    IOUtils.cleanup(LOG, fileSys);
    if (cluster != null) {
      cluster.shutdown();
    }
  }
}
/**
 * In-memory log4j appender: records every logging event so that tests
 * can later inspect what was logged.
 */
class TestAppender extends AppenderSkeleton {
  private final List<LoggingEvent> events = new ArrayList<>();

  @Override
  public boolean requiresLayout() {
    return false;
  }

  @Override
  protected void append(final LoggingEvent loggingEvent) {
    events.add(loggingEvent);
  }

  @Override
  public void close() {
    // nothing to release
  }

  /** @return a defensive copy of the events recorded so far */
  public List<LoggingEvent> getLog() {
    return new ArrayList<>(events);
  }
}
/**
 * Regression test: tailing an in-progress edit log must not attempt to
 * read past the last synced op, even when the bytes just beyond the
 * OP_INVALID terminator are garbage. A log message matching "Caught
 * exception after reading N ops" would indicate such an over-read.
 *
 * @throws Exception if the mini-cluster cannot be started or the log
 *         cannot be read/patched
 */
@Test
public void testReadActivelyUpdatedLog() throws Exception {
  final TestAppender appender = new TestAppender();
  LogManager.getRootLogger().addAppender(appender);
  Configuration conf = new HdfsConfiguration();
  conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_ACLS_ENABLED_KEY, true);
  // Set single handler thread, so all transactions hit same thread-local ops.
  conf.setInt(DFSConfigKeys.DFS_NAMENODE_HANDLER_COUNT_KEY, 1);
  MiniDFSCluster cluster = null;
  try {
    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
    cluster.waitActive();
    FSImage fsimage = cluster.getNamesystem().getFSImage();
    StorageDirectory sd = fsimage.getStorage().getStorageDir(0);

    final DistributedFileSystem fileSys = cluster.getFileSystem();
    DFSInotifyEventInputStream events = fileSys.getInotifyEventStream();
    fileSys.mkdirs(new Path("/test"));
    fileSys.mkdirs(new Path("/test/dir1"));
    fileSys.delete(new Path("/test/dir1"), true);
    fsimage.getEditLog().logSync();
    fileSys.mkdirs(new Path("/test/dir2"));

    final File inProgressEdit = NNStorage.getInProgressEditsFile(sd, 1);
    assertTrue(inProgressEdit.exists());
    EditLogFileInputStream elis = new EditLogFileInputStream(inProgressEdit);
    FSEditLogOp op;
    long pos = 0;
    // Scan to the end of the valid ops, remembering the position just
    // after the last valid one.
    while (true) {
      op = elis.readOp();
      if (op != null && op.opCode != FSEditLogOpCodes.OP_INVALID) {
        pos = elis.getPosition();
      } else {
        break;
      }
    }
    elis.close();
    assertTrue(pos > 0);

    RandomAccessFile rwf = new RandomAccessFile(inProgressEdit, "rw");
    rwf.seek(pos);
    // The byte at 'pos' must be the OP_INVALID (-1) terminator.
    // (BUGFIX: expected value goes first in JUnit's assertEquals.)
    assertEquals((byte) -1, rwf.readByte());
    // Scribble garbage just past the terminator to simulate bytes that
    // have been written but not yet synced.
    rwf.seek(pos + 1);
    rwf.writeByte(2);
    rwf.close();

    events.poll();
    String pattern = "Caught exception after reading (.*) ops";
    Pattern r = Pattern.compile(pattern);
    final List<LoggingEvent> log = appender.getLog();
    for (LoggingEvent event : log) {
      Matcher m = r.matcher(event.getRenderedMessage());
      if (m.find()) {
        // BUGFIX: "syned" -> "synced" in the failure message.
        fail("Should not try to read past latest synced edit log op");
      }
    }
  } finally {
    if (cluster != null) {
      cluster.shutdown();
    }
    LogManager.getRootLogger().removeAppender(appender);
  }
}
}
| dennishuo/hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestEditLog.java | Java | apache-2.0 | 59,670 |
// ----------------------------------------------------------------------------------
//
// Copyright Microsoft Corporation
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ----------------------------------------------------------------------------------
using Microsoft.Azure.Commands.ResourceManager.Common.ArgumentCompleters;
using Microsoft.Azure.Commands.ResourceManager.Common.Tags;
using Microsoft.Azure.Commands.Sql.Properties;
using Microsoft.Azure.Commands.Sql.Replication.Model;
using Microsoft.Azure.Commands.Sql.Database.Services;
using Microsoft.Rest.Azure;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Management.Automation;
namespace Microsoft.Azure.Commands.Sql.Replication.Cmdlet
{
/// <summary>
/// Cmdlet to create a new Azure SQL Database Copy.
/// </summary>
[Cmdlet("New", ResourceManager.Common.AzureRMConstants.AzureRMPrefix + "SqlDatabaseCopy", ConfirmImpact = ConfirmImpact.Low, SupportsShouldProcess = true, DefaultParameterSetName = DtuDatabaseParameterSet), OutputType(typeof(AzureSqlDatabaseCopyModel))]
public class NewAzureSqlDatabaseCopy : AzureSqlDatabaseCopyCmdletBase
{
    private const string DtuDatabaseParameterSet = "DtuBasedDatabase";
    private const string VcoreDatabaseParameterSet = "VcoreBasedDatabase";

    /// <summary>
    /// Gets or sets the name of the database to be copied.
    /// </summary>
    [Parameter(Mandatory = true,
        ValueFromPipelineByPropertyName = true,
        Position = 2,
        HelpMessage = "The name of the Azure SQL Database to be copied.")]
    [ValidateNotNullOrEmpty]
    public string DatabaseName { get; set; }

    /// <summary>
    /// Gets or sets the name of the service objective to assign to the Azure SQL Database copy.
    /// </summary>
    [Parameter(ParameterSetName = DtuDatabaseParameterSet, Mandatory = false,
        HelpMessage = "The name of the service objective to assign to the Azure SQL Database copy.")]
    [ValidateNotNullOrEmpty]
    public string ServiceObjectiveName { get; set; }

    /// <summary>
    /// Gets or sets the name of the Elastic Pool to put the database copy in.
    /// </summary>
    [Parameter(ParameterSetName = DtuDatabaseParameterSet, Mandatory = false,
        HelpMessage = "The name of the Elastic Pool to put the database copy in.")]
    [ValidateNotNullOrEmpty]
    public string ElasticPoolName { get; set; }

    /// <summary>
    /// Gets or sets the tags associated with the Azure SQL Database Copy.
    /// </summary>
    [Parameter(Mandatory = false,
        HelpMessage = "The tags to associate with the Azure SQL Database Copy")]
    [Alias("Tag")]
    public Hashtable Tags { get; set; }

    /// <summary>
    /// Gets or sets the name of the resource group of the copy.
    /// Defaults to the source database's resource group when omitted.
    /// </summary>
    [Parameter(Mandatory = false,
        HelpMessage = "The name of the resource group of the copy.")]
    [ValidateNotNullOrEmpty]
    public string CopyResourceGroupName { get; set; }

    /// <summary>
    /// Gets or sets the name of the Azure SQL Server of the copy.
    /// Defaults to the source database's server when omitted.
    /// </summary>
    [Parameter(Mandatory = false,
        HelpMessage = "The name of the Azure SQL Server of the copy.")]
    [ValidateNotNullOrEmpty]
    public string CopyServerName { get; set; }

    /// <summary>
    /// Gets or sets the name of the database copy to create.
    /// </summary>
    [Parameter(Mandatory = true,
        HelpMessage = "The name of the Azure SQL Database copy.")]
    [ValidateNotNullOrEmpty]
    public string CopyDatabaseName { get; set; }

    /// <summary>
    /// Gets or sets whether or not to run this cmdlet in the background as a job.
    /// </summary>
    [Parameter(Mandatory = false, HelpMessage = "Run cmdlet in the background")]
    public SwitchParameter AsJob { get; set; }

    /// <summary>
    /// Gets or sets the compute generation of the database copy.
    /// </summary>
    [Parameter(ParameterSetName = VcoreDatabaseParameterSet, Mandatory = true,
        HelpMessage = "The compute generation to assign to the new copy.")]
    [Alias("Family")]
    [PSArgumentCompleter("Gen4", "Gen5")]
    [ValidateNotNullOrEmpty]
    public string ComputeGeneration { get; set; }

    /// <summary>
    /// Gets or sets the Vcore numbers of the database copy.
    /// </summary>
    [Parameter(ParameterSetName = VcoreDatabaseParameterSet, Mandatory = true,
        HelpMessage = "The Vcore numbers of the Azure Sql Database copy.")]
    [Alias("Capacity")]
    [ValidateNotNullOrEmpty]
    public int VCore { get; set; }

    /// <summary>
    /// Gets or sets the license type for the Azure Sql database.
    /// </summary>
    [Parameter(Mandatory = false,
        HelpMessage = "The license type for the Azure Sql database.")]
    [PSArgumentCompleter(
        Management.Sql.Models.DatabaseLicenseType.LicenseIncluded,
        Management.Sql.Models.DatabaseLicenseType.BasePrice)]
    public string LicenseType { get; set; }

    /// <summary>
    /// Overriding to add warning message.
    /// </summary>
    public override void ExecuteCmdlet()
    {
        base.ExecuteCmdlet();
    }

    /// <summary>
    /// Get the entities from the service.
    /// Since this cmdlet creates a copy, the target database must NOT
    /// already exist: a NotFound response is the success path here.
    /// </summary>
    /// <returns>Null when the target database does not exist (expected)</returns>
    /// <exception cref="PSArgumentException">If the target database already exists</exception>
    protected override IEnumerable<AzureSqlDatabaseCopyModel> GetEntity()
    {
        string copyResourceGroupName = string.IsNullOrWhiteSpace(this.CopyResourceGroupName) ? this.ResourceGroupName : this.CopyResourceGroupName;
        string copyServerName = string.IsNullOrWhiteSpace(this.CopyServerName) ? this.ServerName : this.CopyServerName;

        // We try to get the database. Since this is a create copy, we don't want the copy database to exist
        try
        {
            ModelAdapter.GetDatabase(copyResourceGroupName, copyServerName, this.CopyDatabaseName);
        }
        catch (CloudException ex)
        {
            // BUGFIX: Response can be null (e.g. a connectivity failure with
            // no HTTP response); guard so we rethrow the original exception
            // instead of raising a NullReferenceException here.
            if (ex.Response != null && ex.Response.StatusCode == System.Net.HttpStatusCode.NotFound)
            {
                // This is what we want. We looked and there is no database with this name.
                return null;
            }

            // Unexpected exception encountered
            throw;
        }

        // The database already exists
        throw new PSArgumentException(
            string.Format(Resources.DatabaseNameExists, this.CopyDatabaseName, copyServerName),
            "CopyDatabaseName");
    }

    /// <summary>
    /// Create the model from user input.
    /// </summary>
    /// <param name="model">Model retrieved from service</param>
    /// <returns>The model that was passed in</returns>
    protected override IEnumerable<AzureSqlDatabaseCopyModel> ApplyUserInputToModel(IEnumerable<AzureSqlDatabaseCopyModel> model)
    {
        // Copy target defaults to the source resource group/server.
        string copyResourceGroup = string.IsNullOrWhiteSpace(CopyResourceGroupName) ? ResourceGroupName : CopyResourceGroupName;
        string copyServer = string.IsNullOrWhiteSpace(CopyServerName) ? ServerName : CopyServerName;
        string location = ModelAdapter.GetServerLocation(ResourceGroupName, ServerName);
        // Avoid a second service call when copying within the same server.
        string copyLocation = copyServer.Equals(ServerName) ? location : ModelAdapter.GetServerLocation(copyResourceGroup, copyServer);
        Database.Model.AzureSqlDatabaseModel sourceDb = ModelAdapter.GetDatabase(ResourceGroupName, ServerName, DatabaseName);

        List<Model.AzureSqlDatabaseCopyModel> newEntity = new List<AzureSqlDatabaseCopyModel>();
        AzureSqlDatabaseCopyModel copyModel = new AzureSqlDatabaseCopyModel()
        {
            Location = location,
            ResourceGroupName = ResourceGroupName,
            ServerName = ServerName,
            DatabaseName = DatabaseName,
            CopyResourceGroupName = copyResourceGroup,
            CopyServerName = copyServer,
            CopyDatabaseName = CopyDatabaseName,
            CopyLocation = copyLocation,
            ServiceObjectiveName = ServiceObjectiveName,
            ElasticPoolName = ElasticPoolName,
            Tags = TagsConversionHelper.CreateTagDictionary(Tags, validate: true),
            LicenseType = LicenseType // note: default license type is LicenseIncluded
        };

        if (ParameterSetName == DtuDatabaseParameterSet)
        {
            if (!string.IsNullOrWhiteSpace(ServiceObjectiveName))
            {
                copyModel.SkuName = ServiceObjectiveName;
            }
            else if (string.IsNullOrWhiteSpace(ElasticPoolName))
            {
                // Neither SLO nor pool specified: inherit the source
                // database's SKU settings.
                copyModel.SkuName = sourceDb.CurrentServiceObjectiveName;
                copyModel.Edition = sourceDb.Edition;
                copyModel.Capacity = sourceDb.Capacity;
                copyModel.Family = sourceDb.Family;
            }
        }
        else
        {
            // vCore parameter set: keep the source edition, apply the
            // user-specified capacity and compute generation.
            copyModel.SkuName = AzureSqlDatabaseAdapter.GetDatabaseSkuName(sourceDb.Edition);
            copyModel.Edition = sourceDb.Edition;
            copyModel.Capacity = VCore;
            copyModel.Family = ComputeGeneration;
        }

        newEntity.Add(copyModel);
        return newEntity;
    }

    /// <summary>
    /// Create the new database copy.
    /// </summary>
    /// <param name="entity">The output of apply user input to model</param>
    /// <returns>The input entity</returns>
    protected override IEnumerable<AzureSqlDatabaseCopyModel> PersistChanges(IEnumerable<AzureSqlDatabaseCopyModel> entity)
    {
        return new List<AzureSqlDatabaseCopyModel>()
        {
            ModelAdapter.CopyDatabaseWithNewSdk(entity.First().CopyResourceGroupName, entity.First().CopyServerName, entity.First())
        };
    }
}
}
| AzureAutomationTeam/azure-powershell | src/ResourceManager/Sql/Commands.Sql/Replication/Cmdlet/NewAzureSqlDatabaseCopy.cs | C# | apache-2.0 | 10,818 |
package cvc3;
import java.util.*;
import java.io.*;
/** Wrapper for a C++ object as a Java Object.
 *
 *  See README for details on garbage collection, i.e. the interplay of
 *  delete, finalize, and EmbeddedManager used to destruct the embedded
 *  C++ object. */
public abstract class Embedded {
    // Load the JNI C++ library once, when this class is first used.
    static {
        System.loadLibrary("cvc3jni");

        /*
        // for debugging: stop here by waiting for a key press,
        // and attach a C++ debugger
        System.out.println("Loaded cvc3jni");

        try {
            BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
            br.readLine();
        } catch (IOException ioe) {
        }
        */
    }

    /// Attributes

    // The embedded C++ object; null once deleted or queued for deletion.
    protected Object d_embedded;

    // Manager used to defer destruction to a safe thread (may be null).
    private final EmbeddedManager d_embeddedManager;


    /// Constructor

    // Initialize with the embedded object and an EmbeddedManager.
    // If embeddedManager is null then delete must be called before
    // this Embedded is garbage collected.
    // (Parameter renamed from 'Embedded': it shadowed the class name.)
    protected Embedded(Object embedded, EmbeddedManager embeddedManager) {
        //System.out.println("Create: Embedded");
        assert(embedded != null);
        d_embedded = embedded;
        d_embeddedManager = embeddedManager;
    }

    // Access to the embedded C++ object.
    public synchronized Object embedded() {
        return d_embedded;
    }

    // Access to the EmbeddedManager (might be null if none is used).
    public EmbeddedManager embeddedManager() {
        return d_embeddedManager;
    }

    // Check if already destructed
    // (or queued for destruction in the embeddedManager).
    public synchronized boolean isDeleted() {
        return (d_embedded == null);
    }

    // Delete the embedded object or enqueue it for deletion. Idempotent.
    public synchronized void delete() throws Cvc3Exception {
        if (isDeleted()) return;

        // No embedded manager, so we should be in the main thread:
        // destruct right away.
        if (d_embeddedManager == null) {
            EmbeddedManager.jniDelete(d_embedded);
        }
        // Could be in a finalizer, so queue in the embeddedManager;
        // unless the embeddedManager is already deleted -- then its
        // (and this') ValidityChecker has been deleted, and (assuming
        // this is an Expr or a Theorem) its embedded object has already
        // been deleted as well.
        else {
            synchronized(d_embeddedManager) {
                if (!d_embeddedManager.isDeleted()) {
                    d_embeddedManager.register(this);
                }
            }
        }
        d_embedded = null;
    }

    // Ensure that delete is called if finalization occurs.
    public void finalize() throws Throwable {
        try {
            // Without an embeddedManager, delete() must have been called
            // explicitly before finalization.
            if (d_embeddedManager == null) {
                if (d_embedded != null) {
                    assert(false);
                    // System.out.println("Embedded.Finalizer: should never be called");
                    throw new Error("Embedded.Finalizer: should never be called");
                }
            }
            else if (!d_embeddedManager.isDeleted()) {
                delete();
            }
        } finally {
            super.finalize();
        }
    }
}
| ehsan/js-symbolic-executor | cvc3/java/src/cvc3/Embedded.java | Java | apache-2.0 | 2,909 |
/*
* Copyright (c) 1997, 2008, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*/
package com.sun.java.swing.plaf.windows;
import java.awt.*;
import java.awt.event.*;
import java.awt.image.*;
import java.lang.ref.*;
import java.util.*;
import javax.swing.plaf.basic.*;
import javax.swing.*;
import javax.swing.plaf.ComponentUI;
import static com.sun.java.swing.plaf.windows.TMSchema.*;
import static com.sun.java.swing.plaf.windows.XPStyle.Skin;
/**
* Windows rendition of the component.
* <p>
* <strong>Warning:</strong>
* Serialized objects of this class will not be compatible with
* future Swing releases. The current serialization support is appropriate
* for short term storage or RMI between applications running the same
* version of Swing. A future release of Swing will provide support for
* long term persistence.
*/
public class WindowsScrollBarUI extends BasicScrollBarUI {
// Pattern used to paint the track when no XP theme is active; built in
// configureScrollBarColors from ScrollBar.trackForeground/track colors,
// null when either color is undefined.
private Grid thumbGrid;
// Like thumbGrid, but for the pressed-track highlight regions.
private Grid highlightGrid;
// Minimum thumb sizes taken from the XP theme in installDefaults;
// null when XP styling is not active.
private Dimension horizontalThumbSize;
private Dimension verticalThumbSize;
/**
 * Creates a UI for a JScrollBar.
 *
 * @param c the scrollbar component the UI will be installed on
 * @return the UI
 */
public static ComponentUI createUI(JComponent c) {
    return new WindowsScrollBarUI();
}
/**
 * Installs the basic defaults, then pulls the theme-specific minimum
 * thumb sizes from the XP style (clearing them when XP is inactive).
 */
protected void installDefaults() {
    super.installDefaults();

    XPStyle xp = XPStyle.getXP();
    if (xp == null) {
        horizontalThumbSize = null;
        verticalThumbSize = null;
        return;
    }
    // The XP skin paints its own edges, so drop any installed border.
    scrollbar.setBorder(null);
    horizontalThumbSize = getSize(scrollbar, xp, Part.SBP_THUMBBTNHORZ);
    verticalThumbSize = getSize(scrollbar, xp, Part.SBP_THUMBBTNVERT);
}
/** Returns the natural width/height of the given part's skin as a Dimension. */
private static Dimension getSize(Component component, XPStyle xp, Part part) {
    Skin s = xp.getSkin(component, part);
    return new Dimension(s.getWidth(), s.getHeight());
}
@Override
protected Dimension getMinimumThumbSize() {
    // Without theme-provided sizes, fall back to the basic L&F minimum.
    if ((horizontalThumbSize == null) || (verticalThumbSize == null)) {
        return super.getMinimumThumbSize();
    }
    boolean horizontal =
        (scrollbar.getOrientation() == JScrollBar.HORIZONTAL);
    return horizontal ? horizontalThumbSize : verticalThumbSize;
}
/** Uninstalls the UI and drops the cached dither patterns. */
public void uninstallUI(JComponent c) {
    super.uninstallUI(c);
    thumbGrid = null;
    highlightGrid = null;
}
/**
 * Configures the basic colors, then builds the track patterns from the
 * ScrollBar.track* UIManager colors when both colors of a pair exist.
 */
protected void configureScrollBarColors() {
    super.configureScrollBarColors();

    Color fg = UIManager.getColor("ScrollBar.trackForeground");
    if (fg != null && trackColor != null) {
        thumbGrid = Grid.getGrid(fg, trackColor);
    }
    fg = UIManager.getColor("ScrollBar.trackHighlightForeground");
    if (fg != null && trackHighlightColor != null) {
        highlightGrid = Grid.getGrid(fg, trackHighlightColor);
    }
}
/** Creates the arrow button that scrolls toward the minimum value. */
protected JButton createDecreaseButton(int orientation) {
    Color thumb = UIManager.getColor("ScrollBar.thumb");
    Color shadow = UIManager.getColor("ScrollBar.thumbShadow");
    Color darkShadow = UIManager.getColor("ScrollBar.thumbDarkShadow");
    Color highlight = UIManager.getColor("ScrollBar.thumbHighlight");
    return new WindowsArrowButton(orientation, thumb, shadow, darkShadow,
                                  highlight);
}
/** Creates the arrow button that scrolls toward the maximum value. */
protected JButton createIncreaseButton(int orientation) {
    Color thumb = UIManager.getColor("ScrollBar.thumb");
    Color shadow = UIManager.getColor("ScrollBar.thumbShadow");
    Color darkShadow = UIManager.getColor("ScrollBar.thumbDarkShadow");
    Color highlight = UIManager.getColor("ScrollBar.thumbHighlight");
    return new WindowsArrowButton(orientation, thumb, shadow, darkShadow,
                                  highlight);
}
/**
 * {@inheritDoc}
 * @since 1.6
 */
@Override
protected ArrowButtonListener createArrowButtonListener(){
    if (!XPStyle.isVista()) {
        return super.createArrowButtonListener();
    }
    // On Vista a state change for either button also changes the visual
    // state of the thumb and the other button, so repaint the whole
    // scrollbar on enter/exit.
    return new ArrowButtonListener() {
        public void mouseEntered(MouseEvent evt) {
            scrollbar.repaint();
            super.mouseEntered(evt);
        }
        public void mouseExited(MouseEvent evt) {
            scrollbar.repaint();
            super.mouseExited(evt);
        }
    };
}
/**
 * Paints the scrollbar track. Three cases: the XP theme skin when XP is
 * active, the basic L&F when no dither pattern was built, or the
 * pattern plus any active press-highlight region otherwise.
 */
protected void paintTrack(Graphics g, JComponent c, Rectangle trackBounds){
    boolean v = (scrollbar.getOrientation() == JScrollBar.VERTICAL);

    XPStyle xp = XPStyle.getXP();
    if (xp != null) {
        JScrollBar sb = (JScrollBar)c;
        State state = State.NORMAL;
        // Pending: Implement rollover (hot) and pressed
        if (!sb.isEnabled()) {
            state = State.DISABLED;
        }
        Part part = v ? Part.SBP_LOWERTRACKVERT : Part.SBP_LOWERTRACKHORZ;
        xp.getSkin(sb, part).paintSkin(g, trackBounds, state);
    } else if (thumbGrid == null) {
        super.paintTrack(g, c, trackBounds);
    }
    else {
        thumbGrid.paint(g, trackBounds.x, trackBounds.y, trackBounds.width,
                        trackBounds.height);
        // Overlay the highlight for whichever side of the thumb is being
        // pressed for a block scroll, if any.
        if (trackHighlight == DECREASE_HIGHLIGHT) {
            paintDecreaseHighlight(g);
        }
        else if (trackHighlight == INCREASE_HIGHLIGHT) {
            paintIncreaseHighlight(g);
        }
    }
}
/**
 * Paints the scrollbar thumb. Under the XP theme this chooses a themed
 * state (disabled/pressed/hot/hover/normal), paints the thumb skin, and
 * then paints the "gripper" decoration if it fits; otherwise defers to
 * the basic L&F implementation.
 */
protected void paintThumb(Graphics g, JComponent c, Rectangle thumbBounds) {
    boolean v = (scrollbar.getOrientation() == JScrollBar.VERTICAL);

    XPStyle xp = XPStyle.getXP();
    if (xp != null) {
        JScrollBar sb = (JScrollBar)c;
        State state = State.NORMAL;
        if (!sb.isEnabled()) {
            state = State.DISABLED;
        } else if (isDragging) {
            state = State.PRESSED;
        } else if (isThumbRollover()) {
            state = State.HOT;
        } else if (XPStyle.isVista()) {
            // On Vista, hovering either arrow button also puts the thumb
            // into the hover state.
            if ((incrButton != null && incrButton.getModel().isRollover()) ||
                (decrButton != null && decrButton.getModel().isRollover())) {
                state = State.HOVER;
            }
        }
        // Paint thumb
        Part thumbPart = v ? Part.SBP_THUMBBTNVERT : Part.SBP_THUMBBTNHORZ;
        xp.getSkin(sb, thumbPart).paintSkin(g, thumbBounds, state);

        // Paint the gripper, centered in the thumb, but only when the
        // thumb minus its content margins is big enough to contain it.
        Part gripperPart = v ? Part.SBP_GRIPPERVERT : Part.SBP_GRIPPERHORZ;
        Skin skin = xp.getSkin(sb, gripperPart);
        Insets gripperInsets = xp.getMargin(c, thumbPart, null, Prop.CONTENTMARGINS);
        if (gripperInsets == null ||
            (v && (thumbBounds.height - gripperInsets.top -
                   gripperInsets.bottom >= skin.getHeight())) ||
            (!v && (thumbBounds.width - gripperInsets.left -
                    gripperInsets.right >= skin.getWidth()))) {
            skin.paintSkin(g,
                           thumbBounds.x + (thumbBounds.width - skin.getWidth()) / 2,
                           thumbBounds.y + (thumbBounds.height - skin.getHeight()) / 2,
                           skin.getWidth(), skin.getHeight(), state);
        }
    } else {
        super.paintThumb(g, c, thumbBounds);
    }
}
/**
 * Paints the highlight between the decrease button and the thumb
 * (shown while the track is pressed for a block decrement).
 */
protected void paintDecreaseHighlight(Graphics g) {
    if (highlightGrid == null) {
        super.paintDecreaseHighlight(g);
    }
    else {
        Insets insets = scrollbar.getInsets();
        Rectangle thumbR = getThumbBounds();
        int x, y, w, h;

        if (scrollbar.getOrientation() == JScrollBar.VERTICAL) {
            x = insets.left;
            y = decrButton.getY() + decrButton.getHeight();
            w = scrollbar.getWidth() - (insets.left + insets.right);
            h = thumbR.y - y;
        }
        else {
            // BUGFIX: use the button's width (not height) to find the
            // right edge of the decrease button in the horizontal case.
            // The old code only worked because the arrow buttons happen
            // to be square (see WindowsArrowButton.getPreferredSize).
            x = decrButton.getX() + decrButton.getWidth();
            y = insets.top;
            w = thumbR.x - x;
            h = scrollbar.getHeight() - (insets.top + insets.bottom);
        }
        highlightGrid.paint(g, x, y, w, h);
    }
}
/**
 * Paints the highlight between the thumb and the increase button
 * (shown while the track is pressed for a block increment).
 */
protected void paintIncreaseHighlight(Graphics g) {
    if (highlightGrid == null) {
        // BUGFIX: delegate to the matching superclass method; this
        // previously called super.paintDecreaseHighlight, painting the
        // highlight on the wrong side of the thumb.
        super.paintIncreaseHighlight(g);
    }
    else {
        Insets insets = scrollbar.getInsets();
        Rectangle thumbR = getThumbBounds();
        int x, y, w, h;

        if (scrollbar.getOrientation() == JScrollBar.VERTICAL) {
            x = insets.left;
            y = thumbR.y + thumbR.height;
            w = scrollbar.getWidth() - (insets.left + insets.right);
            h = incrButton.getY() - y;
        }
        else {
            x = thumbR.x + thumbR.width;
            y = insets.top;
            w = incrButton.getX() - x;
            h = scrollbar.getHeight() - (insets.top + insets.bottom);
        }
        highlightGrid.paint(g, x, y, w, h);
    }
}
/**
 * {@inheritDoc}
 * @since 1.6
 */
@Override
protected void setThumbRollover(boolean active) {
    boolean wasRollover = isThumbRollover();
    super.setThumbRollover(active);
    // On Vista a thumb state change also changes the visual state of the
    // incr and decr buttons, so repaint the entire scrollbar.
    if (XPStyle.isVista() && active != wasRollover) {
        scrollbar.repaint();
    }
}
/**
 * WindowsArrowButton is used for the buttons to position the
 * document up/down. It differs from BasicArrowButton in that the
 * preferred size is always a square, and that under the XP theme it
 * paints itself with the themed arrow-button skin.
 */
private class WindowsArrowButton extends BasicArrowButton {

    public WindowsArrowButton(int direction, Color background, Color shadow,
                              Color darkShadow, Color highlight) {
        super(direction, background, shadow, darkShadow, highlight);
    }

    public WindowsArrowButton(int direction) {
        super(direction);
    }

    /**
     * Paints the themed arrow skin when XP is active, selecting a state
     * from the button model (pressed / disabled / hot / hover / normal)
     * crossed with the arrow direction; otherwise paints the basic arrow.
     */
    public void paint(Graphics g) {
        XPStyle xp = XPStyle.getXP();
        if (xp != null) {
            ButtonModel model = getModel();
            Skin skin = xp.getSkin(this, Part.SBP_ARROWBTN);
            State state = null;

            // On Vista, hovering the thumb or the opposite arrow button
            // puts this button into the joint "hover" state.
            boolean jointRollover = XPStyle.isVista() && (isThumbRollover() ||
                (this == incrButton && decrButton.getModel().isRollover()) ||
                (this == decrButton && incrButton.getModel().isRollover()));

            // normal, rollover, pressed, disabled
            if (model.isArmed() && model.isPressed()) {
                switch (direction) {
                case NORTH: state = State.UPPRESSED;    break;
                case SOUTH: state = State.DOWNPRESSED;  break;
                case WEST:  state = State.LEFTPRESSED;  break;
                case EAST:  state = State.RIGHTPRESSED; break;
                }
            } else if (!model.isEnabled()) {
                switch (direction) {
                case NORTH: state = State.UPDISABLED;    break;
                case SOUTH: state = State.DOWNDISABLED;  break;
                case WEST:  state = State.LEFTDISABLED;  break;
                case EAST:  state = State.RIGHTDISABLED; break;
                }
            } else if (model.isRollover() || model.isPressed()) {
                switch (direction) {
                case NORTH: state = State.UPHOT;    break;
                case SOUTH: state = State.DOWNHOT;  break;
                case WEST:  state = State.LEFTHOT;  break;
                case EAST:  state = State.RIGHTHOT; break;
                }
            } else if (jointRollover) {
                switch (direction) {
                case NORTH: state = State.UPHOVER;    break;
                case SOUTH: state = State.DOWNHOVER;  break;
                case WEST:  state = State.LEFTHOVER;  break;
                case EAST:  state = State.RIGHTHOVER; break;
                }
            } else {
                switch (direction) {
                case NORTH: state = State.UPNORMAL;    break;
                case SOUTH: state = State.DOWNNORMAL;  break;
                case WEST:  state = State.LEFTNORMAL;  break;
                case EAST:  state = State.RIGHTNORMAL; break;
                }
            }

            skin.paintSkin(g, 0, 0, getWidth(), getHeight(), state);
        } else {
            super.paint(g);
        }
    }

    /**
     * Returns a square whose side tracks the scrollbar's thickness
     * (width for vertical, height for horizontal), never less than 5.
     */
    public Dimension getPreferredSize() {
        int size = 16;
        if (scrollbar != null) {
            switch (scrollbar.getOrientation()) {
            case JScrollBar.VERTICAL:
                size = scrollbar.getWidth();
                break;
            case JScrollBar.HORIZONTAL:
                size = scrollbar.getHeight();
                break;
            }
            size = Math.max(size, 5);
        }
        return new Dimension(size, size);
    }
}
/**
 * This should be pulled out into its own class if more classes need to
 * use it.
 * <p>
 * Grid is used to draw the track for windows scrollbars. Grids
 * are cached in a HashMap, with the key being the rgb components
 * of the foreground/background colors. Further the Grid is held through
 * a WeakRef so that it can be freed when no longer needed. As the
 * Grid is rather expensive to draw, it is drawn in a BufferedImage.
 */
private static class Grid {
    // Side length, in pixels, of the cached pattern tile.
    private static final int BUFFER_SIZE = 64;
    // Cache keyed by "<fgRGB> <bgRGB>"; values are weakly held so unused
    // grids can be garbage collected.
    private static HashMap<String, WeakReference<Grid>> map;

    // Pre-rendered BUFFER_SIZE x BUFFER_SIZE pattern tile.
    private BufferedImage image;

    static {
        map = new HashMap<String, WeakReference<Grid>>();
    }

    /**
     * Returns the (possibly cached) Grid for the given foreground and
     * background colors, creating and caching a new one if needed.
     */
    public static Grid getGrid(Color fg, Color bg) {
        String key = fg.getRGB() + " " + bg.getRGB();
        WeakReference<Grid> ref = map.get(key);
        Grid grid = (ref == null) ? null : ref.get();
        if (grid == null) {
            grid = new Grid(fg, bg);
            map.put(key, new WeakReference<Grid>(grid));
        }
        return grid;
    }

    public Grid(Color fg, Color bg) {
        // Two-entry indexed color model keeps the cached image small.
        int cmap[] = { fg.getRGB(), bg.getRGB() };
        IndexColorModel icm = new IndexColorModel(8, 2, cmap, 0, false, -1,
                                                 DataBuffer.TYPE_BYTE);
        image = new BufferedImage(BUFFER_SIZE, BUFFER_SIZE,
                                  BufferedImage.TYPE_BYTE_INDEXED, icm);
        Graphics g = image.getGraphics();
        try {
            g.setClip(0, 0, BUFFER_SIZE, BUFFER_SIZE);
            paintGrid(g, fg, bg);
        }
        finally {
            g.dispose();
        }
    }

    /**
     * Paints the grid into the specified Graphics at the specified
     * location.
     */
    public void paint(Graphics g, int x, int y, int w, int h) {
        // Intersect the requested region with the clip; bail out if empty.
        Rectangle clipRect = g.getClipBounds();
        int minX = Math.max(x, clipRect.x);
        int minY = Math.max(y, clipRect.y);
        int maxX = Math.min(clipRect.x + clipRect.width, x + w);
        int maxY = Math.min(clipRect.y + clipRect.height, y + h);

        if (maxX <= minX || maxY <= minY) {
            return;
        }
        // Tile the cached image over the region.  The "% 2" offsets keep
        // the 2-pixel pattern in phase with the (x, y) origin; after the
        // first (possibly shortened) tile in each direction the offset is
        // reset to 0 and the counter backed up accordingly.
        int xOffset = (minX - x) % 2;
        for (int xCounter = minX; xCounter < maxX;
             xCounter += BUFFER_SIZE) {
            int yOffset = (minY - y) % 2;
            int width = Math.min(BUFFER_SIZE - xOffset,
                                 maxX - xCounter);

            for (int yCounter = minY; yCounter < maxY;
                 yCounter += BUFFER_SIZE) {
                int height = Math.min(BUFFER_SIZE - yOffset,
                                      maxY - yCounter);

                g.drawImage(image, xCounter, yCounter,
                            xCounter + width, yCounter + height,
                            xOffset, yOffset,
                            xOffset + width, yOffset + height, null);
                if (yOffset != 0) {
                    yCounter -= yOffset;
                    yOffset = 0;
                }
            }
            if (xOffset != 0) {
                xCounter -= xOffset;
                xOffset = 0;
            }
        }
    }

    /**
     * Actually renders the grid into the Graphics <code>g</code>.
     */
    private void paintGrid(Graphics g, Color fg, Color bg) {
        // Fill with the background, then draw foreground diagonals every
        // 2 pixels.  The four loops cover the diagonals that start on the
        // top edge vs. the left edge, clipped at the right/bottom edges.
        Rectangle clipRect = g.getClipBounds();
        g.setColor(bg);
        g.fillRect(clipRect.x, clipRect.y, clipRect.width,
                   clipRect.height);
        g.setColor(fg);
        g.translate(clipRect.x, clipRect.y);
        int width = clipRect.width;
        int height = clipRect.height;
        int xCounter = clipRect.x % 2;
        for (int end = width - height; xCounter < end; xCounter += 2) {
            g.drawLine(xCounter, 0, xCounter + height, height);
        }
        for (int end = width; xCounter < end; xCounter += 2) {
            g.drawLine(xCounter, 0, width, width - xCounter);
        }
        int yCounter = ((clipRect.x % 2) == 0) ? 2 : 1;
        for (int end = height - width; yCounter < end; yCounter += 2) {
            g.drawLine(0, yCounter, width, yCounter + width);
        }
        for (int end = height; yCounter < end; yCounter += 2) {
            g.drawLine(0, yCounter, height - yCounter, height);
        }
        g.translate(-clipRect.x, -clipRect.y);
    }
}
}
| shun634501730/java_source_cn | src_en/com/sun/java/swing/plaf/windows/WindowsScrollBarUI.java | Java | apache-2.0 | 18,587 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.thrift.api.udf;
import com.facebook.drift.annotations.ThriftConstructor;
import com.facebook.drift.annotations.ThriftField;
import com.facebook.drift.annotations.ThriftStruct;
import com.google.common.collect.ImmutableList;
import javax.annotation.Nullable;
import javax.annotation.concurrent.Immutable;
import java.util.List;
import static com.facebook.drift.annotations.ThriftField.Recursiveness.TRUE;
import static com.facebook.drift.annotations.ThriftField.Requiredness.OPTIONAL;
import static java.util.Objects.requireNonNull;
@Immutable
@ThriftStruct
public class UdfExecutionFailureInfo
{
    private final String type;
    // Nullable: Throwable.getMessage() may legitimately return null.
    private final String message;
    private final UdfExecutionFailureInfo cause;
    private final List<UdfExecutionFailureInfo> suppressed;
    private final List<String> stack;

    /**
     * @param type fully qualified exception class name; never null
     * @param message exception message; may be null (many exceptions carry none)
     * @param cause nested failure that caused this one; may be null
     * @param suppressed suppressed failures; defensively copied
     * @param stack stack trace lines; defensively copied
     */
    @ThriftConstructor
    public UdfExecutionFailureInfo(
            String type,
            @Nullable String message,
            @Nullable UdfExecutionFailureInfo cause,
            List<UdfExecutionFailureInfo> suppressed,
            List<String> stack)
    {
        this.type = requireNonNull(type, "type is null");
        // Do NOT requireNonNull here: the getter is @Nullable, and rejecting
        // null would make it impossible to report exceptions that were
        // constructed without a message.
        this.message = message;
        this.cause = cause;
        this.suppressed = ImmutableList.copyOf(suppressed);
        this.stack = ImmutableList.copyOf(stack);
    }

    @ThriftField(1)
    public String getType()
    {
        return type;
    }

    @Nullable
    @ThriftField(2)
    public String getMessage()
    {
        return message;
    }

    @Nullable
    @ThriftField(value = 3, isRecursive = TRUE, requiredness = OPTIONAL)
    public UdfExecutionFailureInfo getCause()
    {
        return cause;
    }

    @ThriftField(4)
    public List<UdfExecutionFailureInfo> getSuppressed()
    {
        return suppressed;
    }

    @ThriftField(5)
    public List<String> getStack()
    {
        return stack;
    }
}
| facebook/presto | presto-thrift-api/src/main/java/com/facebook/presto/thrift/api/udf/UdfExecutionFailureInfo.java | Java | apache-2.0 | 2,491 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.storm.mongodb.topology;
import org.apache.storm.spout.SpoutOutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.IRichSpout;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Values;
import java.util.Map;
import java.util.Random;
import java.util.UUID;
/**
 * Test spout that endlessly emits one random word per call to
 * {@link #nextTuple()}, anchored with a random UUID message id so failed
 * tuples can be acked/failed individually.
 */
public class WordSpout implements IRichSpout {
    boolean isDistributed;
    SpoutOutputCollector collector;
    // Reused RNG, initialized in open(); allocating a new Random on every
    // nextTuple() call (as the original did) is wasteful in a hot loop.
    // Initialized post-deserialization because open() runs on the worker.
    private Random rand;
    public static final String[] words = new String[] { "apple", "orange", "pineapple", "banana", "watermelon" };

    public WordSpout() {
        this(true);
    }

    public WordSpout(boolean isDistributed) {
        this.isDistributed = isDistributed;
    }

    public boolean isDistributed() {
        return this.isDistributed;
    }

    @SuppressWarnings("rawtypes")
    public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
        this.collector = collector;
        this.rand = new Random();
    }

    public void close() {
    }

    /** Emits a single randomly chosen word with a fresh UUID message id. */
    public void nextTuple() {
        final String word = words[rand.nextInt(words.length)];
        this.collector.emit(new Values(word), UUID.randomUUID());
        // Give other threads a chance to run between emissions.
        Thread.yield();
    }

    public void ack(Object msgId) {
    }

    public void fail(Object msgId) {
    }

    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        declarer.declare(new Fields("word"));
    }

    @Override
    public void activate() {
    }

    @Override
    public void deactivate() {
    }

    @Override
    public Map<String, Object> getComponentConfiguration() {
        return null;
    }
}
| dke-knu/i2am | rdma-based-storm/examples/storm-mongodb-examples/src/main/java/org/apache/storm/mongodb/topology/WordSpout.java | Java | apache-2.0 | 2,496 |
//
// MessagePack for Java
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package org.msgpack.core;
import java.math.BigInteger;
/**
 * This error is thrown when the user tries to read an integer value
 * using a smaller type. For example, calling MessageUnpacker.unpackInt() for an integer value
 * that is larger than Integer.MAX_VALUE will cause this exception.
 */
public class MessageIntegerOverflowException
        extends MessageTypeException
{
    // The overflowing value, preserved at full precision.
    private final BigInteger bigInteger;

    public MessageIntegerOverflowException(BigInteger bigInteger)
    {
        super();
        this.bigInteger = bigInteger;
    }

    public MessageIntegerOverflowException(long value)
    {
        this(BigInteger.valueOf(value));
    }

    public MessageIntegerOverflowException(String message, BigInteger bigInteger)
    {
        super(message);
        this.bigInteger = bigInteger;
    }

    /**
     * Returns the integer value that could not be represented in the
     * requested type.
     */
    public BigInteger getBigInteger()
    {
        return bigInteger;
    }

    // NOTE(review): this override reports the overflowing value itself, so a
    // message passed to the three-arg constructor is never visible through
    // getMessage() -- presumably intentional, but worth confirming.
    @Override
    public String getMessage()
    {
        return bigInteger.toString();
    }
}
| jackyglony/msgpack-java | msgpack-core/src/main/java/org/msgpack/core/MessageIntegerOverflowException.java | Java | apache-2.0 | 1,614 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.master.balancer;
import java.util.List;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.master.LoadBalancer;
import org.apache.hadoop.hbase.master.RegionPlan;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
/**
* Test the load balancer that is created by default.
*/
@Category(MediumTests.class)
public class TestDefaultLoadBalancer extends BalancerTestBase {
    private static final Log LOG = LogFactory.getLog(TestDefaultLoadBalancer.class);

    private static LoadBalancer loadBalancer;

    @BeforeClass
    public static void beforeAllTests() throws Exception {
        Configuration conf = HBaseConfiguration.create();
        // Zero slop makes the balancer strict: after balancing, every server
        // must host exactly floor(average) or ceiling(average) regions.
        conf.set("hbase.regions.slop", "0");
        loadBalancer = new DefaultLoadBalancer();
        loadBalancer.setConf(conf);
    }

    // int[testnum][servernumber] -> numregions
    // Each row is one mock cluster: element i is the number of regions
    // initially hosted by server i.
    int[][] clusterStateMocks = new int[][] {
        // 1 node
        new int[] { 0 },
        new int[] { 1 },
        new int[] { 10 },
        // 2 node
        new int[] { 0, 0 },
        new int[] { 2, 0 },
        new int[] { 2, 1 },
        new int[] { 2, 2 },
        new int[] { 2, 3 },
        new int[] { 2, 4 },
        new int[] { 1, 1 },
        new int[] { 0, 1 },
        new int[] { 10, 1 },
        new int[] { 14, 1432 },
        new int[] { 47, 53 },
        // 3 node
        new int[] { 0, 1, 2 },
        new int[] { 1, 2, 3 },
        new int[] { 0, 2, 2 },
        new int[] { 0, 3, 0 },
        new int[] { 0, 4, 0 },
        new int[] { 20, 20, 0 },
        // 4 node
        new int[] { 0, 1, 2, 3 },
        new int[] { 4, 0, 0, 0 },
        new int[] { 5, 0, 0, 0 },
        new int[] { 6, 6, 0, 0 },
        new int[] { 6, 2, 0, 0 },
        new int[] { 6, 1, 0, 0 },
        new int[] { 6, 0, 0, 0 },
        new int[] { 4, 4, 4, 7 },
        new int[] { 4, 4, 4, 8 },
        new int[] { 0, 0, 0, 7 },
        // 5 node
        new int[] { 1, 1, 1, 1, 4 },
        // more nodes
        new int[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 },
        new int[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 10 }, new int[] { 6, 6, 5, 6, 6, 6, 6, 6, 6, 1 },
        new int[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 54 }, new int[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 55 },
        new int[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 56 }, new int[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 16 },
        new int[] { 1, 1, 1, 1, 1, 1, 1, 1, 1, 8 }, new int[] { 1, 1, 1, 1, 1, 1, 1, 1, 1, 9 },
        new int[] { 1, 1, 1, 1, 1, 1, 1, 1, 1, 10 }, new int[] { 1, 1, 1, 1, 1, 1, 1, 1, 1, 123 },
        new int[] { 1, 1, 1, 1, 1, 1, 1, 1, 1, 155 },
        new int[] { 0, 0, 144, 1, 1, 1, 1, 1123, 133, 138, 12, 1444 },
        new int[] { 0, 0, 144, 1, 0, 4, 1, 1123, 133, 138, 12, 1444 },
        new int[] { 1538, 1392, 1561, 1557, 1535, 1553, 1385, 1542, 1619 } };

    /**
     * Test the load balancing algorithm.
     *
     * Invariant is that all servers should be hosting either floor(average) or
     * ceiling(average)
     *
     * @throws Exception
     */
    @Test
    public void testBalanceCluster() throws Exception {
        for (int[] mockCluster : clusterStateMocks) {
            // Build a mock server->regions map from the row, balance it, then
            // apply the resulting plans and check the balance invariant.
            Map<ServerName, List<HRegionInfo>> servers = mockClusterServers(mockCluster);
            List<ServerAndLoad> list = convertToList(servers);
            LOG.info("Mock Cluster : " + printMock(list) + " " + printStats(list));
            List<RegionPlan> plans = loadBalancer.balanceCluster(servers);
            List<ServerAndLoad> balancedCluster = reconcile(list, plans);
            LOG.info("Mock Balance : " + printMock(balancedCluster));
            assertClusterAsBalanced(balancedCluster);
            // Return mock regions/servers to the base-class pools for reuse
            // by the next iteration.
            for (Map.Entry<ServerName, List<HRegionInfo>> entry : servers.entrySet()) {
                returnRegions(entry.getValue());
                returnServer(entry.getKey());
            }
        }
    }
}
| daidong/DominoHBase | hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestDefaultLoadBalancer.java | Java | apache-2.0 | 4,805 |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
namespace Microsoft.CodeAnalysis.Host
{
    /// <summary>
    /// An <see cref="IChecksummedPersistentStorageService"/> that performs no
    /// persistence at all: every request returns the shared
    /// <see cref="NoOpPersistentStorage.Instance"/>.
    /// </summary>
    internal class NoOpPersistentStorageService : IChecksummedPersistentStorageService
    {
        // Stateless, so a single shared instance suffices.
        public static readonly IPersistentStorageService Instance = new NoOpPersistentStorageService();

        private NoOpPersistentStorageService()
        {
        }

        public IPersistentStorage GetStorage(Solution solution)
            => NoOpPersistentStorage.Instance;

        // checkBranchId is irrelevant for a no-op store and is ignored.
        public IPersistentStorage GetStorage(Solution solution, bool checkBranchId)
            => NoOpPersistentStorage.Instance;

        IChecksummedPersistentStorage IChecksummedPersistentStorageService.GetStorage(Solution solution)
            => NoOpPersistentStorage.Instance;

        IChecksummedPersistentStorage IChecksummedPersistentStorageService.GetStorage(Solution solution, bool checkBranchId)
            => NoOpPersistentStorage.Instance;
    }
}
| davkean/roslyn | src/Workspaces/Core/Portable/Workspace/Host/PersistentStorage/NoOpPersistentStorageService.cs | C# | apache-2.0 | 1,090 |
# Copyright 2012 NEC Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from django.core.urlresolvers import reverse
from django import template
from django.template import defaultfilters as filters
from django.utils.translation import pgettext_lazy
from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ungettext_lazy
from horizon import exceptions
from horizon import tables
from openstack_dashboard import api
from openstack_dashboard import policy
from openstack_dashboard.usage import quotas
LOG = logging.getLogger(__name__)
class CheckNetworkEditable(object):
    """Mixin that hides row actions for networks the tenant cannot modify."""

    def allowed(self, request, datum=None):
        # Shared networks are created and managed exclusively by
        # administrators, so edit/delete actions are disallowed on them.
        is_shared = bool(datum and datum.shared)
        return not is_shared
class DeleteNetwork(policy.PolicyTargetMixin, CheckNetworkEditable,
                    tables.DeleteAction):
    """Table action that deletes networks, cascading to their subnets first.

    Neutron will not delete a network that still has subnets, so each
    subnet is removed before the network itself.
    """
    @staticmethod
    def action_present(count):
        return ungettext_lazy(
            u"Delete Network",
            u"Delete Networks",
            count
        )

    @staticmethod
    def action_past(count):
        return ungettext_lazy(
            u"Deleted Network",
            u"Deleted Networks",
            count
        )

    policy_rules = (("network", "delete_network"),)

    def delete(self, request, network_id):
        # Fall back to the id in error messages if the name lookup fails.
        network_name = network_id
        try:
            # Retrieve the network list.
            network = api.neutron.network_get(request, network_id,
                                              expand_subnet=False)
            network_name = network.name
            LOG.debug('Network %(network_id)s has subnets: %(subnets)s',
                      {'network_id': network_id, 'subnets': network.subnets})
            # Delete the subnets before the network; Neutron rejects the
            # network delete otherwise.
            for subnet_id in network.subnets:
                api.neutron.subnet_delete(request, subnet_id)
                LOG.debug('Deleted subnet %s', subnet_id)
            api.neutron.network_delete(request, network_id)
            LOG.debug('Deleted network %s successfully', network_id)
        except Exception:
            # Report the failure to the user and redirect to the index page.
            msg = _('Failed to delete network %s')
            LOG.info(msg, network_id)
            redirect = reverse("horizon:project:networks:index")
            exceptions.handle(request, msg % network_name, redirect=redirect)
class CreateNetwork(tables.LinkAction):
    """Link action opening the network-creation workflow.

    The button is always shown but is disabled (with an explanatory label)
    when the tenant's network quota is exhausted.
    """
    name = "create"
    verbose_name = _("Create Network")
    url = "horizon:project:networks:create"
    classes = ("ajax-modal",)
    icon = "plus"
    policy_rules = (("network", "create_network"),)

    def allowed(self, request, datum=None):
        usages = quotas.tenant_quota_usages(request)
        # when Settings.OPENSTACK_NEUTRON_NETWORK['enable_quotas'] = False
        # usages["networks"] is empty
        if usages.get('networks', {}).get('available', 1) <= 0:
            if "disabled" not in self.classes:
                # NOTE: assigning to self.classes creates an instance
                # attribute that shadows the class-level tuple, so other
                # action instances are unaffected.
                self.classes = [c for c in self.classes] + ["disabled"]
                self.verbose_name = _("Create Network (Quota exceeded)")
        else:
            self.verbose_name = _("Create Network")
            self.classes = [c for c in self.classes if c != "disabled"]
        return True
class EditNetwork(policy.PolicyTargetMixin, CheckNetworkEditable,
                  tables.LinkAction):
    """Link action opening the edit form for a (non-shared) network."""
    name = "update"
    verbose_name = _("Edit Network")
    url = "horizon:project:networks:update"
    classes = ("ajax-modal",)
    icon = "pencil"
    policy_rules = (("network", "update_network"),)
class CreateSubnet(policy.PolicyTargetMixin, CheckNetworkEditable,
                   tables.LinkAction):
    """Row action to add a subnet to a network.

    Disabled (with an explanatory label) when the subnet quota is exhausted.
    """
    name = "subnet"
    verbose_name = _("Add Subnet")
    url = "horizon:project:networks:addsubnet"
    classes = ("ajax-modal",)
    icon = "plus"
    policy_rules = (("network", "create_subnet"),)
    # neutron has used both in their policy files, supporting both
    policy_target_attrs = (("network:tenant_id", "tenant_id"),
                           ("network:project_id", "tenant_id"),)

    def allowed(self, request, datum=None):
        usages = quotas.tenant_quota_usages(request)
        # when Settings.OPENSTACK_NEUTRON_NETWORK['enable_quotas'] = False
        # usages["subnets'] is empty
        if usages.get('subnets', {}).get('available', 1) <= 0:
            if 'disabled' not in self.classes:
                # NOTE: assigning to self.classes creates an instance
                # attribute shadowing the class-level tuple.
                self.classes = [c for c in self.classes] + ['disabled']
                self.verbose_name = _('Add Subnet (Quota exceeded)')
        else:
            self.verbose_name = _('Add Subnet')
            self.classes = [c for c in self.classes if c != 'disabled']
        return True
def get_subnets(network):
    """Render a network's subnets as the HTML fragment shown in the table."""
    return template.loader.render_to_string(
        'project/networks/_network_ips.html',
        {"subnets": network.subnets})
# Mapping of Neutron admin_state values to translated display strings.
DISPLAY_CHOICES = (
    ("up", pgettext_lazy("Admin state of a Network", u"UP")),
    ("down", pgettext_lazy("Admin state of a Network", u"DOWN")),
)
# Mapping of Neutron network status values to translated display strings.
STATUS_DISPLAY_CHOICES = (
    ("active", pgettext_lazy("Current status of a Network", u"Active")),
    ("build", pgettext_lazy("Current status of a Network", u"Build")),
    ("down", pgettext_lazy("Current status of a Network", u"Down")),
    ("error", pgettext_lazy("Current status of a Network", u"Error")),
)
class ProjectNetworksFilterAction(tables.FilterAction):
    """Server-side filter for the project networks table.

    Each choice is (field, label, server_side) with an optional help text.
    """
    name = "filter_project_networks"
    filter_type = "server"
    filter_choices = (('name', _("Name ="), True),
                      ('shared', _("Shared ="), True,
                       _("e.g. Yes / No")),
                      ('router:external', _("External ="), True,
                       _("e.g. Yes / No")),
                      ('status', _("Status ="), True),
                      ('admin_state_up', _("Admin State ="), True,
                       _("e.g. UP / DOWN")))
class NetworksTable(tables.DataTable):
    """Table listing the project's networks with their subnets and state."""
    name = tables.WrappingColumn("name_or_id",
                                 verbose_name=_("Name"),
                                 link='horizon:project:networks:detail')
    # Rendered via get_subnets() as an HTML fragment per row.
    subnets = tables.Column(get_subnets,
                            verbose_name=_("Subnets Associated"),)
    shared = tables.Column("shared", verbose_name=_("Shared"),
                           filters=(filters.yesno, filters.capfirst))
    external = tables.Column("router:external", verbose_name=_("External"),
                             filters=(filters.yesno, filters.capfirst))
    status = tables.Column("status", verbose_name=_("Status"),
                           display_choices=STATUS_DISPLAY_CHOICES)
    admin_state = tables.Column("admin_state",
                                verbose_name=_("Admin State"),
                                display_choices=DISPLAY_CHOICES)

    class Meta(object):
        name = "networks"
        verbose_name = _("Networks")
        table_actions = (CreateNetwork, DeleteNetwork,
                         ProjectNetworksFilterAction)
        row_actions = (EditNetwork, CreateSubnet, DeleteNetwork)
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.descriptors;
import org.apache.flink.annotation.Internal;
import org.apache.flink.table.api.ValidationException;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Consumer;
/** Validator for {@link FunctionDescriptor}. */
@Internal
public class FunctionDescriptorValidator implements DescriptorValidator {

    public static final String FROM = "from";
    public static final String FROM_VALUE_CLASS = "class";
    public static final String FROM_VALUE_PYTHON = "python";

    @Override
    public void validate(DescriptorProperties properties) {
        // Fail fast when the mandatory 'from' key is absent.
        if (!properties.containsKey(FROM)) {
            throw new ValidationException("Could not find 'from' property for function.");
        }

        // Dispatch to the validator matching the declared function source.
        Map<String, Consumer<String>> validatorsByFrom = new HashMap<>();
        validatorsByFrom.put(
                FROM_VALUE_CLASS, value -> new ClassInstanceValidator().validate(properties));
        validatorsByFrom.put(
                FROM_VALUE_PYTHON, value -> new PythonFunctionValidator().validate(properties));

        properties.validateEnum(FROM, false, validatorsByFrom);
    }
}
| rmetzger/flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/FunctionDescriptorValidator.java | Java | apache-2.0 | 1,990 |
/*
* Copyright (c) 2005, 2006, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package sun.security.smartcardio;
import java.security.*;
import javax.smartcardio.*;
/**
 * Provider object for PC/SC.
 *
 * @since 1.6
 * @author Andreas Sterbenz
 */
public final class SunPCSC extends Provider {

    private static final long serialVersionUID = 6168388284028876579L;

    public SunPCSC() {
        super("SunPCSC", 1.6d, "Sun PC/SC provider");
        // Register the TerminalFactory implementation inside doPrivileged so
        // the provider can be constructed even under a restrictive
        // SecurityManager.
        AccessController.doPrivileged(new PrivilegedAction<Void>() {
            public Void run() {
                put("TerminalFactory.PC/SC", "sun.security.smartcardio.SunPCSC$Factory");
                return null;
            }
        });
    }

    /**
     * TerminalFactorySpi implementation backing the "PC/SC" factory type.
     */
    public static final class Factory extends TerminalFactorySpi {
        public Factory(Object obj) throws PCSCException {
            // This factory takes no configuration parameters.
            if (obj != null) {
                throw new IllegalArgumentException
                    ("SunPCSC factory does not use parameters");
            }
            // make sure PCSC is available and that we can obtain a context
            PCSC.checkAvailable();
            PCSCTerminals.initContext();
        }

        /**
         * Returns the available readers.
         * This must be a new object for each call.
         */
        protected CardTerminals engineTerminals() {
            return new PCSCTerminals();
        }
    }
}
| andreagenso/java2scala | test/J2s/java/openjdk-6-src-b27/jdk/src/share/classes/sun/security/smartcardio/SunPCSC.java | Java | apache-2.0 | 2,515 |
# encoding: utf-8
# This file is distributed under New Relic's license terms.
# See https://github.com/newrelic/rpm/blob/master/LICENSE for complete details.
# This class is the central point for dispatching get_agent_commands messages
# to the various components that actually process them.
#
# This could be evented further, but we eventually need direct access to things
# like the ThreadProfiler, so it's simpler to just keep it together here.
require 'new_relic/agent/commands/agent_command'
require 'new_relic/agent/commands/xray_session_collection'
require 'new_relic/agent/threading/backtrace_service'
module NewRelic
  module Agent
    module Commands
      # Central dispatcher for get_agent_commands responses from the
      # collector: routes each named command to the component that handles
      # it (thread profiler, X-Ray sessions) and reports results back.
      class AgentCommandRouter
        attr_reader :handlers

        attr_accessor :thread_profiler_session, :backtrace_service,
                      :xray_session_collection

        def initialize(event_listener=nil)
          # Unknown command names fall through to unrecognized_agent_command
          # via the Hash default proc rather than raising.
          @handlers = Hash.new { |*| Proc.new { |cmd| self.unrecognized_agent_command(cmd) } }

          @backtrace_service = Threading::BacktraceService.new(event_listener)

          @thread_profiler_session = ThreadProfilerSession.new(@backtrace_service)
          @xray_session_collection = XraySessionCollection.new(@backtrace_service, event_listener)

          @handlers['start_profiler'] = Proc.new { |cmd| thread_profiler_session.handle_start_command(cmd) }
          @handlers['stop_profiler'] = Proc.new { |cmd| thread_profiler_session.handle_stop_command(cmd) }
          @handlers['active_xray_sessions'] = Proc.new { |cmd| xray_session_collection.handle_active_xray_sessions(cmd) }

          if event_listener
            event_listener.subscribe(:before_shutdown, &method(:on_before_shutdown))
          end
        end

        def new_relic_service
          NewRelic::Agent.instance.service
        end

        # Polls the collector for commands, stops X-Ray sessions that are no
        # longer requested, runs the commands, and sends results back.
        def check_for_and_handle_agent_commands
          commands = get_agent_commands

          stop_xray_sessions unless active_xray_command?(commands)

          results = invoke_commands(commands)
          new_relic_service.agent_command_results(results) unless results.empty?
        end

        def stop_xray_sessions
          self.xray_session_collection.stop_all_sessions
        end

        def active_xray_command?(commands)
          commands.any? {|command| command.name == 'active_xray_sessions'}
        end

        # Shutdown hook: finish an in-flight thread profile so its data is
        # not lost when the process exits.
        def on_before_shutdown(*args)
          if self.thread_profiler_session.running?
            self.thread_profiler_session.stop(true)
          end
        end

        # Collects completed thread profiles from both sources for sending.
        def harvest!
          profiles = []
          profiles += harvest_from_xray_session_collection
          profiles += harvest_from_thread_profiler_session
          log_profiles(profiles)
          profiles
        end

        # We don't currently support merging thread profiles that failed to send
        # back into the AgentCommandRouter, so we just no-op this method.
        # Same with reset! - we don't support asynchronous cancellation of a
        # running thread profile or X-Ray session currently.
        def merge!(*args); end

        def reset!; end

        def harvest_from_xray_session_collection
          self.xray_session_collection.harvest_thread_profiles
        end

        # Stops and harvests the profiler session only once it has collected
        # enough data to be worth sending.
        def harvest_from_thread_profiler_session
          if self.thread_profiler_session.ready_to_harvest?
            self.thread_profiler_session.stop(true)
            [self.thread_profiler_session.harvest]
          else
            []
          end
        end

        def log_profiles(profiles)
          if profiles.empty?
            ::NewRelic::Agent.logger.debug "No thread profiles with data found to send."
          else
            profile_descriptions = profiles.map { |p| p.to_log_description }
            ::NewRelic::Agent.logger.debug "Sending thread profiles [#{profile_descriptions.join(", ")}]"
          end
        end

        # Fetches raw commands from the collector and wraps them in
        # AgentCommand objects.
        def get_agent_commands
          commands = new_relic_service.get_agent_commands
          NewRelic::Agent.logger.debug "Received get_agent_commands = #{commands.inspect}"
          commands.map {|collector_command| AgentCommand.new(collector_command)}
        end

        # Runs each command and maps its id (as a String) to the outcome.
        def invoke_commands(agent_commands)
          results = {}

          agent_commands.each do |agent_command|
            results[agent_command.id.to_s] = invoke_command(agent_command)
          end

          results
        end

        class AgentCommandError < StandardError
        end

        # Invokes a single command, converting expected failures into an
        # error result instead of letting them propagate.
        def invoke_command(agent_command)
          begin
            call_handler_for(agent_command)
            return success
          rescue AgentCommandError => e
            NewRelic::Agent.logger.debug(e)
            error(e)
          end
        end

        # An empty hash is the collector's "success" payload.
        SUCCESS_RESULT = {}.freeze
        ERROR_KEY = "error"

        def success
          SUCCESS_RESULT
        end

        def error(err)
          { ERROR_KEY => err.message }
        end

        def call_handler_for(agent_command)
          handler = select_handler(agent_command)
          handler.call(agent_command)
        end

        def select_handler(agent_command)
          @handlers[agent_command.name]
        end

        def unrecognized_agent_command(agent_command)
          NewRelic::Agent.logger.debug("Unrecognized agent command #{agent_command.inspect}")
        end
      end
    end
  end
end
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef __STOUT_OS_WINDOWS_MKDIR_HPP__
#define __STOUT_OS_WINDOWS_MKDIR_HPP__
#include <string>
#include <vector>
#include <stout/error.hpp>
#include <stout/nothing.hpp>
#include <stout/strings.hpp>
#include <stout/try.hpp>
#include <stout/windows.hpp>
#include <stout/os/exists.hpp>
#include <stout/os/constants.hpp>
#include <stout/internal/windows/longpath.hpp>
namespace os {
// Creates the directory `directory`, returning `Nothing` on success.
//
// With `recursive` (the default), every missing intermediate directory
// along the path is created as well, akin to `mkdir -p`. With
// `recursive = false`, only the single directory is created, and an
// already-existing directory is treated as success.
inline Try<Nothing> mkdir(const std::string& directory, bool recursive = true)
{
  if (!recursive) {
    // NOTE: We check for existence because parts of certain directories
    // like `C:\` will return an error if passed to `CreateDirectory`,
    // even though the drive may already exist.
    if (os::exists(directory)) {
      return Nothing();
    }

    // Use the `\\?\`-prefixed wide-character path so that paths longer
    // than `MAX_PATH` can be created.
    std::wstring longpath = ::internal::windows::longpath(directory);
    if (::CreateDirectoryW(longpath.data(), nullptr) == 0) {
      return WindowsError("Failed to create directory: " + directory);
    }
  } else {
    // Remove the long path prefix, if it already exists, otherwise the
    // tokenizer includes the long path prefix (`\\?\`) as the first part
    // of the path.
    std::vector<std::string> tokens = strings::tokenize(
        strings::remove(directory, os::LONGPATH_PREFIX, strings::Mode::PREFIX),
        stringify(os::PATH_SEPARATOR));

    // Create each successive prefix of the path; components that already
    // exist are accepted by the non-recursive branch above.
    std::string path;

    foreach (const std::string& token, tokens) {
      path += token + os::PATH_SEPARATOR;

      Try<Nothing> result = mkdir(path, false);
      if (result.isError()) {
        return result;
      }
    }
  }

  return Nothing();
}
} // namespace os {
#endif // __STOUT_OS_WINDOWS_MKDIR_HPP__
| shakamunyi/mesos | 3rdparty/stout/include/stout/os/windows/mkdir.hpp | C++ | apache-2.0 | 2,179 |
/*
* Copyright 2017 - 2018 Anton Tananaev (anton@traccar.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.traccar.protocol;
import io.netty.handler.codec.string.StringDecoder;
import io.netty.handler.codec.string.StringEncoder;
import org.traccar.BaseProtocol;
import org.traccar.PipelineBuilder;
import org.traccar.TrackerServer;
public class TmgProtocol extends BaseProtocol {

    public TmgProtocol() {
        addServer(new TrackerServer(false, getName()) {
            @Override
            protected void addProtocolHandlers(PipelineBuilder pipeline) {
                // Split the raw byte stream into complete TMG frames first,
                // then decode them as text for the protocol decoder.
                pipeline.addLast(new TmgFrameDecoder());
                pipeline.addLast(new StringEncoder());
                pipeline.addLast(new StringDecoder());
                pipeline.addLast(new TmgProtocolDecoder(TmgProtocol.this));
            }
        });
    }

}
| tananaev/traccar | src/main/java/org/traccar/protocol/TmgProtocol.java | Java | apache-2.0 | 1,371 |
# coding=utf-8
# Copyright 2022 The init2winit Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| google/init2winit | init2winit/optimizer_lib/__init__.py | Python | apache-2.0 | 603 |
/* Copyright 2005-2006 Tim Fennell
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.sourceforge.stripes.util;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.regex.Pattern;
/**
* Provides simple utility methods for dealing with HTML.
*
* @author Tim Fennell
*/
/**
 * Provides simple utility methods for dealing with HTML.
 *
 * @author Tim Fennell
 */
public class HtmlUtil {
    /** Delimiter inserted between values when combining them into one field value. */
    private static final String FIELD_DELIMITER_STRING = "||";
    /** Regex-escaped form of the delimiter, used to split combined values. */
    private static final Pattern FIELD_DELIMITER_PATTERN = Pattern.compile("\\|\\|");

    /**
     * Replaces special HTML characters from the set {@literal [<, >, ", &]} with their HTML
     * escape codes. Note that because the escape codes are multi-character the returned
     * String could be longer than the one passed in. Single quotes are deliberately not
     * escaped (see comment in the body).
     *
     * @param fragment a String fragment that might have HTML special characters in it
     * @return the fragment with special characters escaped, or null if fragment was null
     */
    public static String encode(String fragment) {
        // If the input is null, then the output is null
        if (fragment == null) return null;

        StringBuilder builder = new StringBuilder(fragment.length() + 10); // a little wiggle room
        char[] characters = fragment.toCharArray();

        // This loop used to also look for and replace single ticks with &#39; but it
        // turns out that it's not strictly necessary since Stripes uses double-quotes
        // around all form fields, and stupid IE6 will render &#39; verbatim instead
        // of as a single quote.
        for (int i = 0; i < characters.length; ++i) {
            switch (characters[i]) {
                case '<' : builder.append("&lt;");   break;
                case '>' : builder.append("&gt;");   break;
                case '"' : builder.append("&quot;"); break;
                case '&' : builder.append("&amp;");  break;
                default: builder.append(characters[i]);
            }
        }

        return builder.toString();
    }

    /**
     * One of a pair of methods (the other is splitValues) that is used to combine several
     * un-encoded values into a single delimited, encoded value for placement into a
     * hidden field.
     *
     * @param values One or more values which are to be combined
     * @return a single HTML-encoded String that contains all the values in such a way that
     *         they can be converted back into a Collection of Strings with splitValues().
     */
    public static String combineValues(Collection<String> values) {
        if (values == null || values.isEmpty()) {
            return "";
        }

        StringBuilder builder = new StringBuilder(values.size() * 30);
        for (String value : values) {
            builder.append(value).append(FIELD_DELIMITER_STRING);
        }

        // Encode once at the end so the delimiter itself is never escaped.
        return encode(builder.toString());
    }

    /**
     * Takes in a String produced by combineValues and returns a Collection of values that
     * contains the same values as originally supplied to combineValues. Note that the order
     * or items in the collection (and indeed the type of Collection used) are not guaranteed
     * to be the same.
     *
     * @param value a String value produced by combineValues
     * @return a Collection of zero or more Strings
     */
    public static Collection<String> splitValues(String value) {
        if (value == null || value.isEmpty()) {
            return Collections.emptyList();
        }

        // Pattern.split drops trailing empty strings, so the trailing
        // delimiter added by combineValues produces no empty element.
        String[] splits = FIELD_DELIMITER_PATTERN.split(value);
        return Arrays.asList(splits);
    }
}
| scarcher2/stripes | stripes/src/net/sourceforge/stripes/util/HtmlUtil.java | Java | apache-2.0 | 4,132 |
/**
* Sitespeed.io - How speedy is your site? (https://www.sitespeed.io)
* Copyright (c) 2014, Peter Hedenskog, Tobias Lidskog
* and other contributors
* Released under the Apache 2.0 License
*/
'use strict';
var util = require('../util/util'),
RequestTiming = require('../requestTiming'),
Stats = require('fast-stats').Stats,
winston = require('winston');
var domains = {};
exports.processPage = function(pageData) {
var log = winston.loggers.get('sitespeed.io');
var harData = [];
if (pageData.browsertime && pageData.browsertime.har) {
Array.prototype.push.apply(harData, pageData.browsertime.har);
}
if (pageData.webpagetest && pageData.webpagetest.har) {
Array.prototype.push.apply(harData, pageData.webpagetest.har);
}
// Workaround to avoid issues when bt doesn't generate a har due to useProxy being set to false
harData = harData.filter(function(har) {
return !!har;
});
var pageURL = util.getURLFromPageData(pageData);
harData.forEach(function(har) {
har.log.entries.forEach(function(entry) {
var domain = domains[util.getHostname(entry.request.url)];
var total;
if (domain) {
if (entry.timings) {
total = entry.timings.blocked + entry.timings.dns + entry.timings.connect + entry.timings.ssl +
entry.timings
.send + entry.timings.wait + entry.timings.receive;
domain.blocked.add(entry.timings.blocked, entry.request.url, pageURL);
domain.dns.add(entry.timings.dns, entry.request.url, pageURL);
domain.connect.add(entry.timings.connect, entry.request.url, pageURL);
domain.ssl.add(entry.timings.ssl, entry.request.url, pageURL);
domain.send.add(entry.timings.send, entry.request.url, pageURL);
domain.wait.add(entry.timings.wait, entry.request.url, pageURL);
domain.receive.add(entry.timings.receive, entry.request.url, pageURL);
domain.total.add(total, entry.request.url, pageURL);
domain.accumulatedTime += total;
} else {
log.log('info', 'Missing timings in the HAR');
}
} else {
if (entry.timings) {
total = entry.timings.blocked + entry.timings.dns + entry.timings.connect + entry.timings.ssl +
entry.timings
.send + entry.timings.wait + entry.timings.receive;
domains[util.getHostname(entry.request.url)] = {
domain: util.getHostname(entry.request.url),
blocked: new RequestTiming(entry.timings.blocked, entry.request.url, pageURL),
dns: new RequestTiming(entry.timings.dns, entry.request.url, pageURL),
connect: new RequestTiming(entry.timings.connect, entry.request.url, pageURL),
ssl: new RequestTiming(entry.timings.ssl, entry.request.url, pageURL),
send: new RequestTiming(entry.timings.send, entry.request.url, pageURL),
wait: new RequestTiming(entry.timings.wait, entry.request.url, pageURL),
receive: new RequestTiming(entry.timings.receive, entry.request.url, pageURL),
total: new RequestTiming(total, entry.request.url, pageURL),
accumulatedTime: total
};
} else {
log.log('info', 'Missing timings in the HAR');
}
}
});
});
// we have HAR files with one page tested multiple times,
// make sure we only get data from the first run
// and we kind of add items & size for requests missing
// but only for the first one
var pageref = '';
// add request & size, just do it for the first run
if (harData.length > 0) {
harData[0].log.entries.forEach(function(entry) {
if (pageref === '' || entry.pageref === pageref) {
pageref = entry.pageref;
var domain = domains[util.getHostname(entry.request.url)];
if (domain.count) {
domain.count++;
} else {
domain.count = 1;
}
if (domain.size) {
domain.size.total += entry.response.content.size;
domain.size[util.getContentType(entry.response.content.mimeType)] +=
entry.response.content.size;
} else {
domain.size = {
total: entry.response.content.size,
css: 0,
doc: 0,
js: 0,
image: 0,
font: 0,
flash: 0,
unknown: 0
};
domain.size[util.getContentType(entry.response.content.mimeType)] = entry.response.content.size;
}
} else {
// all other har files
var daDomain = domains[util.getHostname(entry.request.url)];
if (!daDomain.count) {
daDomain.count = 1;
}
if (!daDomain.size) {
// this is not perfect, we will miss request in other HAR..s
daDomain.size = {
total: entry.response.content.size,
css: 0,
doc: 0,
js: 0,
image: 0,
font: 0,
flash: 0,
unknown: 0
};
daDomain.size[util.getContentType(entry.response.content.mimeType)] = entry.response.content.size;
}
}
});
}
};
exports.generateResults = function() {
var values = Object.keys(domains).map(function(key) {
return domains[key];
});
return {
id: 'domains',
list: values
};
};
// Resets all collected per-domain state (called between analysis runs).
exports.clear = function() {
  domains = {};
};
| yesman82/sitespeed.io | lib/collectors/domains.js | JavaScript | apache-2.0 | 5,405 |
/*
* Copyright 2016 SimplifyOps, Inc. (http://simplifyops.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dtolabs.rundeck.server.plugins.services;
import com.dtolabs.rundeck.core.plugins.BasePluggableProviderService;
import com.dtolabs.rundeck.core.plugins.ServiceProviderLoader;
import com.dtolabs.rundeck.plugins.ServiceNameConstants;
import com.dtolabs.rundeck.plugins.logging.StreamingLogReaderPlugin;
/** $INTERFACE is ... User: greg Date: 5/24/13 Time: 9:32 AM */
/**
 * Pluggable provider service for {@link StreamingLogReaderPlugin} implementations.
 * Discovery of providers is delegated to the Rundeck server's plugin loader,
 * which is injected via the bean setter.
 */
public class StreamingLogReaderPluginProviderService extends BasePluggableProviderService<StreamingLogReaderPlugin> {
    public static final String SERVICE_NAME = ServiceNameConstants.StreamingLogReader;

    /** Loader used to locate plugin providers; injected by the server container. */
    private ServiceProviderLoader rundeckServerServiceProviderLoader;

    public StreamingLogReaderPluginProviderService() {
        super(SERVICE_NAME, StreamingLogReaderPlugin.class);
    }

    public ServiceProviderLoader getRundeckServerServiceProviderLoader() {
        return this.rundeckServerServiceProviderLoader;
    }

    public void setRundeckServerServiceProviderLoader(ServiceProviderLoader loader) {
        this.rundeckServerServiceProviderLoader = loader;
    }

    @Override
    public ServiceProviderLoader getPluginManager() {
        // Provider loading is handled entirely by the injected server loader.
        return getRundeckServerServiceProviderLoader();
    }

    @Override
    public boolean isScriptPluggable() {
        // Script-based providers are not supported for this service (for now).
        return false;
    }
}
| damageboy/rundeck | rundeckapp/src/java/com/dtolabs/rundeck/server/plugins/services/StreamingLogReaderPluginProviderService.java | Java | apache-2.0 | 1,978 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.example.cxf.jaxrs;
import org.apache.camel.example.cxf.jaxrs.resources.Book;
import org.apache.camel.example.cxf.jaxrs.resources.BookNotFoundFault;
import org.apache.camel.example.cxf.jaxrs.resources.BookStore;
public class Client {

    /**
     * Exercises the BookStore service through both a JAX-WS (SOAP) client and a
     * JAX-RS (REST) client: adds a book, fetches it back, and then requests a
     * missing book to demonstrate the expected fault handling.
     *
     * @throws BookNotFoundFault if a lookup of an existing book unexpectedly fails
     */
    void invoke() throws BookNotFoundFault {
        // JAXWSClient invocation
        JAXWSClient jaxwsClient = new JAXWSClient();
        BookStore bookStore = jaxwsClient.getBookStore();
        bookStore.addBook(new Book("Camel User Guide", 123L));
        Book book = bookStore.getBook(123L);
        System.out.println("Get the book with id 123. " + book);
        getBookExpectingException(bookStore, 124L);

        // JAXRSClient invocation
        JAXRSClient jaxrsClient = new JAXRSClient();
        bookStore = jaxrsClient.getBookStore();
        bookStore.addBook(new Book("Karaf User Guide", 124L));
        book = bookStore.getBook(124L);
        System.out.println("Get the book with id 124. " + book);
        getBookExpectingException(bookStore, 126L);
    }

    /**
     * Requests a book that should not exist and prints the expected exception;
     * extracted to remove the duplicated try/catch in invoke().
     */
    private static void getBookExpectingException(BookStore bookStore, long id) {
        try {
            Book book = bookStore.getBook(id);
            System.out.println("Get the book with id " + id + ". " + book);
        } catch (Exception exception) {
            System.out.println("Expected exception received: " + exception);
        }
    }

    public static void main(String[] args) throws Exception {
        Client client = new Client();
        client.invoke();
    }
}
| objectiser/camel | examples/camel-example-cxf/src/main/java/org/apache/camel/example/cxf/jaxrs/Client.java | Java | apache-2.0 | 2,440 |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.significant;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.InputStreamStreamInput;
import org.elasticsearch.common.io.stream.OutputStreamStreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ChiSquare;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.GND;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.JLHScore;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.MutualInformation;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.PercentageScore;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicBuilder;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParserMapper;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.TestSearchContext;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import static org.elasticsearch.test.VersionUtils.randomVersion;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.lessThan;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
/**
*
*/
public class SignificanceHeuristicTests extends ESTestCase {
// Minimal SearchContext stub for parsing significant_terms requests:
// reports a single shard and a dummy shard target.
static class SignificantTermsTestSearchContext extends TestSearchContext {

    @Override
    public int numberOfShards() {
        return 1;
    }

    @Override
    public SearchShardTarget shardTarget() {
        return new SearchShardTarget("no node, this is a unit test", "no index, this is a unit test", 0);
    }
}
// Serialization round-trip: writes a randomly-built significant-terms
// aggregation for a random wire version and checks that reading it back
// preserves the significance heuristic. Does not replace the bwc tests.
public void testStreamResponse() throws Exception {
    Version version = randomVersion(random());
    InternalSignificantTerms[] sigTerms = getRandomSignificantTerms(getRandomSignificanceheuristic());

    // write
    ByteArrayOutputStream outBuffer = new ByteArrayOutputStream();
    OutputStreamStreamOutput out = new OutputStreamStreamOutput(outBuffer);
    out.setVersion(version);
    sigTerms[0].writeTo(out);

    // read
    ByteArrayInputStream inBuffer = new ByteArrayInputStream(outBuffer.toByteArray());
    InputStreamStreamInput in = new InputStreamStreamInput(inBuffer);
    in.setVersion(version);
    sigTerms[1].readFrom(in);

    assertTrue(sigTerms[1].significanceHeuristic.equals(sigTerms[0].significanceHeuristic));
}
/**
 * Builds a pair of significant-terms aggregations carrying the given
 * heuristic: element 0 is populated (randomly long- or string-based) for
 * writing, element 1 is an empty instance of the same type for reading.
 */
InternalSignificantTerms[] getRandomSignificantTerms(SignificanceHeuristic heuristic) {
    InternalSignificantTerms[] sTerms = new InternalSignificantTerms[2];
    ArrayList<InternalSignificantTerms.Bucket> buckets = new ArrayList<>();
    if (randomBoolean()) {
        // Long-valued terms: the bucket carries the numeric term (123) directly.
        buckets.add(new SignificantLongTerms.Bucket(1, 2, 3, 4, 123, InternalAggregations.EMPTY, null));
        sTerms[0] = new SignificantLongTerms(10, 20, "some_name", null, 1, 1, heuristic, buckets,
                Collections.EMPTY_LIST, null);
        sTerms[1] = new SignificantLongTerms();
    } else {
        // String-valued terms: the bucket carries the term as a BytesRef.
        BytesRef term = new BytesRef("someterm");
        buckets.add(new SignificantStringTerms.Bucket(term, 1, 2, 3, 4, InternalAggregations.EMPTY));
        sTerms[0] = new SignificantStringTerms(10, 20, "some_name", 1, 1, heuristic, buckets, Collections.EMPTY_LIST,
                null);
        sTerms[1] = new SignificantStringTerms();
    }
    return sTerms;
}
/**
 * Picks one of the supported significance heuristics uniformly at random,
 * with randomized constructor flags where applicable.
 */
SignificanceHeuristic getRandomSignificanceheuristic() {
    List<SignificanceHeuristic> heuristics = new ArrayList<>();
    heuristics.add(JLHScore.INSTANCE);
    heuristics.add(new MutualInformation(randomBoolean(), randomBoolean()));
    heuristics.add(new GND(randomBoolean()));
    heuristics.add(new ChiSquare(randomBoolean(), randomBoolean()));
    // Bound the (inclusive) random index by the list size so that adding a
    // heuristic above automatically includes it in the random choice.
    return heuristics.get(randomInt(heuristics.size() - 1));
}
// test that
// 1. The output of the builders can actually be parsed
// 2. The parser does not swallow parameters after a significance heuristic was defined
public void testBuilderAndParser() throws Exception {
    Set<SignificanceHeuristicParser> parsers = new HashSet<>();
    SignificanceHeuristicParserMapper heuristicParserMapper = new SignificanceHeuristicParserMapper(parsers, null);
    SearchContext searchContext = new SignificantTermsTestSearchContext();

    // test jlh with string
    assertTrue(parseFromString(heuristicParserMapper, searchContext, "\"jlh\":{}") instanceof JLHScore);
    // test gnd with string
    assertTrue(parseFromString(heuristicParserMapper, searchContext, "\"gnd\":{}") instanceof GND);

    // test mutual information and chi_square with string; both flags randomized
    boolean includeNegatives = randomBoolean();
    boolean backgroundIsSuperset = randomBoolean();
    assertThat(parseFromString(heuristicParserMapper, searchContext, "\"mutual_information\":{\"include_negatives\": " + includeNegatives + ", \"background_is_superset\":" + backgroundIsSuperset + "}"), equalTo((SignificanceHeuristic) (new MutualInformation(includeNegatives, backgroundIsSuperset))));
    assertThat(parseFromString(heuristicParserMapper, searchContext, "\"chi_square\":{\"include_negatives\": " + includeNegatives + ", \"background_is_superset\":" + backgroundIsSuperset + "}"), equalTo((SignificanceHeuristic) (new ChiSquare(includeNegatives, backgroundIsSuperset))));

    // test with builders
    assertTrue(parseFromBuilder(heuristicParserMapper, searchContext, new JLHScore.JLHScoreBuilder()) instanceof JLHScore);
    assertTrue(parseFromBuilder(heuristicParserMapper, searchContext, new GND.GNDBuilder(backgroundIsSuperset)) instanceof GND);
    assertThat(parseFromBuilder(heuristicParserMapper, searchContext, new MutualInformation.MutualInformationBuilder(includeNegatives, backgroundIsSuperset)), equalTo((SignificanceHeuristic) new MutualInformation(includeNegatives, backgroundIsSuperset)));
    assertThat(parseFromBuilder(heuristicParserMapper, searchContext, new ChiSquare.ChiSquareBuilder(includeNegatives, backgroundIsSuperset)), equalTo((SignificanceHeuristic) new ChiSquare(includeNegatives, backgroundIsSuperset)));

    // test exceptions: unknown fields after/inside a heuristic definition
    // must be rejected, not silently swallowed
    String faultyHeuristicdefinition = "\"mutual_information\":{\"include_negatives\": false, \"some_unknown_field\": false}";
    String expectedError = "unknown field [some_unknown_field]";
    checkParseException(heuristicParserMapper, searchContext, faultyHeuristicdefinition, expectedError);

    faultyHeuristicdefinition = "\"chi_square\":{\"unknown_field\": true}";
    expectedError = "unknown field [unknown_field]";
    checkParseException(heuristicParserMapper, searchContext, faultyHeuristicdefinition, expectedError);

    faultyHeuristicdefinition = "\"jlh\":{\"unknown_field\": true}";
    expectedError = "expected an empty object, but found ";
    checkParseException(heuristicParserMapper, searchContext, faultyHeuristicdefinition, expectedError);

    faultyHeuristicdefinition = "\"gnd\":{\"unknown_field\": true}";
    expectedError = "unknown field [unknown_field]";
    checkParseException(heuristicParserMapper, searchContext, faultyHeuristicdefinition, expectedError);
}
// Parses a significant_terms request embedding the given (faulty) heuristic
// definition and asserts that parsing fails with a message containing
// `expectedError`.
protected void checkParseException(SignificanceHeuristicParserMapper heuristicParserMapper, SearchContext searchContext, String faultyHeuristicDefinition, String expectedError) throws IOException {
    try {
        XContentParser stParser = JsonXContent.jsonXContent.createParser("{\"field\":\"text\", " + faultyHeuristicDefinition + ",\"min_doc_count\":200}");
        stParser.nextToken();
        new SignificantTermsParser(heuristicParserMapper).parse("testagg", stParser, searchContext);
        fail();
    } catch (ElasticsearchParseException e) {
        assertTrue(e.getMessage().contains(expectedError));
    }
}
// Serializes an aggregation that embeds the given heuristic builder to JSON,
// then parses it back and returns the heuristic that the parser produced.
protected SignificanceHeuristic parseFromBuilder(SignificanceHeuristicParserMapper heuristicParserMapper, SearchContext searchContext, SignificanceHeuristicBuilder significanceHeuristicBuilder) throws IOException {
    SignificantTermsBuilder stBuilder = new SignificantTermsBuilder("testagg");
    stBuilder.significanceHeuristic(significanceHeuristicBuilder).field("text").minDocCount(200);
    XContentBuilder stXContentBuilder = XContentFactory.jsonBuilder();
    stBuilder.internalXContent(stXContentBuilder, null);
    XContentParser stParser = JsonXContent.jsonXContent.createParser(stXContentBuilder.string());
    return parseSignificanceHeuristic(heuristicParserMapper, searchContext, stParser);
}
// Runs the significant_terms parser over the supplied JSON parser, verifies
// that the min_doc_count parameter placed AFTER the heuristic was not
// swallowed and that the parser consumed all input, then returns the
// heuristic extracted from the resulting aggregator factory.
private SignificanceHeuristic parseSignificanceHeuristic(SignificanceHeuristicParserMapper heuristicParserMapper, SearchContext searchContext, XContentParser stParser) throws IOException {
    stParser.nextToken();
    SignificantTermsAggregatorFactory aggregatorFactory = (SignificantTermsAggregatorFactory) new SignificantTermsParser(heuristicParserMapper).parse("testagg", stParser, searchContext);
    stParser.nextToken();
    assertThat(aggregatorFactory.getBucketCountThresholds().getMinDocCount(), equalTo(200l));
    assertThat(stParser.currentToken(), equalTo(null));
    stParser.close();
    return aggregatorFactory.getSignificanceHeuristic();
}
// Embeds the given heuristic JSON snippet in a minimal significant_terms
// request and returns the heuristic that the parser produced for it.
protected SignificanceHeuristic parseFromString(SignificanceHeuristicParserMapper heuristicParserMapper, SearchContext searchContext, String heuristicString) throws IOException {
    XContentParser stParser = JsonXContent.jsonXContent.createParser("{\"field\":\"text\", " + heuristicString + ", \"min_doc_count\":200}");
    return parseSignificanceHeuristic(heuristicParserMapper, searchContext, stParser);
}
// Verifies argument validation for heuristics in two modes:
// `heuristicIsSuperset` must enforce the subset/superset consistency
// constraints (background set contains the subset), while
// `heuristicNotSuperset` only validates each set internally.
void testBackgroundAssertions(SignificanceHeuristic heuristicIsSuperset, SignificanceHeuristic heuristicNotSuperset) {
    // subset frequency must not exceed superset frequency
    try {
        heuristicIsSuperset.getScore(2, 3, 1, 4);
        fail();
    } catch (IllegalArgumentException illegalArgumentException) {
        assertNotNull(illegalArgumentException.getMessage());
        assertTrue(illegalArgumentException.getMessage().contains("subsetFreq > supersetFreq"));
    }
    // subset size must not exceed superset size
    try {
        heuristicIsSuperset.getScore(1, 4, 2, 3);
        fail();
    } catch (IllegalArgumentException illegalArgumentException) {
        assertNotNull(illegalArgumentException.getMessage());
        assertTrue(illegalArgumentException.getMessage().contains("subsetSize > supersetSize"));
    }
    // a frequency must not exceed its own set size (subset)
    try {
        heuristicIsSuperset.getScore(2, 1, 3, 4);
        fail();
    } catch (IllegalArgumentException illegalArgumentException) {
        assertNotNull(illegalArgumentException.getMessage());
        assertTrue(illegalArgumentException.getMessage().contains("subsetFreq > subsetSize"));
    }
    // a frequency must not exceed its own set size (superset)
    try {
        heuristicIsSuperset.getScore(1, 2, 4, 3);
        fail();
    } catch (IllegalArgumentException illegalArgumentException) {
        assertNotNull(illegalArgumentException.getMessage());
        assertTrue(illegalArgumentException.getMessage().contains("supersetFreq > supersetSize"));
    }
    // the remainder outside the subset must also be consistent
    try {
        heuristicIsSuperset.getScore(1, 3, 4, 4);
        fail();
    } catch (IllegalArgumentException assertionError) {
        assertNotNull(assertionError.getMessage());
        assertTrue(assertionError.getMessage().contains("supersetFreq - subsetFreq > supersetSize - subsetSize"));
    }
    // negative counts are rejected regardless of which argument is negative
    try {
        int idx = randomInt(3);
        long[] values = {1, 2, 3, 4};
        values[idx] *= -1;
        heuristicIsSuperset.getScore(values[0], values[1], values[2], values[3]);
        fail();
    } catch (IllegalArgumentException illegalArgumentException) {
        assertNotNull(illegalArgumentException.getMessage());
        assertTrue(illegalArgumentException.getMessage().contains("Frequencies of subset and superset must be positive"));
    }
    // non-superset mode: only per-set consistency is enforced
    try {
        heuristicNotSuperset.getScore(2, 1, 3, 4);
        fail();
    } catch (IllegalArgumentException illegalArgumentException) {
        assertNotNull(illegalArgumentException.getMessage());
        assertTrue(illegalArgumentException.getMessage().contains("subsetFreq > subsetSize"));
    }
    try {
        heuristicNotSuperset.getScore(1, 2, 4, 3);
        fail();
    } catch (IllegalArgumentException illegalArgumentException) {
        assertNotNull(illegalArgumentException.getMessage());
        assertTrue(illegalArgumentException.getMessage().contains("supersetFreq > supersetSize"));
    }
    try {
        int idx = randomInt(3);
        long[] values = {1, 2, 3, 4};
        values[idx] *= -1;
        heuristicNotSuperset.getScore(values[0], values[1], values[2], values[3]);
        fail();
    } catch (IllegalArgumentException illegalArgumentException) {
        assertNotNull(illegalArgumentException.getMessage());
        assertTrue(illegalArgumentException.getMessage().contains("Frequencies of subset and superset must be positive"));
    }
}
// Verifies the basic argument validation shared by all heuristics:
// negative counts and frequencies exceeding their set sizes are rejected.
void testAssertions(SignificanceHeuristic heuristic) {
    // negative counts are rejected regardless of which argument is negative
    try {
        int idx = randomInt(3);
        long[] values = {1, 2, 3, 4};
        values[idx] *= -1;
        heuristic.getScore(values[0], values[1], values[2], values[3]);
        fail();
    } catch (IllegalArgumentException illegalArgumentException) {
        assertNotNull(illegalArgumentException.getMessage());
        assertTrue(illegalArgumentException.getMessage().contains("Frequencies of subset and superset must be positive"));
    }
    // superset frequency must not exceed superset size
    try {
        heuristic.getScore(1, 2, 4, 3);
        fail();
    } catch (IllegalArgumentException illegalArgumentException) {
        assertNotNull(illegalArgumentException.getMessage());
        assertTrue(illegalArgumentException.getMessage().contains("supersetFreq > supersetSize"));
    }
    // subset frequency must not exceed subset size
    try {
        heuristic.getScore(2, 1, 3, 4);
        fail();
    } catch (IllegalArgumentException illegalArgumentException) {
        assertNotNull(illegalArgumentException.getMessage());
        assertTrue(illegalArgumentException.getMessage().contains("subsetFreq > subsetSize"));
    }
}
// Runs the argument-validation checks against every heuristic; the
// background-aware heuristics are checked in both superset modes.
public void testAssertions() throws Exception {
    testBackgroundAssertions(new MutualInformation(true, true), new MutualInformation(true, false));
    testBackgroundAssertions(new ChiSquare(true, true), new ChiSquare(true, false));
    testBackgroundAssertions(new GND(true), new GND(false));
    testAssertions(PercentageScore.INSTANCE);
    testAssertions(JLHScore.INSTANCE);
}
// Checks monotonicity/non-negativity of every heuristic's score; the
// boolean flag selects whether a zero subset frequency must score 0.
public void testBasicScoreProperties() {
    basicScoreProperties(JLHScore.INSTANCE, true);
    basicScoreProperties(new GND(true), true);
    basicScoreProperties(PercentageScore.INSTANCE, true);
    basicScoreProperties(new MutualInformation(true, true), false);
    basicScoreProperties(new ChiSquare(true, true), false);
}
// Asserts generic score properties for one heuristic: positive score for a
// distinctive term, a lower score as the term gets more common in the
// superset, optionally zero for a zero subset frequency, and non-negativity
// for arbitrary (valid) random inputs.
public void basicScoreProperties(SignificanceHeuristic heuristic, boolean test0) {
    assertThat(heuristic.getScore(1, 1, 1, 3), greaterThan(0.0));
    assertThat(heuristic.getScore(1, 1, 2, 3), lessThan(heuristic.getScore(1, 1, 1, 3)));
    assertThat(heuristic.getScore(1, 1, 3, 4), lessThan(heuristic.getScore(1, 1, 2, 4)));
    if (test0) {
        assertThat(heuristic.getScore(0, 1, 2, 3), equalTo(0.0));
    }
    double score = 0.0;
    try {
        long a = randomLong();
        long b = randomLong();
        long c = randomLong();
        long d = randomLong();
        score = heuristic.getScore(a, b, c, d);
    } catch (IllegalArgumentException e) {
        // Random longs may violate the heuristic's argument constraints; in
        // that case score keeps its default 0.0, which trivially satisfies
        // the non-negativity assertion below.
    }
    assertThat(score, greaterThanOrEqualTo(0.0));
}
/**
 * Spot-checks MutualInformation scores: perfect co-occurrence and perfect
 * anti-co-occurrence score 1, statistically independent counts score 0,
 * all scores stay within [0, 1] (when background inclusion is on), and the
 * includeNegatives/backgroundIsSuperset flags behave as expected.
 */
public void testScoreMutual() throws Exception {
SignificanceHeuristic heuristic = new MutualInformation(true, true);
assertThat(heuristic.getScore(1, 1, 1, 3), greaterThan(0.0));
assertThat(heuristic.getScore(1, 1, 2, 3), lessThan(heuristic.getScore(1, 1, 1, 3)));
// Perfect correlation between term and subset gives maximal information (1.0) ...
assertThat(heuristic.getScore(2, 2, 2, 4), equalTo(1.0));
// ... and so does perfect anti-correlation (term never in subset, always elsewhere).
assertThat(heuristic.getScore(0, 2, 2, 4), equalTo(1.0));
// Independence cases: the term's frequency ratio is identical in subset and superset, so MI is 0.
assertThat(heuristic.getScore(2, 2, 4, 4), equalTo(0.0));
assertThat(heuristic.getScore(1, 2, 2, 4), equalTo(0.0));
assertThat(heuristic.getScore(3, 6, 9, 18), equalTo(0.0));
double score = 0.0;
try {
long a = randomLong();
long b = randomLong();
long c = randomLong();
long d = randomLong();
score = heuristic.getScore(a, b, c, d);
} catch (IllegalArgumentException e) {
// Random arguments may be rejected; the 0.0 default keeps the range checks below valid.
}
assertThat(score, lessThanOrEqualTo(1.0));
assertThat(score, greaterThanOrEqualTo(0.0));
// With includeNegatives=false, a term under-represented in the subset scores negative infinity.
heuristic = new MutualInformation(false, true);
assertThat(heuristic.getScore(0, 1, 2, 3), equalTo(Double.NEGATIVE_INFINITY));
// With backgroundIsSuperset=false the subset/superset consistency checks are relaxed,
// but the score must still stay within [0, 1].
heuristic = new MutualInformation(true, false);
score = heuristic.getScore(2, 3, 1, 4);
assertThat(score, greaterThanOrEqualTo(0.0));
assertThat(score, lessThanOrEqualTo(1.0));
score = heuristic.getScore(1, 4, 2, 3);
assertThat(score, greaterThanOrEqualTo(0.0));
assertThat(score, lessThanOrEqualTo(1.0));
score = heuristic.getScore(1, 3, 4, 4);
assertThat(score, greaterThanOrEqualTo(0.0));
assertThat(score, lessThanOrEqualTo(1.0));
}
/**
 * Corner cases for the Google Normalized Distance heuristic: absent terms
 * and non-co-occurring terms score 0, perfect co-occurrence scores 1.
 */
public void testGNDCornerCases() throws Exception {
GND gnd = new GND(true);
// Term is only in the subset, not at all in the other set — but only because the other
// set is empty. This should not actually happen (only terms present in the subset are
// considered now); regardless, the score should be 0 because a term that does not exist
// cannot be relevant.
assertThat(gnd.getScore(0, randomIntBetween(1, 2), 0, randomIntBetween(2,3)), equalTo(0.0));
// The terms do not co-occur at all — should be 0.
assertThat(gnd.getScore(0, randomIntBetween(1, 2), randomIntBetween(2, 3), randomIntBetween(5,6)), equalTo(0.0));
// Comparison between two terms that do not exist — not relevant, so 0.
assertThat(gnd.getScore(0, 0, 0, randomIntBetween(1,2)), equalTo(0.0));
// Terms co-occur perfectly — should be 1.
assertThat(gnd.getScore(1, 1, 1, 1), equalTo(1.0));
// With backgroundIsSuperset=false even the all-zero input is accepted and scores 0.
gnd = new GND(false);
assertThat(gnd.getScore(0, 0, 0, 0), equalTo(0.0));
}
}
| PhaedrusTheGreek/elasticsearch | core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java | Java | apache-2.0 | 20,764 |
/// <reference path='fourslash.ts' />
// @noUnusedLocals: true
// @noUnusedParameters: true
////function g(a, b) { b; }
////g(1, 2);
// 'a' is unused but precedes the used parameter 'b': deleting it would shift the
// argument positions at the call site, so the removal code fix must not be offered.
verify.not.codeFixAvailable("Remove unused declaration for: 'a'");
| Microsoft/TypeScript | tests/cases/fourslash/codeFixUnusedIdentifier_parameter.ts | TypeScript | apache-2.0 | 203 |
/*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.psi.impl;
import com.intellij.openapi.project.Project;
import com.intellij.psi.PsiClass;
import com.intellij.psi.PsiField;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiMethod;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.search.PsiShortNamesCache;
import com.intellij.util.ArrayUtil;
import com.intellij.util.CommonProcessors;
import com.intellij.util.Processor;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.indexing.IdFilter;
import gnu.trove.THashSet;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Arrays;
import java.util.Set;
/**
 * A {@link PsiShortNamesCache} that fans every query out to all caches registered
 * via {@code PsiShortNamesCache.EP_NAME} and merges their results. Array-returning
 * methods deduplicate via {@link Merger}; processor-based methods short-circuit as
 * soon as any delegate's processor returns false.
 */
public class CompositeShortNamesCache extends PsiShortNamesCache {
  // Delegate caches; empty for the default project (no extensions are queried there).
  private final PsiShortNamesCache[] myCaches;
  public CompositeShortNamesCache(Project project) {
    myCaches = project.isDefault() ? new PsiShortNamesCache[0] : project.getExtensions(PsiShortNamesCache.EP_NAME);
  }
  @Override
  @NotNull
  public PsiFile[] getFilesByName(@NotNull String name) {
    // Lazily create the merger only when at least one delegate returned something.
    Merger<PsiFile> merger = null;
    for (PsiShortNamesCache cache : myCaches) {
      PsiFile[] classes = cache.getFilesByName(name);
      if (classes.length != 0) {
        if (merger == null) merger = new Merger<>();
        merger.add(classes);
      }
    }
    PsiFile[] result = merger == null ? null : merger.getResult();
    return result != null ? result : PsiFile.EMPTY_ARRAY;
  }
  @Override
  @NotNull
  public String[] getAllFileNames() {
    Merger<String> merger = new Merger<>();
    for (PsiShortNamesCache cache : myCaches) {
      merger.add(cache.getAllFileNames());
    }
    String[] result = merger.getResult();
    return result != null ? result : ArrayUtil.EMPTY_STRING_ARRAY;
  }
  @Override
  @NotNull
  public PsiClass[] getClassesByName(@NotNull String name, @NotNull GlobalSearchScope scope) {
    Merger<PsiClass> merger = null;
    for (PsiShortNamesCache cache : myCaches) {
      PsiClass[] classes = cache.getClassesByName(name, scope);
      if (classes.length != 0) {
        if (merger == null) merger = new Merger<>();
        merger.add(classes);
      }
    }
    PsiClass[] result = merger == null ? null : merger.getResult();
    return result != null ? result : PsiClass.EMPTY_ARRAY;
  }
  @Override
  @NotNull
  public String[] getAllClassNames() {
    Merger<String> merger = new Merger<>();
    for (PsiShortNamesCache cache : myCaches) {
      String[] names = cache.getAllClassNames();
      merger.add(names);
    }
    String[] result = merger.getResult();
    return result != null ? result : ArrayUtil.EMPTY_STRING_ARRAY;
  }
  @Override
  public boolean processAllClassNames(@NotNull Processor<String> processor) {
    // Wrap in a UniqueProcessor so a name reported by several delegate caches
    // reaches the caller's processor only once.
    CommonProcessors.UniqueProcessor<String> uniqueProcessor = new CommonProcessors.UniqueProcessor<>(processor);
    for (PsiShortNamesCache cache : myCaches) {
      if (!cache.processAllClassNames(uniqueProcessor)) {
        return false;
      }
    }
    return true;
  }
  @Override
  public boolean processAllClassNames(@NotNull Processor<String> processor, @NotNull GlobalSearchScope scope, IdFilter filter) {
    // NOTE(review): unlike the overload above, this variant does not deduplicate
    // across delegate caches — confirm callers tolerate repeated names.
    for (PsiShortNamesCache cache : myCaches) {
      if (!cache.processAllClassNames(processor, scope, filter)) {
        return false;
      }
    }
    return true;
  }
  @Override
  public boolean processAllMethodNames(@NotNull Processor<String> processor, @NotNull GlobalSearchScope scope, IdFilter filter) {
    for (PsiShortNamesCache cache : myCaches) {
      if (!cache.processAllMethodNames(processor, scope, filter)) {
        return false;
      }
    }
    return true;
  }
  @Override
  public boolean processAllFieldNames(@NotNull Processor<String> processor, @NotNull GlobalSearchScope scope, IdFilter filter) {
    for (PsiShortNamesCache cache : myCaches) {
      if (!cache.processAllFieldNames(processor, scope, filter)) {
        return false;
      }
    }
    return true;
  }
  @Override
  @NotNull
  public PsiMethod[] getMethodsByName(@NotNull String name, @NotNull GlobalSearchScope scope) {
    Merger<PsiMethod> merger = null;
    for (PsiShortNamesCache cache : myCaches) {
      PsiMethod[] methods = cache.getMethodsByName(name, scope);
      if (methods.length != 0) {
        if (merger == null) merger = new Merger<>();
        merger.add(methods);
      }
    }
    PsiMethod[] result = merger == null ? null : merger.getResult();
    return result == null ? PsiMethod.EMPTY_ARRAY : result;
  }
  @Override
  @NotNull
  public PsiMethod[] getMethodsByNameIfNotMoreThan(@NonNls @NotNull final String name, @NotNull final GlobalSearchScope scope, final int maxCount) {
    Merger<PsiMethod> merger = null;
    for (PsiShortNamesCache cache : myCaches) {
      PsiMethod[] methods = cache.getMethodsByNameIfNotMoreThan(name, scope, maxCount);
      // NOTE(review): when a single delegate returns exactly maxCount results, results
      // already merged from earlier delegates are discarded; and the combined total may
      // exceed maxCount when no single delegate hits the limit — confirm this is intended.
      if (methods.length == maxCount) return methods;
      if (methods.length != 0) {
        if (merger == null) merger = new Merger<>();
        merger.add(methods);
      }
    }
    PsiMethod[] result = merger == null ? null : merger.getResult();
    return result == null ? PsiMethod.EMPTY_ARRAY : result;
  }
  @NotNull
  @Override
  public PsiField[] getFieldsByNameIfNotMoreThan(@NonNls @NotNull String name, @NotNull GlobalSearchScope scope, int maxCount) {
    // Same merge-with-limit strategy (and the same caveats) as getMethodsByNameIfNotMoreThan.
    Merger<PsiField> merger = null;
    for (PsiShortNamesCache cache : myCaches) {
      PsiField[] fields = cache.getFieldsByNameIfNotMoreThan(name, scope, maxCount);
      if (fields.length == maxCount) return fields;
      if (fields.length != 0) {
        if (merger == null) merger = new Merger<>();
        merger.add(fields);
      }
    }
    PsiField[] result = merger == null ? null : merger.getResult();
    return result == null ? PsiField.EMPTY_ARRAY : result;
  }
  @Override
  public boolean processMethodsWithName(@NonNls @NotNull String name,
                                        @NotNull GlobalSearchScope scope,
                                        @NotNull Processor<PsiMethod> processor) {
    // Delegates to the IdFilter-aware overload with no filter.
    return processMethodsWithName(name, processor, scope, null);
  }
  @Override
  public boolean processMethodsWithName(@NonNls @NotNull String name,
                                        @NotNull Processor<? super PsiMethod> processor,
                                        @NotNull GlobalSearchScope scope,
                                        @Nullable IdFilter idFilter) {
    for (PsiShortNamesCache cache : myCaches) {
      if (!cache.processMethodsWithName(name, processor, scope, idFilter)) return false;
    }
    return true;
  }
  @Override
  @NotNull
  public String[] getAllMethodNames() {
    Merger<String> merger = new Merger<>();
    for (PsiShortNamesCache cache : myCaches) {
      merger.add(cache.getAllMethodNames());
    }
    String[] result = merger.getResult();
    return result != null ? result : ArrayUtil.EMPTY_STRING_ARRAY;
  }
  @Override
  @NotNull
  public PsiField[] getFieldsByName(@NotNull String name, @NotNull GlobalSearchScope scope) {
    Merger<PsiField> merger = null;
    for (PsiShortNamesCache cache : myCaches) {
      PsiField[] classes = cache.getFieldsByName(name, scope);
      if (classes.length != 0) {
        if (merger == null) merger = new Merger<>();
        merger.add(classes);
      }
    }
    PsiField[] result = merger == null ? null : merger.getResult();
    return result == null ? PsiField.EMPTY_ARRAY : result;
  }
  @Override
  @NotNull
  public String[] getAllFieldNames() {
    Merger<String> merger = null;
    for (PsiShortNamesCache cache : myCaches) {
      String[] classes = cache.getAllFieldNames();
      if (classes.length != 0) {
        if (merger == null) merger = new Merger<>();
        merger.add(classes);
      }
    }
    String[] result = merger == null ? null : merger.getResult();
    return result == null ? ArrayUtil.EMPTY_STRING_ARRAY : result;
  }
  @Override
  public boolean processFieldsWithName(@NotNull String key,
                                       @NotNull Processor<? super PsiField> processor,
                                       @NotNull GlobalSearchScope scope,
                                       @Nullable IdFilter filter) {
    for (PsiShortNamesCache cache : myCaches) {
      if (!cache.processFieldsWithName(key, processor, scope, filter)) return false;
    }
    return true;
  }
  @Override
  public boolean processClassesWithName(@NotNull String key,
                                        @NotNull Processor<? super PsiClass> processor,
                                        @NotNull GlobalSearchScope scope,
                                        @Nullable IdFilter filter) {
    for (PsiShortNamesCache cache : myCaches) {
      if (!cache.processClassesWithName(key, processor, scope, filter)) return false;
    }
    return true;
  }
  /**
   * Accumulates arrays of results while avoiding any allocation in the common
   * single-contributor case: the first array is kept as-is, and a backing Set
   * (for deduplication) is only created once a second array arrives.
   */
  private static class Merger<T> {
    // The first (and possibly only) contributed array; doubles as the type token for toArray.
    private T[] mySingleItem;
    // Deduplicated union of all contributions; null until a second array is added.
    private Set<T> myAllItems;
    public void add(@NotNull T[] items) {
      if (items.length == 0) return;
      if (mySingleItem == null) {
        mySingleItem = items;
        return;
      }
      if (myAllItems == null) {
        T[] elements = mySingleItem;
        myAllItems = ContainerUtil.addAll(new THashSet<>(elements.length), elements);
      }
      ContainerUtil.addAll(myAllItems, items);
    }
    public T[] getResult() {
      if (myAllItems == null) return mySingleItem;
      // NOTE(review): Set.toArray(T[]) reuses mySingleItem when the set fits into it;
      // if duplicates shrink the set below mySingleItem.length, the element following
      // the last result is set to null — confirm callers tolerate a null tail.
      return myAllItems.toArray(mySingleItem);
    }
  }
  @SuppressWarnings({"HardCodedStringLiteral"})
  @Override
  public String toString() {
    return "Composite cache: " + Arrays.asList(myCaches);
  }
}
| jk1/intellij-community | java/java-indexing-impl/src/com/intellij/psi/impl/CompositeShortNamesCache.java | Java | apache-2.0 | 10,218 |
/*************************GO-LICENSE-START*********************************
* Copyright 2014 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*************************GO-LICENSE-END***********************************/
package com.thoughtworks.go.domain.materials.git;
import com.thoughtworks.go.domain.materials.Modification;
import com.thoughtworks.go.util.DateUtils;
import java.util.LinkedList;
import java.util.List;
import java.util.Optional;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Line-oriented parser for {@code git log} output. Each "commit" line starts a
 * new {@link Modification}; subsequent "Author:", "Date:" and indented comment
 * lines fill in the most recently started modification.
 */
public class GitModificationParser {
    private LinkedList<Modification> modifications = new LinkedList<>();

    private static final String SPACES = "\\s+";
    private static final String COMMENT_INDENT = "\\s{4}";
    private static final String COMMENT_TEXT = "(.*)";
    private static final String HASH = "(\\w+)";
    private static final String DATE = "(.+)";
    private static final String AUTHOR = "(.+)";
    private static final Pattern COMMIT_PATTERN = Pattern.compile("^commit" + SPACES + HASH + "$");
    private static final Pattern AUTHOR_PATTERN = Pattern.compile("^Author:"+ SPACES + AUTHOR + "$");
    private static final Pattern DATE_PATTERN = Pattern.compile("^Date:" + SPACES + DATE + "$");
    private static final Pattern COMMENT_PATTERN = Pattern.compile("^" + COMMENT_INDENT + COMMENT_TEXT + "$");

    /** Feeds every line of the given log output through {@link #processLine} and returns the result. */
    public List<Modification> parse(List<String> output) {
        for (String line : output) {
            processLine(line);
        }
        return modifications;
    }

    /** All modifications accumulated so far, in the order their commit lines appeared. */
    public List<Modification> getModifications() {
        return modifications;
    }

    /**
     * Consumes a single line of git log output. The four line shapes are mutually
     * exclusive (commit/author/date lines are unindented, comment lines are
     * indented by four spaces), so at most one branch applies per line.
     */
    public void processLine(String line) {
        Matcher commit = COMMIT_PATTERN.matcher(line);
        if (commit.matches()) {
            // A fresh commit: author, date and comment are filled in by later lines.
            modifications.add(new Modification("", "", null, null, commit.group(1)));
        } else {
            Matcher author = AUTHOR_PATTERN.matcher(line);
            if (author.matches()) {
                modifications.getLast().setUserName(author.group(1));
            } else {
                Matcher date = DATE_PATTERN.matcher(line);
                if (date.matches()) {
                    modifications.getLast().setModifiedTime(DateUtils.parseISO8601(date.group(1)));
                } else {
                    Matcher comment = COMMENT_PATTERN.matcher(line);
                    if (comment.matches()) {
                        // Multi-line commit messages are joined with newlines.
                        Modification current = modifications.getLast();
                        String existing = current.getComment();
                        if (existing == null || existing.isEmpty()) {
                            current.setComment(comment.group(1));
                        } else {
                            current.setComment(existing + "\n" + comment.group(1));
                        }
                    }
                }
            }
        }
    }
}
| varshavaradarajan/gocd | domain/src/main/java/com/thoughtworks/go/domain/materials/git/GitModificationParser.java | Java | apache-2.0 | 3,128 |
/*
* $Id$
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.struts2.showcase.chat;
import java.io.Serializable;
import java.util.Date;
/**
* Represends a user in the Chat example.
*/
public class User implements Serializable {
private static final long serialVersionUID = -1434958919516089297L;
private String name;
private Date creationDate;
public User(String name) {
this.name = name;
this.creationDate = new Date(System.currentTimeMillis());
}
public Date getCreationDate() {
return creationDate;
}
public String getName() {
return name;
}
}
| Ile2/struts2-showcase-demo | src/apps/showcase/src/main/java/org/apache/struts2/showcase/chat/User.java | Java | apache-2.0 | 1,350 |
/*
* Copyright (C) 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.retrofit2.app;
import auto.parcel.AutoParcel;
import android.os.Parcelable;
import android.support.annotation.Nullable;
/**
 * Immutable Parcelable value type. The concrete implementation
 * (AutoParcel_Item) — including Parcelable plumbing, equals/hashCode and the
 * builder — is generated at compile time by the @AutoParcel processor.
 * Instances are created via {@link #builder()} or modified via {@link #toBuilder()}.
 */
@AutoParcel
public abstract class Item implements Parcelable {
// Optional icon value; may be null. (assumes: an icon name or URI consumed by the UI — TODO confirm against callers)
@Nullable
public abstract String icon();
// Optional primary text line; may be null.
@Nullable
public abstract String text1();
/** Builder for {@link Item}; the implementation is also generated by AutoParcel. */
@AutoParcel.Builder
public abstract static class Builder {
public abstract Builder icon(String s);
public abstract Builder text1(String s);
public abstract Item build();
}
/** Starts a new, empty builder backed by the generated implementation. */
public static Builder builder() {
return new AutoParcel_Item.Builder();
}
/** Returns a builder pre-populated with this instance's values. */
public abstract Builder toBuilder();
}
| yongjhih/NotRetrofit | retrofit2-github-app/src/main/java/com/github/retrofit2/app/Item.java | Java | apache-2.0 | 1,268 |
package org.jboss.resteasy.api.validation;
import java.io.Serializable;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
/**
 * Serializable, JAXB-mapped representation of a single Bean Validation
 * constraint violation, as reported to REST clients. Field-level XML access
 * is used, so the XML shape is determined directly by the four fields below.
 *
 * @author <a href="ron.sigal@jboss.com">Ron Sigal</a>
 * @version $Revision: 1.1 $
 *
 * Copyright Jun 4, 2013
 */
@XmlRootElement(name="resteasyConstraintViolation")
@XmlAccessorType(XmlAccessType.FIELD)
public class ResteasyConstraintViolation implements Serializable
{
private static final long serialVersionUID = -5441628046215135260L;
// Which kind of element was violated (class, field, property, parameter, return value).
private ConstraintType.Type constraintType;
// Path to the violating element, e.g. a property path.
private String path;
// Human-readable description of the violation.
private String message;
// String form of the offending value.
private String value;
public ResteasyConstraintViolation(ConstraintType.Type constraintType, String path, String message, String value)
{
this.constraintType = constraintType;
this.path = path;
this.message = message;
this.value = value;
}
// No-arg constructor required by JAXB unmarshalling.
public ResteasyConstraintViolation()
{
}
/**
* @return type of constraint
*/
public ConstraintType.Type getConstraintType()
{
return constraintType;
}
/**
* @return description of element violating constraint
*/
public String getPath()
{
return path;
}
/**
* @return description of constraint violation
*/
public String getMessage()
{
return message;
}
/**
* @return object in violation of constraint
*/
public String getValue()
{
return value;
}
/**
* @return String representation of violation: the four parts, each bracketed
* and separated by carriage returns
*/
public String toString()
{
return "[" + type() + "]\r[" + path + "]\r[" + message + "]\r[" + value + "]\r";
}
/**
* @return String form of violation type
*/
public String type()
{
return constraintType.toString();
}
}
} | psakar/Resteasy | resteasy-jaxrs/src/main/java/org/jboss/resteasy/api/validation/ResteasyConstraintViolation.java | Java | apache-2.0 | 1,810 |
/*
* Copyright 2004-2009 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.compass.core.lucene.engine.optimizer;
import java.io.IOException;
import org.apache.lucene.index.LuceneSubIndexInfo;
import org.compass.core.engine.SearchEngineException;
import org.compass.core.lucene.engine.manager.LuceneSearchEngineIndexManager;
/**
 * Base class for optimizers that operate on a sub-index's {@link LuceneSubIndexInfo}.
 * Resolves the index info once per optimize call and skips sub-indexes that have
 * no index data or when the optimizer has been stopped.
 *
 * @author kimchy
 */
public abstract class AbstractIndexInfoOptimizer extends AbstractOptimizer {

    protected void doOptimize(String subIndex) throws SearchEngineException {
        LuceneSubIndexInfo info = doGetIndexInfo(subIndex);
        if (info != null) {
            doOptimize(subIndex, info);
        }
    }

    protected void doForceOptimize(String subIndex) throws SearchEngineException {
        LuceneSubIndexInfo info = doGetIndexInfo(subIndex);
        if (info != null) {
            doForceOptimize(subIndex, info);
        }
    }

    /**
     * Reads the index info for the given sub-index. Returns null when the
     * sub-index has no index data or when this optimizer is no longer running;
     * wraps I/O failures in a SearchEngineException.
     */
    protected LuceneSubIndexInfo doGetIndexInfo(String subIndex) {
        LuceneSearchEngineIndexManager indexManager = getSearchEngineFactory().getLuceneIndexManager();
        LuceneSubIndexInfo info;
        try {
            info = LuceneSubIndexInfo.getIndexInfo(subIndex, indexManager);
        } catch (IOException e) {
            throw new SearchEngineException("Failed to read index info for sub index [" + subIndex + "]", e);
        }
        // No index data, or the optimizer was stopped while we were reading: nothing to do.
        if (info == null || !isRunning()) {
            return null;
        }
        return info;
    }

    protected abstract void doOptimize(String subIndex, LuceneSubIndexInfo indexInfo) throws SearchEngineException;

    protected abstract void doForceOptimize(String subIndex, LuceneSubIndexInfo indexInfo) throws SearchEngineException;
}
| baboune/compass | src/main/src/org/compass/core/lucene/engine/optimizer/AbstractIndexInfoOptimizer.java | Java | apache-2.0 | 2,416 |
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.jetbrains.kotlin.idea.stubindex;
import com.intellij.openapi.project.Project;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.stubs.StubIndex;
import com.intellij.psi.stubs.StubIndexKey;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.kotlin.psi.KtNamedFunction;
import java.util.Collection;
/**
 * Stub index over package top-level functions (both extension and
 * non-extension), keyed by their fully qualified names. Singleton; obtain via
 * {@link #getInstance()}.
 */
public class KotlinTopLevelFunctionFqnNameIndex extends AbstractStringStubIndexExtension<KtNamedFunction> {
// Index key derived from this class's name; identifies this index in the stub index framework.
private static final StubIndexKey<String, KtNamedFunction> KEY = KotlinIndexUtil.createIndexKey(KotlinTopLevelFunctionFqnNameIndex.class);
private static final KotlinTopLevelFunctionFqnNameIndex INSTANCE = new KotlinTopLevelFunctionFqnNameIndex();
@NotNull
public static KotlinTopLevelFunctionFqnNameIndex getInstance() {
return INSTANCE;
}
private KotlinTopLevelFunctionFqnNameIndex() {
super(KtNamedFunction.class);
}
@NotNull
@Override
public StubIndexKey<String, KtNamedFunction> getKey() {
return KEY;
}
/** Returns the top-level functions whose fully qualified name equals {@code s} within {@code scope}. */
@NotNull
@Override
public Collection<KtNamedFunction> get(@NotNull String s, @NotNull Project project, @NotNull GlobalSearchScope scope) {
return StubIndex.getElements(KEY, s, project, scope, KtNamedFunction.class);
}
// temporary hack, see comments in findCandidateDeclarationsInIndex (findDecompiledDeclaration.kt);
// currently identical in behavior to get(..).
@NotNull
public Collection<KtNamedFunction> getNoScopeWrap(@NotNull String s, @NotNull Project project, @NotNull GlobalSearchScope scope) {
return StubIndex.getElements(KEY, s, project, scope, KtNamedFunction.class);
}
}
| smmribeiro/intellij-community | plugins/kotlin/analysis/src/org/jetbrains/kotlin/idea/stubindex/KotlinTopLevelFunctionFqnNameIndex.java | Java | apache-2.0 | 1,887 |
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package gov.nasa.jpl.mudrod.ssearch.ranking;
import gov.nasa.jpl.mudrod.discoveryengine.MudrodAbstract;
import gov.nasa.jpl.mudrod.driver.ESDriver;
import gov.nasa.jpl.mudrod.driver.SparkDriver;
import gov.nasa.jpl.mudrod.main.MudrodConstants;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.Properties;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
/**
 * Supports importing a search-ranking training set (one CSV per query) into
 * Elasticsearch under the "trainingranking" type. Construction wipes any
 * previously imported training data and installs the mapping.
 */
public class TrainingImporter extends MudrodAbstract {

    private static final long serialVersionUID = 1L;

    public TrainingImporter(Properties props, ESDriver es, SparkDriver spark) {
        super(props, es, spark);
        // Start from a clean slate: remove all previously imported training documents.
        es.deleteAllByQuery(props.getProperty(MudrodConstants.ES_INDEX_NAME), "trainingranking", QueryBuilders.matchAllQuery());
        addMapping();
    }

    /**
     * Adds the mapping for the training set type: query, dataID and label are
     * stored as non-analyzed strings so they can be matched exactly.
     */
    public void addMapping() {
        try {
            XContentBuilder mapping = jsonBuilder().startObject().startObject("trainingranking").startObject("properties")
                .startObject("query").field("type", "string").field("index", "not_analyzed").endObject()
                .startObject("dataID").field("type", "string").field("index", "not_analyzed").endObject()
                .startObject("label").field("type", "string").field("index", "not_analyzed").endObject()
                .endObject().endObject().endObject();
            // NOTE(review): uses the literal "indexName" here while the constructor uses
            // MudrodConstants.ES_INDEX_NAME — confirm both resolve to the same property.
            es.getClient().admin().indices().preparePutMapping(props.getProperty("indexName")).setType("trainingranking").setSource(mapping).execute().actionGet();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Imports every CSV file under the given folder into Elasticsearch. The file
     * name (minus ".csv") is the query; each data row contributes its first
     * column as dataID and its last column as label. The first line of each
     * file is treated as a header and skipped.
     *
     * @param dataFolder the path to the training set folder
     * @throws IOException if reading a file or building a document fails
     */
    public void importTrainingSet(String dataFolder) throws IOException {
        es.createBulkProcessor();
        File[] files = new File(dataFolder).listFiles();
        // listFiles() returns null for a missing/unreadable folder; guard instead of NPE-ing.
        if (files != null) {
            for (File file : files) {
                // try-with-resources: the reader is closed even if parsing or indexing throws.
                try (BufferedReader br = new BufferedReader(new FileReader(file.getAbsolutePath()))) {
                    br.readLine(); // skip the CSV header row
                    String line = br.readLine();
                    while (line != null) {
                        String[] list = line.split(",");
                        String query = file.getName().replace(".csv", "");
                        if (list.length > 0) {
                            IndexRequest ir = new IndexRequest(props.getProperty("indexName"), "trainingranking")
                                .source(jsonBuilder().startObject().field("query", query).field("dataID", list[0]).field("label", list[list.length - 1]).endObject());
                            es.getBulkProcessor().add(ir);
                        }
                        line = br.readLine();
                    }
                }
            }
        }
        es.destroyBulkProcessor();
    }
}
| quintinali/mudrod | core/src/main/java/gov/nasa/jpl/mudrod/ssearch/ranking/TrainingImporter.java | Java | apache-2.0 | 3,467 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pdfbox.text;
import java.io.InputStream;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.pdfbox.contentstream.PDFStreamEngine;
import org.apache.pdfbox.pdmodel.PDPage;
import org.apache.pdfbox.pdmodel.font.encoding.GlyphList;
import org.apache.pdfbox.pdmodel.common.PDRectangle;
import org.apache.pdfbox.pdmodel.font.PDFont;
import org.apache.pdfbox.pdmodel.font.PDSimpleFont;
import org.apache.pdfbox.pdmodel.font.PDType3Font;
import org.apache.pdfbox.pdmodel.graphics.state.PDGraphicsState;
import java.io.IOException;
import org.apache.pdfbox.util.Matrix;
import org.apache.pdfbox.util.Vector;
import org.apache.pdfbox.contentstream.operator.DrawObject;
import org.apache.pdfbox.contentstream.operator.state.Concatenate;
import org.apache.pdfbox.contentstream.operator.state.Restore;
import org.apache.pdfbox.contentstream.operator.state.Save;
import org.apache.pdfbox.contentstream.operator.state.SetGraphicsStateParameters;
import org.apache.pdfbox.contentstream.operator.state.SetMatrix;
import org.apache.pdfbox.contentstream.operator.text.BeginText;
import org.apache.pdfbox.contentstream.operator.text.EndText;
import org.apache.pdfbox.contentstream.operator.text.SetFontAndSize;
import org.apache.pdfbox.contentstream.operator.text.SetTextHorizontalScaling;
import org.apache.pdfbox.contentstream.operator.text.ShowTextAdjusted;
import org.apache.pdfbox.contentstream.operator.text.ShowTextLine;
import org.apache.pdfbox.contentstream.operator.text.ShowTextLineAndSpace;
import org.apache.pdfbox.contentstream.operator.text.MoveText;
import org.apache.pdfbox.contentstream.operator.text.MoveTextSetLeading;
import org.apache.pdfbox.contentstream.operator.text.NextLine;
import org.apache.pdfbox.contentstream.operator.text.SetCharSpacing;
import org.apache.pdfbox.contentstream.operator.text.SetTextLeading;
import org.apache.pdfbox.contentstream.operator.text.SetTextRenderingMode;
import org.apache.pdfbox.contentstream.operator.text.SetTextRise;
import org.apache.pdfbox.contentstream.operator.text.SetWordSpacing;
import org.apache.pdfbox.contentstream.operator.text.ShowText;
/**
* PDFStreamEngine subclass for advanced processing of text via TextPosition.
*
* @see org.apache.pdfbox.text.TextPosition
* @author Ben Litchfield
* @author John Hewson
*/
class PDFTextStreamEngine extends PDFStreamEngine
{
private static final Log LOG = LogFactory.getLog(PDFTextStreamEngine.class);
private int pageRotation;
private PDRectangle pageSize;
private final GlyphList glyphList;
/**
 * Constructor: registers the subset of content-stream operators needed for
 * text extraction (text state, positioning, and show-text operators, plus
 * graphics state handling), and loads an additional glyph list used when
 * mapping glyph names to Unicode.
 *
 * @throws IOException if the additional glyph list resource cannot be read
 */
PDFTextStreamEngine() throws IOException
{
addOperator(new BeginText());
addOperator(new Concatenate());
addOperator(new DrawObject()); // special text version
addOperator(new EndText());
addOperator(new SetGraphicsStateParameters());
addOperator(new Save());
addOperator(new Restore());
addOperator(new NextLine());
addOperator(new SetCharSpacing());
addOperator(new MoveText());
addOperator(new MoveTextSetLeading());
addOperator(new SetFontAndSize());
addOperator(new ShowText());
addOperator(new ShowTextAdjusted());
addOperator(new SetTextLeading());
addOperator(new SetMatrix());
addOperator(new SetTextRenderingMode());
addOperator(new SetTextRise());
addOperator(new SetWordSpacing());
addOperator(new SetTextHorizontalScaling());
addOperator(new ShowTextLine());
addOperator(new ShowTextLineAndSpace());
// load additional glyph list for Unicode mapping
String path = "org/apache/pdfbox/resources/glyphlist/additional.txt";
InputStream input = GlyphList.class.getClassLoader().getResourceAsStream(path);
glyphList = new GlyphList(GlyphList.getAdobeGlyphList(), input);
}
/**
 * This will initialise and process the contents of the stream: captures the
 * page's rotation and crop box (used by text-position calculations) before
 * delegating the actual stream processing to the superclass.
 *
 * @param page the page to process
 * @throws java.io.IOException if there is an error accessing the stream.
 */
@Override
public void processPage(PDPage page) throws IOException
{
this.pageRotation = page.getRotation();
this.pageSize = page.getCropBox();
super.processPage(page);
}
/**
* This method was originally written by Ben Litchfield for PDFStreamEngine.
*/
@Override
protected void showGlyph(Matrix textRenderingMatrix, PDFont font, int code, String unicode,
Vector displacement) throws IOException
{
//
// legacy calculations which were previously in PDFStreamEngine
//
PDGraphicsState state = getGraphicsState();
Matrix ctm = state.getCurrentTransformationMatrix();
float fontSize = state.getTextState().getFontSize();
float horizontalScaling = state.getTextState().getHorizontalScaling() / 100f;
Matrix textMatrix = getTextMatrix();
// 1/2 the bbox is used as the height todo: why?
float glyphHeight = font.getBoundingBox().getHeight() / 2;
// transformPoint from glyph space -> text space
float height = font.getFontMatrix().transformPoint(0, glyphHeight).y;
// (modified) combined displacement, this is calculated *without* taking the character
// spacing and word spacing into account, due to legacy code in TextStripper
float tx = displacement.getX() * fontSize * horizontalScaling;
float ty = 0; // todo: support vertical writing mode
// (modified) combined displacement matrix
Matrix td = Matrix.getTranslateInstance(tx, ty);
// (modified) text rendering matrix
Matrix nextTextRenderingMatrix = td.multiply(textMatrix).multiply(ctm); // text space -> device space
float nextX = nextTextRenderingMatrix.getTranslateX();
float nextY = nextTextRenderingMatrix.getTranslateY();
// (modified) width and height calculations
float dxDisplay = nextX - textRenderingMatrix.getTranslateX();
float dyDisplay = height * textRenderingMatrix.getScalingFactorY();
//
// start of the original method
//
// Note on variable names. There are three different units being used in this code.
// Character sizes are given in glyph units, text locations are initially given in text
// units, and we want to save the data in display units. The variable names should end with
// Text or Disp to represent if the values are in text or disp units (no glyph units are
// saved).
float fontSizeText = getGraphicsState().getTextState().getFontSize();
float horizontalScalingText = getGraphicsState().getTextState().getHorizontalScaling()/100f;
//Matrix ctm = getGraphicsState().getCurrentTransformationMatrix();
float glyphSpaceToTextSpaceFactor = 1 / 1000f;
if (font instanceof PDType3Font)
{
// This will typically be 1000 but in the case of a type3 font
// this might be a different number
glyphSpaceToTextSpaceFactor = 1f / font.getFontMatrix().getScaleX();
}
float spaceWidthText = 0;
try
{
// to avoid crash as described in PDFBOX-614, see what the space displacement should be
spaceWidthText = font.getSpaceWidth() * glyphSpaceToTextSpaceFactor;
}
catch (Throwable exception)
{
LOG.warn(exception, exception);
}
if (spaceWidthText == 0)
{
spaceWidthText = font.getAverageFontWidth() * glyphSpaceToTextSpaceFactor;
// the average space width appears to be higher than necessary so make it smaller
spaceWidthText *= .80f;
}
if (spaceWidthText == 0)
{
spaceWidthText = 1.0f; // if could not find font, use a generic value
}
// the space width has to be transformed into display units
float spaceWidthDisplay = spaceWidthText * fontSizeText * horizontalScalingText *
textRenderingMatrix.getScalingFactorX() * ctm.getScalingFactorX();
// use our additional glyph list for Unicode mapping
unicode = font.toUnicode(code, glyphList);
// when there is no Unicode mapping available, Acrobat simply coerces the character code
// into Unicode, so we do the same. Subclasses of PDFStreamEngine don't necessarily want
// this, which is why we leave it until this point in PDFTextStreamEngine.
if (unicode == null)
{
if (font instanceof PDSimpleFont)
{
char c = (char) code;
unicode = new String(new char[] { c });
}
else
{
// Acrobat doesn't seem to coerce composite font's character codes, instead it
// skips them. See the "allah2.pdf" TestTextStripper file.
return;
}
}
processTextPosition(new TextPosition(pageRotation, pageSize.getWidth(),
pageSize.getHeight(), textRenderingMatrix, nextX, nextY,
dyDisplay, dxDisplay,
spaceWidthDisplay, unicode, new int[] { code } , font, fontSize,
(int)(fontSize * textRenderingMatrix.getScalingFactorX())));
}
/**
* A method provided as an event interface to allow a subclass to perform some specific
* functionality when text needs to be processed.
*
* @param text The text to be processed.
*/
protected void processTextPosition(TextPosition text)
{
// subclasses can override to provide specific functionality
}
}
| ZhenyaM/veraPDF-pdfbox | pdfbox/src/main/java/org/apache/pdfbox/text/PDFTextStreamEngine.java | Java | apache-2.0 | 10,595 |
#include "test_stl.h"
#include <QSet>
#include <QtTest/QtTest>
#include <vector>
#include <cstring>
#include <cwchar>
#include "../src/utils/stl.h"
void STLTest::testBufferArry() {
    using utils::BufferArray;

    // construction from std::string: buffer holds the chars plus a NUL terminator
    std::string test1("123456789abcdefg");
    BufferArray buffer(test1);
    QVERIFY(test1.size() + 1 == buffer.size());
    QVERIFY(buffer.capacity() == buffer.size());
    QVERIFY(strncmp(test1.data(), buffer.data(), test1.size()) == 0);

    // operator[] (BUG FIX: the original asserted on test1[i], i.e. the source
    // std::string, and never exercised BufferArray::operator[])
    QVERIFY(buffer[0] == '1');
    QVERIFY(buffer[1] == '2');
    QVERIFY(buffer[2] == '3');

    // growing resize raises capacity to the requested size
    buffer.resize(30);
    QVERIFY(buffer.capacity() == 30);

    // shrink_to_fit
    buffer.shrink_to_fit();
    QVERIFY(buffer.capacity() == buffer.size());

    // shrinking resize keeps the old, larger capacity
    buffer.resize(9);
    std::string test2("12345678");
    QVERIFY(test2.size() + 1 == buffer.size());
    QVERIFY(buffer.capacity() > buffer.size());
    QVERIFY(strncmp(test2.data(), buffer.data(), test2.size()) == 0);

    // shrink_to_fit
    buffer.shrink_to_fit();
    QVERIFY(buffer.capacity() == buffer.size());

#ifdef UTILS_CXX11_MODE
    // move assignment transfers the other buffer's contents
    std::string test3("gqjdiw913abc_123d");
    BufferArray other_buffer(test3);
    buffer = std::move(other_buffer);
    QVERIFY(test3.size() + 1 == buffer.size());
    QVERIFY(buffer.capacity() == buffer.size());
    QVERIFY(strncmp(test3.data(), buffer.data(), test3.size()) == 0);

    // construction from a C string; size includes the terminating NUL
    const char test_string[] = "abcdefg";
    size_t test_size = sizeof(test_string);
    buffer = BufferArray(test_string);
    QVERIFY(test_size == buffer.size());
    QVERIFY(buffer.capacity() == buffer.size());
    QVERIFY(memcmp(test_string, buffer.data(), test_size) == 0);
#endif
}
void STLTest::testWBufferArry() {
    using utils::WBufferArray;

    // construction from std::wstring: buffer holds the chars plus a NUL terminator
    std::wstring test1(L"123456789abcdefg");
    WBufferArray buffer(test1);
    QVERIFY(test1.size() + 1 == buffer.size());
    QVERIFY(buffer.capacity() == buffer.size());
    QVERIFY(wcsncmp(test1.data(), buffer.data(), test1.size()) == 0);

    // operator[] (BUG FIX: the original asserted on test1[i], i.e. the source
    // std::wstring, and never exercised WBufferArray::operator[])
    QVERIFY(buffer[0] == L'1');
    QVERIFY(buffer[1] == L'2');
    QVERIFY(buffer[2] == L'3');

    // growing resize raises capacity to the requested size
    buffer.resize(30);
    QVERIFY(buffer.capacity() == 30);

    // shrink_to_fit
    buffer.shrink_to_fit();
    QVERIFY(buffer.capacity() == buffer.size());

    // shrinking resize keeps the old, larger capacity
    buffer.resize(9);
    std::wstring test2(L"12345678");
    QVERIFY(test2.size() + 1 == buffer.size());
    QVERIFY(buffer.capacity() > buffer.size());
    QVERIFY(wcsncmp(test2.data(), buffer.data(), test2.size()) == 0);

#ifdef UTILS_CXX11_MODE
    // move assignment transfers the other buffer's contents
    std::wstring test3(L"gqjdiw913abc_123d");
    WBufferArray other_buffer(test3);
    buffer = std::move(other_buffer);
    QVERIFY(test3.size() + 1 == buffer.size());
    QVERIFY(buffer.capacity() == buffer.size());
    QVERIFY(wcsncmp(test3.data(), buffer.data(), test3.size()) == 0);

    // construction from a wide C string; size counts wchar_t units incl. the NUL
    const wchar_t test_string[] = L"abcdefg";
    size_t test_size = sizeof(test_string) / sizeof(wchar_t);
    buffer = WBufferArray(test_string);
    QVERIFY(test_size == buffer.size());
    QVERIFY(buffer.capacity() == buffer.size());
    // BUG FIX: memcmp(..., test_size) compared only test_size *bytes*, i.e. a
    // prefix of the wide string; compare test_size wide characters instead.
    QVERIFY(wmemcmp(test_string, buffer.data(), test_size) == 0);
#endif
}
QTEST_APPLESS_MAIN(STLTest)
| lucius-feng/seafile-client | tests/test_stl.cpp | C++ | apache-2.0 | 3,120 |
/**
* Copyright 2009 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.waveprotocol.wave.model.supplement;
import org.waveprotocol.wave.model.id.WaveletId;
import org.waveprotocol.wave.model.wave.SourcesEvents;
/**
 * A {@link PrimitiveSupplement} that can be observed: registered
 * {@link Listener}s are notified whenever one of the supplement's
 * primitive values changes.
 */
public interface ObservablePrimitiveSupplement extends PrimitiveSupplement,
    SourcesEvents<ObservablePrimitiveSupplement.Listener> {

  /** Receiver of change notifications from an observable supplement. */
  public interface Listener {

    /**
     * Notifies this listener that the last-read version of a blip has changed.
     */
    void onLastReadBlipVersionChanged(WaveletId wid, String bid, int oldVersion, int newVersion);

    /**
     * Notifies this listener that the minimum last-read version of all wave
     * parts has changed.
     */
    void onLastReadWaveletVersionChanged(WaveletId wid, int oldVersion, int newVersion);

    /**
     * Notifies this listener that the last-read version of the
     * participants-collection has changed.
     */
    void onLastReadParticipantsVersionChanged(WaveletId wid, int oldVersion, int newVersion);

    /**
     * Notifies this listener that the last-read version of the tags has
     * changed.
     */
    void onLastReadTagsVersionChanged(WaveletId wid, int oldVersion, int newVersion);

    /**
     * Notifies this listener that the followed state has been set to true.
     */
    void onFollowed();

    /**
     * Notifies this listener that the followed state has been set to false.
     */
    void onUnfollowed();

    /**
     * Notifies this listener that the followed state has been cleared.
     */
    void onFollowCleared();

    /**
     * Notifies this listener that last-archived version of a wavelet has
     * changed.
     */
    void onArchiveVersionChanged(WaveletId wid, int oldVersion, int newVersion);

    /**
     * Notifies this listener that archive value has been set.
     */
    // TODO(hearnden/fabio) remove the 'cleared' field from the primitive model
    void onArchiveClearChanged(boolean oldValue, boolean newValue);

    /**
     * Notifies this listener that a folder id has been added.
     */
    void onFolderAdded(int newFolder);

    /**
     * Notifies this listener that a folder id has been removed.
     */
    void onFolderRemoved(int oldFolder);

    /**
     * Notifies this listener that the wanted-evaluations of a wavelet has
     * changed.
     */
    void onWantedEvaluationsChanged(WaveletId wid);

    /**
     * Notifies this listener that a thread's state has been changed.
     * ThreadState values shall never be null.
     */
    void onThreadStateChanged(WaveletId wid, String tid,
        ThreadState oldState, ThreadState newState);

    /**
     * Notifies this listener that gadget state has been changed.
     */
    void onGadgetStateChanged(String gadgetId, String key, String oldValue, String newValue);
  }
}
| JaredMiller/Wave | src/org/waveprotocol/wave/model/supplement/ObservablePrimitiveSupplement.java | Java | apache-2.0 | 3,341 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.taobao.weex.dom;
import android.support.v4.util.ArrayMap;
import com.alibaba.fastjson.JSONObject;
import com.taobao.weex.dom.binding.ELUtils;
import com.taobao.weex.dom.binding.JSONUtils;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
/**
* Store value of component event
*/
/**
 * Holds the event names attached to a component, together with the optional
 * dynamic-binding arguments declared for each event.
 *
 * A bound event arrives as JSON of the form:
 * {
 *   type: 'appear',
 *   params: [
 *     { '@binding': 'index' },
 *     'static',
 *     { '@binding': 'item.name' },
 *     { '@binding': '$event' }
 *   ]
 * }
 */
public class WXEvent extends ArrayList<String> implements Serializable, Cloneable {

  private static final long serialVersionUID = -8186587029452440107L;

  /** JSON key holding the event name. */
  public static final String EVENT_KEY_TYPE = "type";

  /** JSON key holding the binding arguments. */
  public static final String EVENT_KEY_ARGS = "params";

  // Lazily created; stays null until an event with binding args is added (weex only).
  private ArrayMap mEventBindingArgs;

  // Resolved argument values per event name; transient, not copied by clone().
  private ArrayMap<String, List<Object>> mEventBindingArgsValues;

  @Override
  public void clear() {
    final ArrayMap bindingArgs = mEventBindingArgs;
    if (bindingArgs != null) {
      bindingArgs.clear();
    }
    final ArrayMap<String, List<Object>> bindingValues = mEventBindingArgsValues;
    if (bindingValues != null) {
      bindingValues.clear();
    }
    super.clear();
  }

  /** Removes an event name along with any binding args and values stored for it. */
  public boolean remove(String o) {
    if (mEventBindingArgs != null) {
      mEventBindingArgs.remove(o);
    }
    if (mEventBindingArgsValues != null) {
      mEventBindingArgsValues.remove(o);
    }
    return super.remove(o);
  }

  /**
   * Binding args per event name; can be null when no bound event was added.
   */
  public ArrayMap getEventBindingArgs() {
    return mEventBindingArgs;
  }

  /** Resolved binding values per event name; can be null. */
  public ArrayMap<String, List<Object>> getEventBindingArgsValues() {
    return mEventBindingArgsValues;
  }

  /**
   * Adds an event given either as a plain name, a JSON string, or a JSONObject
   * carrying a name and binding arguments.
   */
  public void addEvent(Object event) {
    if (event instanceof JSONObject) {
      addBindingEvent((JSONObject) event);
      return;
    }
    if (event instanceof CharSequence) {
      final String text = event.toString();
      if (JSONUtils.isJSON(text)) {
        // JSON string: parse and treat it as a bound event
        addEvent(JSONUtils.toJSON(text));
      } else if (!contains(text)) {
        add(text);
      }
    }
  }

  /** Extracts the event name from a plain string or a bound-event JSONObject. */
  public static String getEventName(Object event) {
    if (event == null) {
      return null;
    }
    if (event instanceof JSONObject) {
      return ((JSONObject) event).getString(WXEvent.EVENT_KEY_TYPE);
    }
    return event.toString();
  }

  /**
   * Replaces any JSON-string entries in this list with their event names,
   * registering their binding arguments along the way.
   */
  public void parseStatements() {
    for (int index = 0; index < size(); index++) {
      final String raw = get(index);
      if (JSONUtils.isJSON(raw)) {
        final JSONObject statement = JSONUtils.toJSON(raw);
        set(index, addBindingEvent(statement));
      }
    }
  }

  // Registers a bound event described by JSON and returns its name (may be null).
  private String addBindingEvent(JSONObject bindings) {
    final String eventName = bindings.getString(WXEvent.EVENT_KEY_TYPE);
    final Object args = bindings.get(WXEvent.EVENT_KEY_ARGS);
    if (eventName != null) {
      addBindingArgsEvent(eventName, args);
    }
    return eventName;
  }

  // Adds the event name (once) and stores its compiled binding block, if any.
  private void addBindingArgsEvent(String eventName, Object args) {
    if (!contains(eventName)) {
      add(eventName);
    }
    if (args == null) {
      return;
    }
    if (mEventBindingArgs == null) {
      mEventBindingArgs = new ArrayMap();
    }
    mEventBindingArgs.put(eventName, ELUtils.bindingBlock(args));
  }

  /** Stores (or removes, when value is null) the resolved args for an event. */
  public void putEventBindingArgsValue(String event, List<Object> value) {
    if (mEventBindingArgsValues == null) {
      mEventBindingArgsValues = new ArrayMap();
    }
    if (value != null) {
      mEventBindingArgsValues.put(event, value);
    } else {
      mEventBindingArgsValues.remove(event);
    }
  }

  @Override
  public WXEvent clone() {
    final WXEvent copy = new WXEvent();
    copy.addAll(this);
    if (mEventBindingArgs != null) {
      copy.mEventBindingArgs = new ArrayMap(mEventBindingArgs);
    }
    // dynamic per-fire state is intentionally not cloned
    copy.mEventBindingArgsValues = null;
    return copy;
  }
}
| erha19/incubator-weex | android/sdk/src/main/java/com/taobao/weex/dom/WXEvent.java | Java | apache-2.0 | 4,905 |
package org.jb2011.lnf.beautyeye.winlnfutils.d;
///*
// * @(#)XPStyle.java 1.28 07/01/09
// *
// * Copyright 2006 Sun Microsystems, Inc. All rights reserved.
// * SUN PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
// */
//
///*
// * <p>These classes are designed to be used while the
// * corresponding <code>LookAndFeel</code> class has been installed
// * (<code>UIManager.setLookAndFeel(new <i>XXX</i>LookAndFeel())</code>).
// * Using them while a different <code>LookAndFeel</code> is installed
// * may produce unexpected results, including exceptions.
// * Additionally, changing the <code>LookAndFeel</code>
// * maintained by the <code>UIManager</code> without updating the
// * corresponding <code>ComponentUI</code> of any
// * <code>JComponent</code>s may also produce unexpected results,
// * such as the wrong colors showing up, and is generally not
// * encouraged.
// *
// */
//
//package org.jb2011.lnf.beautyeye.winlnfutils;
//
//import java.awt.Color;
//import java.awt.Component;
//import java.awt.Dimension;
//import java.awt.Graphics;
//import java.awt.GraphicsConfiguration;
//import java.awt.Image;
//import java.awt.Insets;
//import java.awt.Point;
//import java.awt.Rectangle;
//import java.awt.Toolkit;
//import java.awt.image.BufferedImage;
//import java.awt.image.DataBufferInt;
//import java.awt.image.WritableRaster;
//import java.security.AccessController;
//import java.util.HashMap;
//
//import javax.swing.AbstractButton;
//import javax.swing.JButton;
//import javax.swing.JCheckBox;
//import javax.swing.JRadioButton;
//import javax.swing.JToolBar;
//import javax.swing.UIManager;
//import javax.swing.border.AbstractBorder;
//import javax.swing.border.Border;
//import javax.swing.border.EmptyBorder;
//import javax.swing.border.LineBorder;
//import javax.swing.plaf.ColorUIResource;
//import javax.swing.plaf.InsetsUIResource;
//import javax.swing.plaf.UIResource;
//import javax.swing.text.JTextComponent;
//
//import org.jb2011.lnf.beautyeye.winlnfutils.BETMSchema.Part;
//import org.jb2011.lnf.beautyeye.winlnfutils.BETMSchema.Prop;
//import org.jb2011.lnf.beautyeye.winlnfutils.BETMSchema.State;
//import org.jb2011.lnf.beautyeye.winlnfutils.BETMSchema.TypeEnum;
//
//import sun.awt.windows.ThemeReader;
//import sun.security.action.GetPropertyAction;
//import sun.swing.CachedPainter;
//
//import com.sun.java.swing.plaf.windows.WindowsClassicLookAndFeel;
//
///*
// * 本类实际就是XP主题包中的类,未作任何改变.
// * 代码参考java源码,仅作兼容性修改
// * Add by js 2009-09-01.
// */
///**
// * Implements Windows XP Styles for the Windows Look and Feel.
// *
// * @version 1.28 01/09/07
// * @author Leif Samuelsson
// */
//public class BEXPStyle {
// // Singleton instance of this class
// private static BEXPStyle xp;
//
// // Singleton instance of SkinPainter
// private static SkinPainter skinPainter = new SkinPainter();
//
// private static Boolean themeActive = null;
//
// private HashMap<String, Border> borderMap;
// private HashMap<String, Color> colorMap;
//
// private boolean flatMenus;
//
// static {
// invalidateStyle();
// }
//
// /** Static method for clearing the hashmap and loading the
// * current XP style and theme
// */
// static synchronized void invalidateStyle() {
// xp = null;
// themeActive = null;
// }
//
// /** Get the singleton instance of this class
// *
// * @return the singleton instance of this class or null if XP styles
// * are not active or if this is not Windows XP
// */
// public static synchronized BEXPStyle getXP() {
// if (themeActive == null) {
// Toolkit toolkit = Toolkit.getDefaultToolkit();
// themeActive =
// (Boolean)toolkit.getDesktopProperty("win.xpstyle.themeActive");
// if (themeActive == null) {
// themeActive = Boolean.FALSE;
// }
// if (themeActive.booleanValue()) {
// GetPropertyAction propertyAction =
// new GetPropertyAction("swing.noxp");
// if (AccessController.doPrivileged(propertyAction) == null &&
// ThemeReader.isThemed() &&
// !(UIManager.getLookAndFeel()
// instanceof WindowsClassicLookAndFeel)) {
//
// xp = new BEXPStyle();
// }
// }
// }
// return xp;
// }
//
//
//
// /** Get a named <code>String</code> value from the current style
// *
// * @param part a <code>Part</code>
// * @param state a <code>String</code>
// * @param attributeKey a <code>String</code>
// * @return a <code>String</code> or null if key is not found
// * in the current style
// *
// * This is currently only used by WindowsInternalFrameTitlePane for painting
// * title foregound and can be removed when no longer needed
// */
// String getString(Component c, Part part, State state, Prop prop) {
// return getTypeEnumName(c, part, state, prop);
// }
//
// private static String getTypeEnumName(Component c, Part part, State state, Prop prop) {
// int enumValue = ThemeReader.getEnum(part.getControlName(c), part.getValue(),
// State.getValue(part, state),
// prop.getValue());
// if (enumValue == -1) {
// return null;
// }
// return TypeEnum.getTypeEnum(prop, enumValue).getName();
// }
//
//
//
//
// /** Get a named <code>int</code> value from the current style
// *
// * @param part a <code>Part</code>
// * @return an <code>int</code> or null if key is not found
// * in the current style
// */
// int getInt(Component c, Part part, State state, Prop prop, int fallback) {
// return ThemeReader.getInt(part.getControlName(c), part.getValue(),
// State.getValue(part, state),
// prop.getValue());
// }
//
// /** Get a named <code>Dimension</code> value from the current style
// *
// * @param key a <code>String</code>
// * @return a <code>Dimension</code> or null if key is not found
// * in the current style
// *
// * This is currently only used by WindowsProgressBarUI and the value
// * should probably be cached there instead of here.
// */
// Dimension getDimension(Component c, Part part, State state, Prop prop) {
// return ThemeReader.getPosition(part.getControlName(c), part.getValue(),
// State.getValue(part, state),
// prop.getValue());
// }
//
// /** Get a named <code>Point</code> (e.g. a location or an offset) value
// * from the current style
// *
// * @param key a <code>String</code>
// * @return a <code>Point</code> or null if key is not found
// * in the current style
// *
// * This is currently only used by WindowsInternalFrameTitlePane for painting
// * title foregound and can be removed when no longer needed
// */
// Point getPoint(Component c, Part part, State state, Prop prop) {
// Dimension d = ThemeReader.getPosition(part.getControlName(c), part.getValue(),
// State.getValue(part, state),
// prop.getValue());
// if (d != null) {
// return new Point(d.width, d.height);
// } else {
// return null;
// }
// }
//
// /** Get a named <code>Insets</code> value from the current style
// *
// * @param key a <code>String</code>
// * @return an <code>Insets</code> object or null if key is not found
// * in the current style
// *
// * This is currently only used to create borders and by
// * WindowsInternalFrameTitlePane for painting title foregound.
// * The return value is already cached in those places.
// */
// Insets getMargin(Component c, Part part, State state, Prop prop) {
// return ThemeReader.getThemeMargins(part.getControlName(c), part.getValue(),
// State.getValue(part, state),
// prop.getValue());
// }
//
//
// /** Get a named <code>Color</code> value from the current style
// *
// * @param part a <code>Part</code>
// * @return a <code>Color</code> or null if key is not found
// * in the current style
// */
// synchronized Color getColor(Skin skin, Prop prop, Color fallback) {
// String key = skin.toString() + "." + prop.name();
// Part part = skin.part;
// Color color = colorMap.get(key);
// if (color == null) {
// color = ThemeReader.getColor(part.getControlName(null), part.getValue(),
// State.getValue(part, skin.state),
// prop.getValue());
// if (color != null) {
// color = new ColorUIResource(color);
// colorMap.put(key, color);
// }
// }
// return (color != null) ? color : fallback;
// }
//
// public Color getColor(Component c, Part part, State state, Prop prop, Color fallback) {
// return getColor(new Skin(c, part, state), prop, fallback);
// }
//
//
//
// /** Get a named <code>Border</code> value from the current style
// *
// * @param part a <code>Part</code>
// * @return a <code>Border</code> or null if key is not found
// * in the current style or if the style for the particular
// * part is not defined as "borderfill".
// */
// public synchronized Border getBorder(Component c, Part part) {
// if (part == Part.MENU) {
// // Special case because XP has no skin for menus
// if (flatMenus) {
// // TODO: The classic border uses this color, but we should
// // create a new UI property called "PopupMenu.borderColor"
// // instead.
// return new XPFillBorder(UIManager.getColor("InternalFrame.borderShadow"),
// 1);
// } else {
// return null; // Will cause L&F to use classic border
// }
// }
// Skin skin = new Skin(c, part, null);
// Border border = borderMap.get(skin.string);
// if (border == null) {
// String bgType = getTypeEnumName(c, part, null, Prop.BGTYPE);
// if ("borderfill".equalsIgnoreCase(bgType)) {
// int thickness = getInt(c, part, null, Prop.BORDERSIZE, 1);
// Color color = getColor(skin, Prop.BORDERCOLOR, Color.black);
// border = new XPFillBorder(color, thickness);
// } else if ("imagefile".equalsIgnoreCase(bgType)) {
// Insets m = getMargin(c, part, null, Prop.SIZINGMARGINS);
// if (m != null) {
// if (getBoolean(c, part, null, Prop.BORDERONLY)) {
// border = new XPImageBorder(c, part);
// } else {
// if(part == Part.TP_BUTTON) {
// border = new XPEmptyBorder(new Insets(3,3,3,3));
// } else {
// border = new XPEmptyBorder(m);
// }
// }
// }
// }
// if (border != null) {
// borderMap.put(skin.string, border);
// }
// }
// return border;
// }
//
// private class XPFillBorder extends LineBorder implements UIResource {
// XPFillBorder(Color color, int thickness) {
// super(color, thickness);
// }
//
// public Insets getBorderInsets(Component c) {
// return getBorderInsets(c, new Insets(0,0,0,0));
// }
//
// public Insets getBorderInsets(Component c, Insets insets) {
// Insets margin = null;
// //
// // Ideally we'd have an interface defined for classes which
// // support margins (to avoid this hackery), but we've
// // decided against it for simplicity
// //
// if (c instanceof AbstractButton) {
// margin = ((AbstractButton)c).getMargin();
// } else if (c instanceof JToolBar) {
// margin = ((JToolBar)c).getMargin();
// } else if (c instanceof JTextComponent) {
// margin = ((JTextComponent)c).getMargin();
// }
// insets.top = (margin != null? margin.top : 0) + thickness;
// insets.left = (margin != null? margin.left : 0) + thickness;
// insets.bottom = (margin != null? margin.bottom : 0) + thickness;
// insets.right = (margin != null? margin.right : 0) + thickness;
//
// return insets;
// }
// }
//
// private class XPImageBorder extends AbstractBorder implements UIResource {
// Skin skin;
//
// XPImageBorder(Component c, Part part) {
// this.skin = getSkin(c, part);
// }
//
// public void paintBorder(Component c, Graphics g,
// int x, int y, int width, int height) {
// skin.paintSkin(g, x, y, width, height, null);
// }
//
// public Insets getBorderInsets(Component c) {
// return getBorderInsets(c, new Insets(0,0,0,0));
// }
//
// public Insets getBorderInsets(Component c, Insets insets) {
// Insets margin = null;
// Insets borderInsets = skin.getContentMargin();
// //
// // Ideally we'd have an interface defined for classes which
// // support margins (to avoid this hackery), but we've
// // decided against it for simplicity
// //
// if (c instanceof AbstractButton) {
// margin = ((AbstractButton)c).getMargin();
// } else if (c instanceof JToolBar) {
// margin = ((JToolBar)c).getMargin();
// } else if (c instanceof JTextComponent) {
// margin = ((JTextComponent)c).getMargin();
// }
// insets.top = (margin != null? margin.top : 0) + borderInsets.top;
// insets.left = (margin != null? margin.left : 0) + borderInsets.left;
// insets.bottom = (margin != null? margin.bottom : 0) + borderInsets.bottom;
// insets.right = (margin != null? margin.right : 0) + borderInsets.right;
//
// return insets;
// }
// }
//
// private class XPEmptyBorder extends EmptyBorder implements UIResource {
// XPEmptyBorder(Insets m) {
// super(m.top+2, m.left+2, m.bottom+2, m.right+2);
// }
//
// public Insets getBorderInsets(Component c) {
// return getBorderInsets(c, getBorderInsets());
// }
//
// public Insets getBorderInsets(Component c, Insets insets) {
// insets = super.getBorderInsets(c, insets);
//
// Insets margin = null;
// if (c instanceof AbstractButton) {
// Insets m = ((AbstractButton)c).getMargin();
// // if this is a toolbar button then ignore getMargin()
// // and subtract the padding added by the constructor
// if(c.getParent() instanceof JToolBar
// && ! (c instanceof JRadioButton)
// && ! (c instanceof JCheckBox)
// && m instanceof InsetsUIResource) {
// insets.top -= 2;
// insets.left -= 2;
// insets.bottom -= 2;
// insets.right -= 2;
// } else {
// margin = m;
// }
// } else if (c instanceof JToolBar) {
// margin = ((JToolBar)c).getMargin();
// } else if (c instanceof JTextComponent) {
// margin = ((JTextComponent)c).getMargin();
// }
// if (margin != null) {
// insets.top = margin.top + 2;
// insets.left = margin.left + 2;
// insets.bottom = margin.bottom + 2;
// insets.right = margin.right + 2;
// }
// return insets;
// }
// }
//
// public boolean isSkinDefined(Component c, Part part) {
// return (part.getValue() == 0)
// || ThemeReader.isThemePartDefined(
// part.getControlName(c), part.getValue(), 0);
// }
//
// /** Get a <code>Skin</code> object from the current style
// * for a named part (component type)
// *
// * @param part a <code>Part</code>
// * @return a <code>Skin</code> object
// */
// public synchronized Skin getSkin(Component c, Part part) {
// assert isSkinDefined(c, part) : "part " + part + " is not defined";
// return new Skin(c, part, null);
// }
//
//
//
//
// /** A class which encapsulates attributes for a given part
// * (component type) and which provides methods for painting backgrounds
// * and glyphs
// */
// public static class Skin {
// final Component component;
// final Part part;
// final State state;
//
// private final String string;
// private Dimension size = null;
//
// Skin(Component component, Part part) {
// this(component, part, null);
// }
//
// Skin(Part part, State state) {
// this(null, part, state);
// }
//
// Skin(Component component, Part part, State state) {
// this.component = component;
// this.part = part;
// this.state = state;
//
// String str = part.getControlName(component) +"." + part.name();
// if (state != null) {
// str += "("+state.name()+")";
// }
// string = str;
// }
//
// public Insets getContentMargin() {
// // This is only called by WindowsTableHeaderUI so far.
// return ThemeReader.getThemeMargins(part.getControlName(null), part.getValue(),
// 0, Prop.SIZINGMARGINS.getValue());
// }
//
// private int getWidth(State state) {
// if (size == null) {
// size = getPartSize(part, state);
// }
// return size.width;
// }
//
// public int getWidth() {
// return getWidth((state != null) ? state : State.NORMAL);
// }
//
// private int getHeight(State state) {
// if (size == null) {
// size = getPartSize(part, state);
// }
// return size.height;
// }
//
// public int getHeight() {
// return getHeight((state != null) ? state : State.NORMAL);
// }
//
// public String toString() {
// return string;
// }
//
// public boolean equals(Object obj) {
// return (obj instanceof Skin && ((Skin)obj).string.equals(string));
// }
//
// public int hashCode() {
// return string.hashCode();
// }
//
// /** Paint a skin at x, y.
// *
// * @param g the graphics context to use for painting
// * @param dx the destination <i>x</i> coordinate.
// * @param dy the destination <i>y</i> coordinate.
// * @param state which state to paint
// */
// public void paintSkin(Graphics g, int dx, int dy, State state) {
// if (state == null) {
// state = this.state;
// }
// paintSkin(g, dx, dy, getWidth(state), getHeight(state), state);
// }
//
// /** Paint a skin in an area defined by a rectangle.
// *
// * @param g the graphics context to use for painting
// * @param r a <code>Rectangle</code> defining the area to fill,
// * may cause the image to be stretched or tiled
// * @param state which state to paint
// */
// void paintSkin(Graphics g, Rectangle r, State state) {
// paintSkin(g, r.x, r.y, r.width, r.height, state);
// }
//
// /** Paint a skin at a defined position and size
// *
// * @param g the graphics context to use for painting
// * @param dx the destination <i>x</i> coordinate.
// * @param dy the destination <i>y</i> coordinate.
// * @param dw the width of the area to fill, may cause
// * the image to be stretched or tiled
// * @param dh the height of the area to fill, may cause
// * the image to be stretched or tiled
// * @param state which state to paint
// */
// public void paintSkin(Graphics g, int dx, int dy, int dw, int dh, State state) {
// skinPainter.paint(null, g, dx, dy, dw, dh, this, state);
// }
// /**
// * Paint a skin at a defined position and size
// *
// * @param g the graphics context to use for painting
// * @param dx the destination <i>x</i> coordinate
// * @param dy the destination <i>y</i> coordinate
// * @param dw the width of the area to fill, may cause
// * the image to be stretched or tiled
// * @param dh the height of the area to fill, may cause
// * the image to be stretched or tiled
// * @param state which state to paint
// * @param borderFill should test if the component uses a border fill
// * and skip painting if it is
// */
// void paintSkin(Graphics g, int dx, int dy, int dw, int dh, State state,
// boolean borderFill) {
// if(borderFill && "borderfill".equals(getTypeEnumName(component, part,
// state, Prop.BGTYPE))) {
// return;
// }
// skinPainter.paint(null, g, dx, dy, dw, dh, this, state);
// }
// }
//
// private static class SkinPainter extends CachedPainter {
// SkinPainter() {
// super(30);
// flush();
// }
//
// protected void paintToImage(Component c, Image image, Graphics g,
// int w, int h, Object[] args) {
// Skin skin = (Skin)args[0];
// Part part = skin.part;
// State state = (State)args[1];
// if (state == null) {
// state = skin.state;
// }
// if (c == null) {
// c = skin.component;
// }
// WritableRaster raster = ((BufferedImage)image).getRaster();
// DataBufferInt buffer = (DataBufferInt)raster.getDataBuffer();
// ThemeReader.paintBackground(buffer.getData(),
// part.getControlName(c), part.getValue(),
// State.getValue(part, state),
// 0, 0, w, h, w);
// }
//
// protected Image createImage(Component c, int w, int h,
// GraphicsConfiguration config, Object[] args) {
// return new BufferedImage(w, h, BufferedImage.TYPE_INT_ARGB);
// }
// }
//
// static class GlyphButton extends JButton {
// private Skin skin;
//
// public GlyphButton(Component parent, Part part) {
// BEXPStyle xp = getXP();
// skin = xp.getSkin(parent, part);
// setBorder(null);
// setContentAreaFilled(false);
// }
//
// public boolean isFocusTraversable() {
// return false;
// }
//
// protected State getState() {
// State state = State.NORMAL;
// if (!isEnabled()) {
// state = State.DISABLED;
// } else if (getModel().isPressed()) {
// state = State.PRESSED;
// } else if (getModel().isRollover()) {
// state = State.HOT;
// }
// return state;
// }
//
// public void paintComponent(Graphics g) {
// Dimension d = getSize();
// skin.paintSkin(g, 0, 0, d.width, d.height, getState());
// }
//
// public void setPart(Component parent, Part part) {
// BEXPStyle xp = getXP();
// skin = xp.getSkin(parent, part);
// revalidate();
// repaint();
// }
//
// protected void paintBorder(Graphics g) {
// }
//
// public Dimension getPreferredSize() {
// return new Dimension(16, 16);
// }
//
// public Dimension getMinimumSize() {
// return new Dimension(5, 5);
// }
//
// public Dimension getMaximumSize() {
// return new Dimension(Integer.MAX_VALUE, Integer.MAX_VALUE);
// }
// }
//
// // Private constructor
// private BEXPStyle() {
// flatMenus = getSysBoolean(Prop.FLATMENUS);
//
// colorMap = new HashMap<String, Color>();
// borderMap = new HashMap<String, Border>();
// // Note: All further access to the maps must be synchronized
// }
//
//
// private boolean getBoolean(Component c, Part part, State state, Prop prop) {
// return ThemeReader.getBoolean(part.getControlName(c), part.getValue(),
// State.getValue(part, state),
// prop.getValue());
// }
//
// private static Dimension getPartSize(Part part, State state) {
// return ThemeReader.getPartSize(part.getControlName(null), part.getValue(),
// State.getValue(part, state));
// }
//
// private static boolean getSysBoolean(Prop prop) {
// // We can use any widget name here, I guess.
// return ThemeReader.getSysBoolean("window", prop.getValue());
// }
//}
| JackJiang2011/beautyeye | src_all/beautyeye_v3.7_utf8/src/org/jb2011/lnf/beautyeye/winlnfutils/d/BEXPStyle.java | Java | apache-2.0 | 26,390 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.redis.internal.executor.transactions;
import io.netty.buffer.ByteBuf;
import java.util.Queue;
import org.apache.geode.cache.CacheTransactionManager;
import org.apache.geode.cache.CommitConflictException;
import org.apache.geode.cache.TransactionId;
import org.apache.geode.redis.internal.Coder;
import org.apache.geode.redis.internal.Command;
import org.apache.geode.redis.internal.ExecutionHandlerContext;
import org.apache.geode.redis.internal.RedisConstants;
public class ExecExecutor extends TransactionExecutor {

  /**
   * Handles the Redis EXEC command: resumes the transaction suspended for this
   * connection, rolls it back when any queued command recorded an error, otherwise
   * commits it, and replies with an array of the queued commands' responses.
   * A commit conflict produces an error reply instead.
   */
  @Override
  public void executeCommand(Command command, ExecutionHandlerContext context) {
    CacheTransactionManager transactionManager = context.getCacheTransactionManager();

    // EXEC without a preceding MULTI yields a nil reply.
    if (!context.hasTransaction()) {
      command.setResponse(Coder.getNilResponse(context.getByteBufAllocator()));
      return;
    }

    TransactionId suspendedId = context.getTransactionID();
    transactionManager.resume(suspendedId);

    if (anyQueuedCommandFailed(context.getTransactionQueue())) {
      transactionManager.rollback();
    } else {
      try {
        transactionManager.commit();
      } catch (CommitConflictException e) {
        command.setResponse(Coder.getErrorResponse(context.getByteBufAllocator(),
            RedisConstants.ERROR_COMMIT_CONFLICT));
        context.clearTransaction();
        return;
      }
    }

    command.setResponse(assembleExecReply(context));
    context.clearTransaction();
  }

  /**
   * Builds the RESP array reply containing the already-recorded response of every
   * command queued in the transaction, in queue order.
   */
  private ByteBuf assembleExecReply(ExecutionHandlerContext context) {
    Queue<Command> queuedCommands = context.getTransactionQueue();
    ByteBuf reply = context.getByteBufAllocator().buffer();
    reply.writeByte(Coder.ARRAY_ID);
    reply.writeBytes(Coder.intToBytes(queuedCommands.size()));
    reply.writeBytes(Coder.CRLFar);
    for (Command queued : queuedCommands) {
      reply.writeBytes(queued.getResponse());
    }
    return reply;
  }

  /**
   * Returns true when at least one command in the queue recorded an error.
   */
  private boolean anyQueuedCommandFailed(Queue<Command> queue) {
    for (Command queued : queue) {
      if (queued.hasError()) {
        return true;
      }
    }
    return false;
  }
}
| charliemblack/geode | geode-core/src/main/java/org/apache/geode/redis/internal/executor/transactions/ExecExecutor.java | Java | apache-2.0 | 2,868 |
import type { Config } from '../src/core/config'
import type VNode from '../src/core/vdom/vnode'
import type Watcher from '../src/core/observer/watcher'
// Flow type declaration describing the shape of a component instance
// (`_isVue: true`). This is a compile-time declaration only — no executable
// code — used by Flow to type-check the sources that import it.
declare interface Component {
  // constructor information
  static cid: number;
  static options: Object;
  // extend
  static extend: (options: Object) => Function;
  static superOptions: Object;
  static extendOptions: Object;
  static sealedOptions: Object;
  static super: Class<Component>;
  // assets
  static directive: (id: string, def?: Function | Object) => Function | Object | void;
  static component: (id: string, def?: Class<Component> | Object) => Class<Component>;
  static filter: (id: string, def?: Function) => Function | void;

  // public properties
  $el: any; // so that we can attach __vue__ to it
  $data: Object;
  $options: ComponentOptions;
  $parent: Component | void;
  $root: Component;
  $children: Array<Component>;
  $refs: { [key: string]: Component | Element | Array<Component | Element> | void };
  $slots: { [key: string]: Array<VNode> };
  $scopedSlots: { [key: string]: () => VNodeChildren };
  $vnode: VNode; // the placeholder node for the component in parent's render tree
  $isServer: boolean;
  $props: Object;

  // public methods
  $mount: (el?: Element | string, hydrating?: boolean) => Component;
  $forceUpdate: () => void;
  $destroy: () => void;
  $set: <T>(target: Object | Array<T>, key: string | number, val: T) => T;
  $delete: <T>(target: Object | Array<T>, key: string | number) => void;
  $watch: (expOrFn: string | Function, cb: Function, options?: Object) => Function;
  $on: (event: string | Array<string>, fn: Function) => Component;
  $once: (event: string, fn: Function) => Component;
  $off: (event?: string | Array<string>, fn?: Function) => Component;
  $emit: (event: string, ...args: Array<mixed>) => Component;
  $nextTick: (fn: Function) => void | Promise<*>;
  $createElement: (tag?: string | Component, data?: Object, children?: VNodeChildren) => VNode;

  // private properties (underscore-prefixed; internal to the framework)
  _uid: number;
  _name: string; // this only exists in dev mode
  _isVue: true;
  _self: Component;
  _renderProxy: Component;
  _renderContext: ?Component;
  _watcher: Watcher;
  _watchers: Array<Watcher>;
  _computedWatchers: { [key: string]: Watcher };
  _data: Object;
  _props: Object;
  _events: Object;
  _inactive: boolean | null;
  _directInactive: boolean;
  _isMounted: boolean;
  _isDestroyed: boolean;
  _isBeingDestroyed: boolean;
  _vnode: ?VNode; // self root node
  _staticTrees: ?Array<VNode>;
  _hasHookEvent: boolean;
  _provided: ?Object;

  // private methods
  // lifecycle
  _init: Function;
  _mount: (el?: Element | void, hydrating?: boolean) => Component;
  _update: (vnode: VNode, hydrating?: boolean) => void;

  // rendering
  _render: () => VNode;
  __patch__: (a: Element | VNode | void, b: VNode) => any;

  // createElement
  // _c is internal that accepts `normalizationType` optimization hint
  _c: (vnode?: VNode, data?: VNodeData, children?: VNodeChildren, normalizationType?: number) => VNode | void;
  // renderStatic
  _m: (index: number, isInFor?: boolean) => VNode | VNodeChildren;
  // markOnce
  _o: (vnode: VNode | Array<VNode>, index: number, key: string) => VNode | VNodeChildren;
  // toString
  _s: (value: mixed) => string;
  // text to VNode
  _v: (value: string | number) => VNode;
  // toNumber
  _n: (value: string) => number | string;
  // empty vnode
  _e: () => VNode;
  // loose equal
  _q: (a: mixed, b: mixed) => boolean;
  // loose indexOf
  _i: (arr: Array<mixed>, val: mixed) => number;
  // resolveFilter
  _f: (id: string) => Function;
  // renderList
  _l: (val: mixed, render: Function) => ?Array<VNode>;
  // renderSlot
  _t: (name: string, fallback: ?Array<VNode>, props: ?Object) => ?Array<VNode>;
  // apply v-bind object
  _b: (data: any, value: any, asProp?: boolean) => VNodeData;
  // check custom keyCode
  _k: (eventKeyCode: number, key: string, builtInAlias: number | Array<number> | void) => boolean;
  // resolve scoped slots
  _u: (scopedSlots: ScopedSlotsData, res?: Object) => { [key: string]: Function };

  // allow dynamic method registration
  [key: string]: any
}
| search5/nanumlectures | static/bower_components/vue/flow/component.js | JavaScript | apache-2.0 | 4,160 |
/* ****************************************************************************
*
* Copyright (c) Microsoft Corporation.
*
* This source code is subject to terms and conditions of the Apache License, Version 2.0. A
* copy of the license can be found in the License.html file at the root of this distribution. If
* you cannot locate the Apache License, Version 2.0, please send an email to
* vspython@microsoft.com. By using this source code in any fashion, you are agreeing to be bound
* by the terms of the Apache License, Version 2.0.
*
* You must not remove this notice, or any other, from this software.
*
* ***************************************************************************/
using System;
using System.Globalization;
using System.Runtime.InteropServices;
using EnvDTE;
using Microsoft.VisualStudio;
using Microsoft.VisualStudio.Shell.Interop;
namespace Microsoft.VisualStudioTools.Project.Automation {
    /// <summary>
    /// Automation-model (EnvDTE) wrapper that exposes a <see cref="ProjectNode"/>
    /// hierarchy node to Visual Studio automation clients as an <see cref="EnvDTE.Project"/>.
    /// </summary>
    [ComVisible(true)]
    public class OAProject : EnvDTE.Project, EnvDTE.ISupportVSProperties {
        #region fields
        private ProjectNode project;
        // Lazily created by the ConfigurationManager property; released in Dispose().
        EnvDTE.ConfigurationManager configurationManager;
        #endregion
        #region properties
        public object Project {
            get { return this.project; }
        }
        internal ProjectNode ProjectNode {
            get { return this.project; }
        }
        #endregion
        #region ctor
        internal OAProject(ProjectNode project) {
            this.project = project;
        }
        #endregion
        #region EnvDTE.Project
        /// <summary>
        /// Gets or sets the name of the object.
        /// </summary>
        public virtual string Name {
            get {
                return project.Caption;
            }
            set {
                CheckProjectIsValid();
                using (AutomationScope scope = new AutomationScope(this.project.Site)) {
                    // Renames must happen on the UI thread.
                    ProjectNode.Site.GetUIThread().Invoke(() => {
                        project.SetEditLabel(value);
                    });
                }
            }
        }
        /// <summary>
        /// Releases the cached configuration manager.
        /// </summary>
        public void Dispose() {
            configurationManager = null;
        }
        /// <summary>
        /// Microsoft Internal Use Only. Gets the file name of the project.
        /// </summary>
        public virtual string FileName {
            get {
                return project.ProjectFile;
            }
        }
        /// <summary>
        /// Microsoft Internal Use Only. Specifies if the project is dirty.
        /// </summary>
        public virtual bool IsDirty {
            get {
                int dirty;
                ErrorHandler.ThrowOnFailure(project.IsDirty(out dirty));
                return dirty != 0;
            }
            set {
                CheckProjectIsValid();
                using (AutomationScope scope = new AutomationScope(this.project.Site)) {
                    ProjectNode.Site.GetUIThread().Invoke(() => {
                        project.isDirty = value;
                    });
                }
            }
        }
        /// <summary>
        /// Throws <see cref="InvalidOperationException"/> if the underlying project
        /// is null, has no site, or has been closed.
        /// </summary>
        internal void CheckProjectIsValid() {
            if (this.project == null || this.project.Site == null || this.project.IsClosed) {
                throw new InvalidOperationException();
            }
        }
        /// <summary>
        /// Gets the Projects collection containing the Project object supporting this property.
        /// </summary>
        public virtual EnvDTE.Projects Collection {
            get { return null; }
        }
        /// <summary>
        /// Gets the top-level extensibility object.
        /// </summary>
        public virtual EnvDTE.DTE DTE {
            get {
                return (EnvDTE.DTE)this.project.Site.GetService(typeof(EnvDTE.DTE));
            }
        }
        /// <summary>
        /// Gets a GUID string indicating the kind or type of the object.
        /// </summary>
        public virtual string Kind {
            get { return project.ProjectGuid.ToString("B"); }
        }
        /// <summary>
        /// Gets a ProjectItems collection for the Project object.
        /// </summary>
        public virtual EnvDTE.ProjectItems ProjectItems {
            get {
                return new OAProjectItems(this, project);
            }
        }
        /// <summary>
        /// Gets a collection of all properties that pertain to the Project object.
        /// </summary>
        public virtual EnvDTE.Properties Properties {
            get {
                return new OAProperties(this.project.NodeProperties);
            }
        }
        /// <summary>
        /// Returns the name of project as a relative path from the directory containing the solution file to the project file
        /// </summary>
        /// <value>Unique name if project is in a valid state. Otherwise null</value>
        public virtual string UniqueName {
            get {
                if (this.project == null || this.project.IsClosed) {
                    return null;
                } else {
                    // Get Solution service
                    IVsSolution solution = this.project.GetService(typeof(IVsSolution)) as IVsSolution;
                    Utilities.CheckNotNull(solution);
                    // Ask solution for unique name of project
                    string uniqueName;
                    ErrorHandler.ThrowOnFailure(
                        solution.GetUniqueNameOfProject(
                            project.GetOuterInterface<IVsHierarchy>(),
                            out uniqueName
                        )
                    );
                    return uniqueName;
                }
            }
        }
        /// <summary>
        /// Gets an interface or object that can be accessed by name at run time.
        /// </summary>
        public virtual object Object {
            get { return this.project.Object; }
        }
        /// <summary>
        /// Gets the requested Extender object if it is available for this object.
        /// </summary>
        /// <param name="name">The name of the extender object.</param>
        /// <returns>An Extender object. </returns>
        public virtual object get_Extender(string name) {
            Utilities.ArgumentNotNull("name", name);
            return DTE.ObjectExtenders.GetExtender(project.NodeProperties.ExtenderCATID.ToUpper(), name, project.NodeProperties);
        }
        /// <summary>
        /// Gets a list of available Extenders for the object.
        /// </summary>
        public virtual object ExtenderNames {
            get { return DTE.ObjectExtenders.GetExtenderNames(project.NodeProperties.ExtenderCATID.ToUpper(), project.NodeProperties); }
        }
        /// <summary>
        /// Gets the Extender category ID (CATID) for the object.
        /// </summary>
        public virtual string ExtenderCATID {
            get { return project.NodeProperties.ExtenderCATID; }
        }
        /// <summary>
        /// Gets the full path and name of the Project object's file.
        /// </summary>
        public virtual string FullName {
            get {
                string filename;
                uint format;
                ErrorHandler.ThrowOnFailure(project.GetCurFile(out filename, out format));
                return filename;
            }
        }
        /// <summary>
        /// Gets or sets a value indicating whether the object has not been modified since last being saved or opened.
        /// </summary>
        public virtual bool Saved {
            get {
                return !this.IsDirty;
            }
            set {
                IsDirty = !value;
            }
        }
        /// <summary>
        /// Gets the ConfigurationManager object for this Project.
        /// </summary>
        public virtual EnvDTE.ConfigurationManager ConfigurationManager {
            get {
                return ProjectNode.Site.GetUIThread().Invoke(() => {
                    if (this.configurationManager == null) {
                        IVsExtensibility3 extensibility = this.project.Site.GetService(typeof(IVsExtensibility)) as IVsExtensibility3;
                        Utilities.CheckNotNull(extensibility);
                        object configurationManagerAsObject;
                        ErrorHandler.ThrowOnFailure(extensibility.GetConfigMgr(
                            this.project.GetOuterInterface<IVsHierarchy>(),
                            VSConstants.VSITEMID_ROOT,
                            out configurationManagerAsObject
                        ));
                        Utilities.CheckNotNull(configurationManagerAsObject);
                        this.configurationManager = (ConfigurationManager)configurationManagerAsObject;
                    }
                    return this.configurationManager;
                });
            }
        }
        /// <summary>
        /// Gets the Globals object containing add-in values that may be saved in the solution (.sln) file, the project file, or in the user's profile data.
        /// </summary>
        public virtual EnvDTE.Globals Globals {
            get { return null; }
        }
        /// <summary>
        /// Gets a ProjectItem object for the nested project in the host project.
        /// </summary>
        public virtual EnvDTE.ProjectItem ParentProjectItem {
            get { return null; }
        }
        /// <summary>
        /// Gets the CodeModel object for the project.
        /// </summary>
        public virtual EnvDTE.CodeModel CodeModel {
            get { return null; }
        }
        /// <summary>
        /// Saves the project.
        /// </summary>
        /// <param name="fileName">The file name with which to save the solution, project, or project item. If the file exists, it is overwritten</param>
        /// <exception cref="InvalidOperationException">Is thrown if the save operation fails.</exception>
        /// <exception cref="ArgumentNullException">Is thrown if fileName is null.</exception>
        public virtual void SaveAs(string fileName) {
            ProjectNode.Site.GetUIThread().Invoke(() => {
                this.DoSave(true, fileName);
            });
        }
        /// <summary>
        /// Saves the project
        /// </summary>
        /// <param name="fileName">The file name of the project</param>
        /// <exception cref="InvalidOperationException">Is thrown if the save operation fails.</exception>
        /// <exception cref="ArgumentNullException">Is thrown if fileName is null.</exception>
        public virtual void Save(string fileName) {
            ProjectNode.Site.GetUIThread().Invoke(() => {
                this.DoSave(false, fileName);
            });
        }
        /// <summary>
        /// Removes the project from the current solution.
        /// </summary>
        public virtual void Delete() {
            CheckProjectIsValid();
            using (AutomationScope scope = new AutomationScope(this.project.Site)) {
                ProjectNode.Site.GetUIThread().Invoke(() => {
                    this.project.Remove(false);
                });
            }
        }
        #endregion
        #region ISupportVSProperties methods
        /// <summary>
        /// Microsoft Internal Use Only.
        /// </summary>
        public virtual void NotifyPropertiesDelete() {
        }
        #endregion
        #region private methods
        /// <summary>
        /// Saves or Saves-As the project.
        /// </summary>
        /// <param name="isCalledFromSaveAs">Flag determining which Save method was called: SaveAs (true) or Save (false).</param>
        /// <param name="fileName">The name of the project file.</param>
        private void DoSave(bool isCalledFromSaveAs, string fileName) {
            Utilities.ArgumentNotNull("fileName", fileName);
            CheckProjectIsValid();
            using (AutomationScope scope = new AutomationScope(this.project.Site)) {
                // For Save with an empty file name, delegate to the solution so the
                // project is saved under its current name.
                if (!isCalledFromSaveAs && string.IsNullOrEmpty(fileName)) {
                    // Use the solution service to save the project file. Note that we have to use the service
                    // so that all the shell's elements are aware that we are inside a save operation and
                    // all the file change listeners registered by the shell are suspended.
                    // Get the cookie of the project file from the RDT.
                    IVsRunningDocumentTable rdt = this.project.Site.GetService(typeof(SVsRunningDocumentTable)) as IVsRunningDocumentTable;
                    Utilities.CheckNotNull(rdt);
                    IVsHierarchy hier;
                    uint itemid;
                    IntPtr unkData;
                    uint cookie;
                    ErrorHandler.ThrowOnFailure(rdt.FindAndLockDocument((uint)_VSRDTFLAGS.RDT_NoLock, this.project.Url, out hier,
                        out itemid, out unkData, out cookie));
                    if (IntPtr.Zero != unkData) {
                        // Release the doc-data COM reference handed back by the RDT.
                        Marshal.Release(unkData);
                    }
                    // Verify that we have a cookie.
                    if (0 == cookie) {
                        // This should never happen because if the project is open, then it must be in the RDT.
                        throw new InvalidOperationException();
                    }
                    // Get the IVsHierarchy for the project.
                    IVsHierarchy prjHierarchy = project.GetOuterInterface<IVsHierarchy>();
                    // Now get the solution.
                    IVsSolution solution = this.project.Site.GetService(typeof(SVsSolution)) as IVsSolution;
                    // Verify that we have both solution and hierarchy.
                    Utilities.CheckNotNull(prjHierarchy);
                    Utilities.CheckNotNull(solution);
                    ErrorHandler.ThrowOnFailure(solution.SaveSolutionElement((uint)__VSSLNSAVEOPTIONS.SLNSAVEOPT_SaveIfDirty, prjHierarchy, cookie));
                } else {
                    // We need to make some checks before we can call the save method on the project node.
                    // This is mainly because it is now us and not the caller like in case of SaveAs or Save that should validate the file name.
                    // The IPersistFileFormat.Save method only does a validation that is necessary to be performed. Example: in case of Save As the
                    // file name itself is not validated only the whole path. (thus a file name like file\file is accepted, since as a path is valid)
                    // 1. The file name has to be valid.
                    string fullPath = fileName;
                    try {
                        fullPath = CommonUtils.GetAbsoluteFilePath(((ProjectNode)Project).ProjectFolder, fileName);
                    }
                    // We want to be consistent in the error message and exception we throw. fileName could be for example #¤&%"¤&"% and that would trigger an ArgumentException on Path.IsRooted.
                    catch (ArgumentException ex) {
                        throw new InvalidOperationException(SR.GetString(SR.ErrorInvalidFileName, fileName), ex);
                    }
                    // It might be redundant but we validate the file and the full path of the file being valid. The SaveAs would also validate the path.
                    // If we decide that this is performance critical then this should be refactored.
                    Utilities.ValidateFileName(this.project.Site, fullPath);
                    if (!isCalledFromSaveAs) {
                        // 2. The file name has to be the same
                        if (!CommonUtils.IsSamePath(fullPath, this.project.Url)) {
                            throw new InvalidOperationException();
                        }
                        ErrorHandler.ThrowOnFailure(this.project.Save(fullPath, 1, 0));
                    } else {
                        ErrorHandler.ThrowOnFailure(this.project.Save(fullPath, 0, 0));
                    }
                }
            }
        }
        #endregion
    }
    /// <summary>
    /// Specifies an alternate name for a property which cannot be fully captured using
    /// .NET attribute names.
    /// </summary>
    [AttributeUsage(AttributeTargets.Property, AllowMultiple = false)]
    class PropertyNameAttribute : Attribute {
        // The alternate property name supplied at the attribute's use site.
        public readonly string Name;
        /// <summary>
        /// Creates the attribute with the alternate name to associate with the property.
        /// </summary>
        /// <param name="name">The alternate name for the decorated property.</param>
        public PropertyNameAttribute(string name) {
            Name = name;
        }
    }
}
| munyirik/nodejstools | Common/Product/SharedProject/Automation/OAProject.cs | C# | apache-2.0 | 16,787 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.isis.core.metamodel.services.appfeat;
import java.util.SortedSet;
import com.google.common.base.Function;
import com.google.common.base.Predicate;
import com.google.common.collect.Iterables;
import com.google.common.collect.Sets;
import org.apache.isis.applib.IsisApplibModule;
import org.apache.isis.applib.annotation.Programmatic;
import org.apache.isis.applib.annotation.SemanticsOf;
import org.apache.isis.applib.services.appfeat.ApplicationFeatureRepository;
import org.apache.isis.applib.services.appfeat.ApplicationMemberType;
import org.apache.isis.applib.util.ObjectContracts;
/**
* Canonical application feature, identified by {@link ApplicationFeatureId},
* and wired together with other application features and cached by {@link ApplicationFeatureRepository}.
*
* <p>
* Note that this is NOT a view model; instead it can be converted to a string using methods of
* {@link ApplicationFeatureRepository}, eg {@link ApplicationFeatureRepository#classNamesContainedIn(String, ApplicationMemberType)}.
* </p>
*/
public class ApplicationFeature implements Comparable<ApplicationFeature> {
    public static abstract class PropertyDomainEvent<T> extends IsisApplibModule.PropertyDomainEvent<ApplicationFeature, T> {}
    public static abstract class CollectionDomainEvent<T> extends IsisApplibModule.CollectionDomainEvent<ApplicationFeature, T> {}
    public static abstract class ActionDomainEvent extends IsisApplibModule.ActionDomainEvent<ApplicationFeature> {}
    //region > constants
    // using same value for all to neaten up rendering
    public static final int TYPICAL_LENGTH_PKG_FQN = 50;
    public static final int TYPICAL_LENGTH_CLS_NAME = 50;
    public static final int TYPICAL_LENGTH_MEMBER_NAME = 50;
    //endregion
    //region > constructors
    public ApplicationFeature() {
        this(null);
    }
    public ApplicationFeature(final ApplicationFeatureId featureId) {
        setFeatureId(featureId);
    }
    //endregion
    //region > featureId
    private ApplicationFeatureId featureId;
    @Programmatic
    public ApplicationFeatureId getFeatureId() {
        return featureId;
    }
    public void setFeatureId(final ApplicationFeatureId applicationFeatureId) {
        this.featureId = applicationFeatureId;
    }
    //endregion
    //region > memberType
    private ApplicationMemberType memberType;
    /**
     * Only for {@link ApplicationFeatureType#MEMBER member}s.
     */
    @Programmatic
    public ApplicationMemberType getMemberType() {
        return memberType;
    }
    public void setMemberType(final ApplicationMemberType memberType) {
        this.memberType = memberType;
    }
    //endregion
    //region > returnTypeName (for: properties, collections, actions)
    private String returnTypeName;
    /**
     * Only for {@link ApplicationMemberType#PROPERTY property},
     * {@link ApplicationMemberType#COLLECTION collection} and
     * {@link ApplicationMemberType#ACTION action} members.
     */
    @Programmatic
    public String getReturnTypeName() {
        return returnTypeName;
    }
    public void setReturnTypeName(final String returnTypeName) {
        this.returnTypeName = returnTypeName;
    }
    //endregion
    //region > contributed (for: properties, collections, actions)
    private boolean contributed;
    @Programmatic
    public boolean isContributed() {
        return contributed;
    }
    public void setContributed(final boolean contributed) {
        this.contributed = contributed;
    }
    //endregion
    //region > derived (properties and collections)
    private Boolean derived;
    /**
     * Only for {@link ApplicationMemberType#PROPERTY} and {@link ApplicationMemberType#COLLECTION}
     */
    @Programmatic
    public Boolean isDerived() {
        return derived;
    }
    public void setDerived(final Boolean derived) {
        this.derived = derived;
    }
    //endregion
    //region > propertyMaxLength (properties only)
    private Integer propertyMaxLength;
    /**
     * Only for {@link ApplicationMemberType#PROPERTY property} members.
     */
    @Programmatic
    public Integer getPropertyMaxLength() {
        return propertyMaxLength;
    }
    public void setPropertyMaxLength(final Integer propertyMaxLength) {
        this.propertyMaxLength = propertyMaxLength;
    }
    //endregion
    //region > propertyTypicalLength (properties only)
    private Integer propertyTypicalLength;
    /**
     * Only for {@link ApplicationMemberType#PROPERTY property} members.
     */
    @Programmatic
    public Integer getPropertyTypicalLength() {
        return propertyTypicalLength;
    }
    public void setPropertyTypicalLength(final Integer propertyTypicalLength) {
        this.propertyTypicalLength = propertyTypicalLength;
    }
    //endregion
    //region > actionSemantics (actions only)
    private SemanticsOf actionSemantics;
    /**
     * Only for {@link ApplicationMemberType#ACTION action}s.
     */
    @Programmatic
    public SemanticsOf getActionSemantics() {
        return actionSemantics;
    }
    public void setActionSemantics(final SemanticsOf actionSemantics) {
        this.actionSemantics = actionSemantics;
    }
    //endregion
    //region > packages: Contents
    private final SortedSet<ApplicationFeatureId> contents = Sets.newTreeSet();
    @Programmatic
    public SortedSet<ApplicationFeatureId> getContents() {
        // only valid for package features
        ApplicationFeatureType.ensurePackage(this.getFeatureId());
        return contents;
    }
    @Programmatic
    public void addToContents(final ApplicationFeatureId contentId) {
        // a package may only contain packages and classes
        ApplicationFeatureType.ensurePackage(this.getFeatureId());
        ApplicationFeatureType.ensurePackageOrClass(contentId);
        this.contents.add(contentId);
    }
    //endregion
    //region > classes: Properties, Collections, Actions
    private final SortedSet<ApplicationFeatureId> properties = Sets.newTreeSet();
    @Programmatic
    public SortedSet<ApplicationFeatureId> getProperties() {
        ApplicationFeatureType.ensureClass(this.getFeatureId());
        return properties;
    }
    private final SortedSet<ApplicationFeatureId> collections = Sets.newTreeSet();
    @Programmatic
    public SortedSet<ApplicationFeatureId> getCollections() {
        ApplicationFeatureType.ensureClass(this.getFeatureId());
        return collections;
    }
    private final SortedSet<ApplicationFeatureId> actions = Sets.newTreeSet();
    @Programmatic
    public SortedSet<ApplicationFeatureId> getActions() {
        ApplicationFeatureType.ensureClass(this.getFeatureId());
        return actions;
    }
    @Programmatic
    public void addToMembers(final ApplicationFeatureId memberId, final ApplicationMemberType memberType) {
        // only class features hold members
        ApplicationFeatureType.ensureClass(this.getFeatureId());
        ApplicationFeatureType.ensureMember(memberId);
        membersOf(memberType).add(memberId);
    }
    @Programmatic
    public SortedSet<ApplicationFeatureId> membersOf(final ApplicationMemberType memberType) {
        ApplicationFeatureType.ensureClass(this.getFeatureId());
        switch (memberType) {
            case PROPERTY:
                return properties;
            case COLLECTION:
                return collections;
            default: // case ACTION:
                return actions;
        }
    }
    //endregion
    //region > Functions
    public static class Functions {
        private Functions(){}
        public static final Function<? super ApplicationFeature, ? extends String> GET_FQN = new Function<ApplicationFeature, String>() {
            @Override
            public String apply(final ApplicationFeature input) {
                return input.getFeatureId().getFullyQualifiedName();
            }
        };
        public static final Function<ApplicationFeature, ApplicationFeatureId> GET_ID =
                new Function<ApplicationFeature, ApplicationFeatureId>() {
                    @Override
                    public ApplicationFeatureId apply(final ApplicationFeature input) {
                        return input.getFeatureId();
                    }
                };
    }
    public static class Predicates {
        private Predicates(){}
        public static Predicate<ApplicationFeature> packageContainingClasses(
                final ApplicationMemberType memberType, final ApplicationFeatureRepositoryDefault applicationFeatures) {
            return new Predicate<ApplicationFeature>() {
                @Override
                public boolean apply(final ApplicationFeature input) {
                    // all the classes in this package
                    final Iterable<ApplicationFeatureId> classIds =
                            Iterables.filter(input.getContents(),
                                    ApplicationFeatureId.Predicates.isClassContaining(memberType, applicationFeatures));
                    return classIds.iterator().hasNext();
                }
            };
        }
    }
    //endregion
    //region > equals, hashCode, compareTo, toString
    // key properties used by ObjectContracts for equals/hashCode/compareTo/toString
    private final static String propertyNames = "featureId";
    @Override
    public int compareTo(final ApplicationFeature other) {
        return ObjectContracts.compare(this, other, propertyNames);
    }
    @Override
    public boolean equals(final Object obj) {
        return ObjectContracts.equals(this, obj, propertyNames);
    }
    @Override
    public int hashCode() {
        return ObjectContracts.hashCode(this, propertyNames);
    }
    @Override
    public String toString() {
        return ObjectContracts.toString(this, propertyNames);
    }
    //endregion
}
| niv0/isis | core/metamodel/src/main/java/org/apache/isis/core/metamodel/services/appfeat/ApplicationFeature.java | Java | apache-2.0 | 10,327 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.isis.core.runtime.authentication.standard;
import java.util.List;
import org.apache.isis.core.commons.components.InstallerAbstract;
import org.apache.isis.core.commons.config.IsisConfiguration;
import org.apache.isis.core.runtime.authentication.AuthenticationManager;
import org.apache.isis.core.runtime.authentication.AuthenticationManagerInstaller;
/**
 * Base installer that wires up an {@link AuthenticationManagerStandard} with
 * the {@link Authenticator}s supplied by a subclass.
 */
public abstract class AuthenticationManagerStandardInstallerAbstract extends InstallerAbstract implements AuthenticationManagerInstaller {

    public AuthenticationManagerStandardInstallerAbstract(
            final String name,
            final IsisConfiguration isisConfiguration) {
        super(name, isisConfiguration);
    }

    /**
     * Creates the standard authentication manager and registers every
     * authenticator returned by {@link #createAuthenticators()}.
     *
     * @return the fully configured authentication manager
     */
    @Override
    public final AuthenticationManager createAuthenticationManager() {
        final AuthenticationManagerStandard manager = createAuthenticationManagerStandard();
        for (final Authenticator authenticator : createAuthenticators()) {
            manager.addAuthenticator(authenticator);
        }
        return manager;
    }

    /**
     * Hook: subclasses may substitute a specialized manager.
     */
    protected AuthenticationManagerStandard createAuthenticationManagerStandard() {
        return new AuthenticationManagerStandard(getConfiguration());
    }

    /**
     * Hook method.
     *
     * @return the authenticators to register with the manager
     */
    protected abstract List<Authenticator> createAuthenticators();

    @Override
    public List<Class<?>> getTypes() {
        return listOf(AuthenticationManager.class);
    }
}
| niv0/isis | core/metamodel/src/main/java/org/apache/isis/core/runtime/authentication/standard/AuthenticationManagerStandardInstallerAbstract.java | Java | apache-2.0 | 2,324 |
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.systemui.volume;
import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.content.Context;
import android.view.View;
import android.view.animation.AnimationUtils;
import android.view.animation.Interpolator;
/**
 * Plays a small "pulse" on a view: scales it up briefly, then eases it back
 * to its resting size.
 */
public class IconPulser {
    private static final float PULSE_SCALE = 1.1f;

    private final Interpolator mFastOutSlowInInterpolator;

    public IconPulser(Context context) {
        mFastOutSlowInInterpolator = AnimationUtils.loadInterpolator(context,
                android.R.interpolator.fast_out_slow_in);
    }

    public void start(final View target) {
        // Skip when there is no target, or a pulse is already in flight
        // (scaleX differs from 1 while the animation runs).
        if (target == null || target.getScaleX() != 1) {
            return;
        }
        target.animate().cancel();
        // Second phase: once grown, ease back to resting scale and clear the
        // listener so it does not fire again on the return animation.
        final AnimatorListenerAdapter shrinkBack = new AnimatorListenerAdapter() {
            @Override
            public void onAnimationEnd(Animator animation) {
                target.animate().scaleX(1).scaleY(1).setListener(null);
            }
        };
        target.animate()
                .scaleX(PULSE_SCALE)
                .scaleY(PULSE_SCALE)
                .setInterpolator(mFastOutSlowInInterpolator)
                .setListener(shrinkBack);
    }
}
| Ant-Droid/android_frameworks_base_OLD | packages/SystemUI/src/com/android/systemui/volume/IconPulser.java | Java | apache-2.0 | 1,800 |
/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
// THIS CODE IS GENERATED - DO NOT MODIFY
// See angular/tools/gulp-tasks/cldr/extract.js
// Plural-category selector for this Arabic locale (CLDR categories, by
// returned index: 0=zero, 1=one, 2=two, 3=few, 4=many, 5=other).
function plural(n) {
    var lastTwo = n % 100;
    // The few/many ranges only apply when n mod 100 is a whole number.
    var wholeMod = lastTwo === Math.floor(lastTwo);
    if (n === 0) return 0;
    if (n === 1) return 1;
    if (n === 2) return 2;
    if (wholeMod && lastTwo >= 3 && lastTwo <= 10) return 3;
    if (wholeMod && lastTwo >= 11 && lastTwo <= 99) return 4;
    return 5;
}
// Packed locale data for ar-JO consumed by Angular's i18n runtime.
// NOTE(review): the structure is positional and generated from CLDR —
// the bare commas create intentional sparse-array holes that the runtime
// treats as fallbacks to earlier entries. Do not reformat or "fix" them.
// The final element is the plural-rule function defined above.
export default [
  'ar-JO',
  [
    ['ص', 'م'],
    ,
  ],
  [['ص', 'م'], , ['صباحًا', 'مساءً']],
  [
    ['ح', 'ن', 'ث', 'ر', 'خ', 'ج', 'س'],
    [
      'الأحد', 'الاثنين', 'الثلاثاء', 'الأربعاء', 'الخميس',
      'الجمعة', 'السبت'
    ],
    ,
    ['أحد', 'إثنين', 'ثلاثاء', 'أربعاء', 'خميس', 'جمعة', 'سبت']
  ],
  ,
  [
    ['ك', 'ش', 'آ', 'ن', 'أ', 'ح', 'ت', 'آ', 'أ', 'ت', 'ت', 'ك'],
    [
      'كانون الثاني', 'شباط', 'آذار', 'نيسان', 'أيار', 'حزيران',
      'تموز', 'آب', 'أيلول', 'تشرين الأول', 'تشرين الثاني',
      'كانون الأول'
    ],
  ],
  ,
  [['ق.م', 'م'], , ['قبل الميلاد', 'ميلادي']], 6, [5, 6],
  ['d\u200f/M\u200f/y', 'dd\u200f/MM\u200f/y', 'd MMMM y', 'EEEE، d MMMM y'],
  ['h:mm a', 'h:mm:ss a', 'h:mm:ss a z', 'h:mm:ss a zzzz'],
  [
    '{1} {0}',
    ,
    ,
  ],
  [
    '.', ',', ';', '\u200e%\u200e', '\u200e+', '\u200e-', 'E', '×', '‰', '∞',
    'ليس رقمًا', ':'
  ],
  ['#,##0.###', '#,##0%', '¤ #,##0.00', '#E0'], 'د.أ.\u200f', 'دينار أردني', plural
];
//# sourceMappingURL=ar-JO.js.map | rospilot/rospilot | share/web_assets/nodejs_deps/node_modules/@angular/common/locales/ar-JO.js | JavaScript | apache-2.0 | 1,996 |
/*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
#include <aws/elasticloadbalancing/model/SetLoadBalancerPoliciesForBackendServerResult.h>
#include <aws/core/utils/xml/XmlSerializer.h>
#include <aws/core/AmazonWebServiceResult.h>
#include <aws/core/utils/StringUtils.h>
#include <utility>
using namespace Aws::ElasticLoadBalancing::Model;
using namespace Aws::Utils::Xml;
using namespace Aws::Utils;
using namespace Aws;
// Default-constructed result: members stay default-initialized until a
// service reply is assigned.
SetLoadBalancerPoliciesForBackendServerResult::SetLoadBalancerPoliciesForBackendServerResult()
{
}
// Construct directly from a service reply by delegating to operator=.
SetLoadBalancerPoliciesForBackendServerResult::SetLoadBalancerPoliciesForBackendServerResult(const AmazonWebServiceResult<XmlDocument>& result)
{
  *this = result;
}
// Populate this result from the XML payload of a service reply.
SetLoadBalancerPoliciesForBackendServerResult& SetLoadBalancerPoliciesForBackendServerResult::operator =(const AmazonWebServiceResult<XmlDocument>& result)
{
  const XmlDocument& payload = result.GetPayload();
  XmlNode root = payload.GetRootElement();

  // This operation's wrapped result element carries no fields; it is
  // located only to mirror the response shape.
  XmlNode wrapped = root.FirstChild("SetLoadBalancerPoliciesForBackendServerResult");
  if(!wrapped.IsNull())
  {
  }

  // Response metadata (request id etc.) always accompanies the reply.
  XmlNode metadata = root.FirstChild("ResponseMetadata");
  m_responseMetadata = metadata;

  return *this;
}
| bmildner/aws-sdk-cpp | aws-cpp-sdk-elasticloadbalancing/source/model/SetLoadBalancerPoliciesForBackendServerResult.cpp | C++ | apache-2.0 | 1,747 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.clients;
/**
* A callback interface for attaching an action to be executed when a request is complete and the corresponding response
* has been received. This handler will also be invoked if there is a disconnection while handling the request.
*/
public interface RequestCompletionHandler {

    /**
     * Invoked when the request is complete: either the response has been
     * received, or there was a disconnection while the request was in flight
     * (per the interface contract above).
     *
     * @param response the client response for the completed request
     */
    void onComplete(ClientResponse response);
}
| guozhangwang/kafka | clients/src/main/java/org/apache/kafka/clients/RequestCompletionHandler.java | Java | apache-2.0 | 1,168 |
from __future__ import print_function
import shlex
import subprocess
import sys
from .config import Configuration
class PkgConfig(object):
    """Queries ``pkg-config`` for the compiler/linker flags of one package.

    All flag categories are fetched eagerly in ``__init__``; the properties
    simply return the cached results.
    """

    class Error(Exception):
        """Raised when information could not be obtained from pkg-config."""

    def __init__(self, package_name):
        """Query pkg-config for information about a package.

        :type package_name: str
        :param package_name: The name of the package to query.
        :raises PkgConfig.Error: When a call to pkg-config fails.
        """
        self.package_name = package_name
        self._cflags = self._call("--cflags")
        self._cflags_only_I = self._call("--cflags-only-I")
        self._cflags_only_other = self._call("--cflags-only-other")
        self._libs = self._call("--libs")
        self._libs_only_l = self._call("--libs-only-l")
        self._libs_only_L = self._call("--libs-only-L")
        self._libs_only_other = self._call("--libs-only-other")

    def _call(self, *pkg_config_args):
        """Run pkg-config with the given arguments for this package.

        :returns: the command's stdout tokenized into a list of flags.
        :raises PkgConfig.Error: if pkg-config exits non-zero, or cannot be
            executed at all (e.g. the binary is missing).
        """
        cmd = [Configuration.current.pkg_config] + list(pkg_config_args) + [self.package_name]
        print("Executing command '{}'".format(cmd), file=sys.stderr)
        try:
            return shlex.split(subprocess.check_output(cmd).decode('utf-8'))
        except subprocess.CalledProcessError as e:
            raise self.Error("pkg-config exited with error code {}".format(e.returncode))
        except OSError as e:
            # Previously a missing/non-executable pkg-config binary leaked an
            # OSError to callers; wrap it so only the documented Error type
            # escapes this class.
            raise self.Error("failed to execute pkg-config: {}".format(e))

    @property
    def swiftc_flags(self):
        """Flags for this package in a format suitable for passing to `swiftc`.

        :rtype: list[str]
        """
        return (
            ["-Xcc {}".format(s) for s in self._cflags_only_other]
            + ["-Xlinker {}".format(s) for s in self._libs_only_other]
            + self._cflags_only_I
            + self._libs_only_L
            + self._libs_only_l)

    @property
    def cflags(self):
        """CFLAGS for this package.

        :rtype: list[str]
        """
        return self._cflags

    @property
    def ldflags(self):
        """LDFLAGS for this package.

        :rtype: list[str]
        """
        return self._libs
| JGiola/swift-corelibs-foundation | lib/pkg_config.py | Python | apache-2.0 | 2,114 |