code stringlengths 3 1.05M | repo_name stringlengths 4 116 | path stringlengths 4 991 | language stringclasses 9
values | license stringclasses 15
values | size int32 3 1.05M |
|---|---|---|---|---|---|
/*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.iot.model.transform;
import javax.annotation.Generated;
import com.amazonaws.SdkClientException;
import com.amazonaws.services.iot.model.*;
import com.amazonaws.protocol.*;
import com.amazonaws.annotation.SdkInternalApi;
/**
 * Marshaller that binds the fields of a {@code ListPolicyPrincipalsRequest} to their
 * wire locations (one HTTP header plus three query-string parameters) through the
 * protocol marshalling framework.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class ListPolicyPrincipalsRequestMarshaller {

    // Field bindings: where each request attribute is placed on the wire.
    private static final MarshallingInfo<String> POLICYNAME_BINDING = MarshallingInfo.builder(MarshallingType.STRING)
            .marshallLocation(MarshallLocation.HEADER).marshallLocationName("x-amzn-iot-policy").build();
    private static final MarshallingInfo<String> MARKER_BINDING = MarshallingInfo.builder(MarshallingType.STRING)
            .marshallLocation(MarshallLocation.QUERY_PARAM).marshallLocationName("marker").build();
    private static final MarshallingInfo<Integer> PAGESIZE_BINDING = MarshallingInfo.builder(MarshallingType.INTEGER)
            .marshallLocation(MarshallLocation.QUERY_PARAM).marshallLocationName("pageSize").build();
    private static final MarshallingInfo<Boolean> ASCENDINGORDER_BINDING = MarshallingInfo.builder(MarshallingType.BOOLEAN)
            .marshallLocation(MarshallLocation.QUERY_PARAM).marshallLocationName("isAscendingOrder").build();

    /** Shared singleton; the marshaller holds no per-request state. */
    private static final ListPolicyPrincipalsRequestMarshaller instance = new ListPolicyPrincipalsRequestMarshaller();

    public static ListPolicyPrincipalsRequestMarshaller getInstance() {
        return instance;
    }

    /**
     * Marshall the given parameter object.
     *
     * @param listPolicyPrincipalsRequest the request to marshall; must not be {@code null}
     * @param protocolMarshaller          sink that receives each (value, binding) pair
     * @throws SdkClientException if the request is {@code null} or marshalling fails
     */
    public void marshall(ListPolicyPrincipalsRequest listPolicyPrincipalsRequest, ProtocolMarshaller protocolMarshaller) {
        if (listPolicyPrincipalsRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }

        try {
            protocolMarshaller.marshall(listPolicyPrincipalsRequest.getPolicyName(), POLICYNAME_BINDING);
            protocolMarshaller.marshall(listPolicyPrincipalsRequest.getMarker(), MARKER_BINDING);
            protocolMarshaller.marshall(listPolicyPrincipalsRequest.getPageSize(), PAGESIZE_BINDING);
            protocolMarshaller.marshall(listPolicyPrincipalsRequest.getAscendingOrder(), ASCENDINGORDER_BINDING);
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
| dagnir/aws-sdk-java | aws-java-sdk-iot/src/main/java/com/amazonaws/services/iot/model/transform/ListPolicyPrincipalsRequestMarshaller.java | Java | apache-2.0 | 3,048 |
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Text;
using System.Linq;
using System.Windows.Forms;
using DevExpress.XtraEditors;
using NhaHang.Code.Helper.BanHang;
using TapHoaCode.GUI.Form1.BanHang;
using GymFitnessOlympic.Models;
using GymFitnessOlympic.Models.Util;
using GymFitnessOlympic.Controller;
using GymFitnessOlympic.Utils;
using GymFitnessOlympic.View.Utils;
namespace TanHongPhat
{
/// <summary>
/// Point-of-sale screen that doubles as the stock-intake screen.
/// When <c>isNhap</c> is true the labels and buttons switch to "Nhập hàng" (intake)
/// mode; otherwise the control sells products. Line items accumulate in
/// <see cref="hoaDon"/> until the user saves (btnSave) or resets (btnReset).
/// </summary>
public partial class FrmBanHang : UserControl
{
    List<SanPham> allSanPham;   // product catalogue for the current gym room
    public HoaDon hoaDon;       // invoice / intake slip currently being composed
    bool isNhap;                // true = stock intake, false = sale
    SanPham hangHienTai;        // product currently selected or scanned

    public FrmBanHang(bool isNhap = false)
    {
        InitializeComponent();
        lblTongTien.Visible = !isNhap;  // running total is only shown when selling
        int phongID = Login1.GetPhongHienTai().MaPhongTap;
        allSanPham = SanPhamController.GetList(phongID);
        lbSanPham.DataSource = allSanPham;
        lbSanPham.DisplayMember = "ListBoxString";
        hoaDon = new HoaDon();
        this.isNhap = isNhap;
        lblTitle.Text = isNhap ? "Nhập hàng" : "Bán hàng";
        if (isNhap)
        {
            btnReset.Text = "Hủy phiếu nhập hiện tại";
            btnSave.Text = "Nhập hàng";
        }
        txtMaVach.Focus();
    }

    private void FrmBanHang_Load(object sender, EventArgs e)
    {
        hoaDon = new HoaDon();
    }

    private void txtTenHangTim_EditValueChanged(object sender, EventArgs e)
    {
    }

    private void cbbNhom_SelectedIndexChanged(object sender, EventArgs e)
    {
        loadData();
    }

    void loadData() { }

    // Selecting a product in the catalogue list pre-fills the entry fields.
    private void lbHang_SelectedIndexChanged(object sender, EventArgs e)
    {
        if (lbSanPham.SelectedItem != null)
        {
            hangHienTai = (SanPham)lbSanPham.SelectedItem;
            chuanBiThemSanPham(hangHienTai);
        }
    }

    // Copy the chosen product into the barcode/name/price fields, quantity reset to 1.
    void chuanBiThemSanPham(SanPham h)
    {
        txtMaVach.Text = h.MaVachSanPham.ToString();
        txtTenHang.Text = h.TenSanPham;
        txtDonGia.Text = h.Gia.ToString();
        spnSoLuong.Value = 1;
    }

    // Validate price/quantity and add (or merge) a line item into the current invoice.
    private void btnThem_Click(object sender, EventArgs e)
    {
        ep.Clear();
        // FIX: guard against no product selected/scanned yet; previously this threw
        // a NullReferenceException further down when hangHienTai was null.
        if (hangHienTai == null)
        {
            MessageBox.Show("Chưa chọn mặt hàng");
            return;
        }
        int soLuong = 0, gia = 0;
        if (txtDonGia.Text == "" || !int.TryParse(txtDonGia.Text, out gia))
        {
            ep.SetError(txtDonGia, "Đơn giá chưa nhập hoặc không hợp lệ");
            txtDonGia.Focus();
            return;
        }
        if (gia < 0)
        {
            ep.SetError(txtDonGia, "Đơn giá phải là số nguyên dương");
            txtDonGia.Focus();
            return;
        }
        if (spnSoLuong.Value <= 0)
        {
            ep.SetError(spnSoLuong, "Số lượng phải là số nguyên dương");
            spnSoLuong.Focus();
            return;
        }
        soLuong = Convert.ToInt32(spnSoLuong.Value);
        ChiTietHoaDon ct = new ChiTietHoaDon()
        {
            SanPham = hangHienTai,
            SoLuong = soLuong,
            Gia = gia
        };
        // Merge with an existing line for the same product, otherwise append.
        // NOTE(review): when merging, only the quantity is accumulated; a newly
        // entered price is ignored — confirm this is the intended behavior.
        var c1 = hoaDon.ChiTietHoaDon.FirstOrDefault(c => c.SanPham.MaSanPham == hangHienTai.MaSanPham);
        if (c1 == null)
        {
            hoaDon.ChiTietHoaDon.Add(ct);
        }
        else
        {
            c1.SoLuong += soLuong;
        }
        updateGridHoaDon();
        capNhatLabelTien();
    }

    // Rebuild the line-item ListView from the invoice contents.
    // Column 0 holds SanPham.MaSanPham, which the edit/delete handlers use as key.
    private void updateGridHoaDon()
    {
        lvHangNhap.Items.Clear();
        foreach (ChiTietHoaDon c in hoaDon.ChiTietHoaDon)
        {
            string[] s = { c.SanPham.MaSanPham.ToString(), c.SanPham.TenSanPham, c.SoLuong.ToString(), c.Gia.ToString(), (c.SoLuong * c.Gia).ToString() };
            ListViewItem i = new ListViewItem(s);
            lvHangNhap.Items.Add(i);
        }
    }

    // Refresh the grand-total label from the invoice lines.
    private void capNhatLabelTien()
    {
        lblTongTien.Text = hoaDon.ChiTietHoaDon.Sum(c => c.SoLuong * c.Gia).ToString().FormatCurrency() + "đ";
    }

    // Open the edit dialog for the selected line item.
    private void btnChiTietSua_Click(object sender, EventArgs e)
    {
        // FIX: SelectedItems[0] throws when nothing is selected; check Count instead
        // (mirrors btnChiTietXoa_Click).
        if (lvHangNhap.SelectedItems.Count > 0)
        {
            var mh = Convert.ToInt32(lvHangNhap.SelectedItems[0].SubItems[0].Text);
            // FIX: look the line up by SanPham.MaSanPham — the value shown in column 0
            // and the key used by the delete handler. The previous lookup used
            // c1.MaSanPham, which is not populated for freshly added lines.
            var c = hoaDon.ChiTietHoaDon.FirstOrDefault(c1 => c1.SanPham.MaSanPham == mh);
            if (c == null)
            {
                return;
            }
            FrmChiTietEdit f = new FrmChiTietEdit(c);
            f.Show();
            f.FormClosed += finishEdit;
        }
        else
        {
            MessageBox.Show("Chưa chọn mục để sửa");
        }
    }

    // Refresh the grid and total after the edit dialog closes.
    void finishEdit(object sender, FormClosedEventArgs e)
    {
        updateForm();
    }

    void updateForm()
    {
        updateGridHoaDon();
        capNhatLabelTien();
    }

    // Discard the current invoice / intake slip and clear the grid.
    private void btnReset_Click(object sender, EventArgs e)
    {
        hoaDon = new HoaDon();
        lvHangNhap.Items.Clear();
        capNhatLabelTien();
    }

    private void btnSave_Click(object sender, EventArgs e)
    {
    }

    // Remove the selected line item from the invoice.
    private void btnChiTietXoa_Click(object sender, EventArgs e)
    {
        if (lvHangNhap.SelectedItems.Count > 0)
        {
            var mh = int.Parse(lvHangNhap.SelectedItems[0].SubItems[0].Text);
            var c = hoaDon.ChiTietHoaDon.FirstOrDefault(c1 => c1.SanPham.MaSanPham == mh);
            if (c != null)
            {
                hoaDon.ChiTietHoaDon.Remove(c);
            }
            updateForm();
        }
        else
        {
            MessageBox.Show("Chưa chọn mục để xóa");
        }
    }

    private void tabPage1_Click(object sender, EventArgs e)
    {
    }

    private void toolStripStatusLabel1_Click(object sender, EventArgs e)
    {
    }

    private void lvHangNhap_DrawItem(object sender, DrawListViewItemEventArgs e)
    {
        // NOTE(review): this indexes lbSanPham (the catalogue) with an index from
        // lvHangNhap (the invoice grid) and discards the result — looks like dead or
        // misdirected code; confirm before relying on it.
        var sp = (SanPham)lbSanPham.Items[e.ItemIndex];
    }

    private void lbSanPham_MeasureItem(object sender, MeasureItemEventArgs e)
    {
    }

    private void lbSanPham_Validating(object sender, CancelEventArgs e)
    {
    }

    // Persist the invoice (sale) or intake slip; sales are stock-checked first.
    private void btnSave_Click_1(object sender, EventArgs e)
    {
        if (!isNhap)
        {
            // Selling: refuse quantities exceeding the current stock.
            var ct = hoaDon.ChiTietHoaDon.FirstOrDefault(c => c.SoLuong > c.SanPham.SoLuongHienTai);
            if (ct != null)
            {
                MessageBox.Show("Mặt hàng " + ct.SanPham.TenSanPham + " có số lượng bán ra yêu cầu lớn hơn số lượng hiện có");
                return;
            }
        }
        if (hoaDon.ChiTietHoaDon.Count > 0)
        {
            var nhanVien = Login1.TaiKhoanHienTai;
            string message = isNhap ? "phiếu nhập" : "hóa đơn";
            hoaDon.IsNhap = isNhap;
            hoaDon.NgayLap = DateTime.Now;
            hoaDon.NhanVien = nhanVien;
            if (HoaDonController.Add(hoaDon) == CODE_RESULT_RETURN.ThanhCong)
            {
                DialogUtils.ShowMessage("Đã tạo " + message);
            }
            else
            {
                DialogUtils.ShowMessage("Có lỗi khi tạo " + message);
            }
            btnReset.PerformClick();
            return;
        }
        else
        {
            MessageBox.Show("Chưa có mặt hàng nào trong " + (isNhap ? "phiếu nhập" : "hóa đơn"));
        }
    }

    // Look a product up by code/barcode and prepare it for adding.
    private void btnQuet_Click(object sender, EventArgs e)
    {
        lblThongBao.Visible = false;
        if (txtTimKiem.Text == "")
        {
            ep.SetError(txtTimKiem, "Mã hàng không được trống");
            txtTimKiem.Focus();
            return;
        }
        var ma = txtTimKiem.Text.Trim();
        hangHienTai = SanPhamController.GetByMa(ma);
        if (hangHienTai != null)
        {
            chuanBiThemSanPham(hangHienTai);
            lblThongBao.Text = "Tìm thấy mặt hàng";
        }
        else
        {
            lblThongBao.Text = "Không tìm thấy mặt hàng có mã này";
        }
        lblThongBao.Visible = true;
    }

    // Enter key in the search box triggers the scan/search button.
    private void txtTenHangTim_KeyPress(object sender, KeyPressEventArgs e)
    {
        if (e.KeyChar == (char)13)
        {
            btnQuet.PerformClick();
        }
    }

    private void lbSanPham_MouseDoubleClick(object sender, MouseEventArgs e)
    {
    }
}
} | hynguyen2610/OlympicGym | GymFitnessOlympic/View/UserControls/TacNghiep/BanHang/FrmBanHang.cs | C# | apache-2.0 | 9,781 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*
* =================================================================================================
*
* This software consists of voluntary contributions made by many individuals on behalf of the
* Apache Software Foundation. For more information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
* +------------------------------------------------------------------------------------------------+
* | License: http://openid-client.buession.com.cn/LICENSE |
* | Author: Yong.Teng <webmaster@buession.com> |
* | Copyright @ 2013-2014 Buession.com Inc. |
* +------------------------------------------------------------------------------------------------+
*/
package com.buession.openid.profile;
import org.pac4j.core.profile.AttributesDefinition;
import org.pac4j.openid.profile.OpenIdProfile;
import com.buession.open.profile.Profile;
/**
 * Base OpenID profile that augments pac4j's {@link OpenIdProfile} with the
 * attributes exposed by {@link Profile} (real name, avatar, location parts,
 * VIP flag). Concrete provider profiles override the getters they can supply.
 *
 * @author Yong.Teng <webmaster@buession.com>
 */
public abstract class BaseOpenIdProfile extends OpenIdProfile implements Profile {

	private static final long serialVersionUID = 7769220276513752800L;

	public String getRealName() {
		return null;
	}

	/**
	 * Resolves the best human-readable name, in order of preference: the
	 * "display_name" attribute, the real name, "first [family]" name, and
	 * finally the raw username.
	 */
	@Override
	public String getDisplayName() {
		final String displayName = (String) getAttribute("display_name");
		if (hasText(displayName)) {
			return displayName;
		}

		final String realName = getRealName();
		if (hasText(realName)) {
			return realName;
		}

		final String firstName = getFirstName();
		if (firstName == null) {
			return getUsername();
		}

		final String familyName = getFamilyName();
		return familyName == null ? firstName : firstName + " " + familyName;
	}

	public String getAvatar() {
		return null;
	}

	/** The picture URL is simply the avatar. */
	@Override
	public String getPictureUrl() {
		return getAvatar();
	}

	public String getCountry() {
		return null;
	}

	public String getProvince() {
		return null;
	}

	public String getCity() {
		return null;
	}

	public String getAddress() {
		return null;
	}

	/** The location is simply the address. */
	@Override
	public String getLocation() {
		return getAddress();
	}

	public boolean getIsVip() {
		return false;
	}

	/**
	 * Returns the attribute definitions of this profile.
	 *
	 * @return always {@code null} here; subclasses may override
	 */
	@Override
	protected AttributesDefinition getAttributesDefinition() {
		return null;
	}

	/** True when the value is non-null and non-empty. */
	private static boolean hasText(final String value) {
		return value != null && value.length() > 0;
	}
}
| eduosi/openid-client | src/main/java/com/buession/openid/profile/BaseOpenIdProfile.java | Java | apache-2.0 | 3,154 |
package com.math.function;
/**
 * An {@link Operation} obtained by partially applying a {@link BinaryOperation}:
 * the first operand is fixed at construction time, and {@link #apply(Object)}
 * supplies only the second operand.
 *
 * Created by arkadiy on 02/07/17.
 */
public class ReducedByFirstOperandBinaryOperation<T> implements Operation<T> {

    /** The two-argument operation being partially applied. */
    private final BinaryOperation<T> operation;

    /** The value permanently bound to the operation's first slot. */
    private final T boundFirst;

    public ReducedByFirstOperandBinaryOperation(BinaryOperation<T> binaryOperation, T firstOperandValue) {
        this.operation = binaryOperation;
        this.boundFirst = firstOperandValue;
    }

    @Override
    public T apply(T operand) {
        return operation.apply(boundFirst, operand);
    }
}
| arkadius2006/arithmetic | src/main/java/com/math/function/ReducedByFirstOperandBinaryOperation.java | Java | apache-2.0 | 578 |
'use strict'
const Primus = require('primus')
const UglifyJS = require('uglify-js')
const fs = require('fs')
const path = require('path')
const util = require('util')
const uuid = require('uuid')
const ActionHero = require('./../index.js')
const api = ActionHero.api
module.exports = class WebSocketServer extends ActionHero.Server {
constructor () {
super()
this.type = 'websocket'
this.attributes = {
canChat: true,
logConnections: true,
logExits: true,
sendWelcomeMessage: true,
verbs: [
'quit',
'exit',
'documentation',
'roomAdd',
'roomLeave',
'roomView',
'detailsView',
'say'
]
}
}
initialize () {}
start () {
const webserver = api.servers.servers.web
this.server = new Primus(webserver.server, this.config.server)
this.writeClientJS()
this.server.on('connection', (rawConnection) => { this.handleConnection(rawConnection) })
this.server.on('disconnection', (rawConnection) => { this.handleDisconnection(rawConnection) })
this.log(`webSockets bound to ${webserver.options.bindIP}: ${webserver.options.port}`, 'debug')
this.active = true
this.on('connection', (connection) => {
connection.rawConnection.on('data', (data) => { this.handleData(connection, data) })
})
this.on('actionComplete', (data) => {
if (data.toRender !== false) {
data.connection.response.messageId = data.messageId
this.sendMessage(data.connection, data.response, data.messageId)
}
})
}
stop () {
this.active = false
if (this.config.destroyClientsOnShutdown === true) {
this.connections().forEach((connection) => { connection.destroy() })
}
if (this.server) { this.server.destroy() }
}
sendMessage (connection, message, messageId) {
if (message.error) {
message.error = api.config.errors.serializers.servers.websocket(message.error)
}
if (!message.context) { message.context = 'response' }
if (!messageId) { messageId = connection.messageId }
if (message.context === 'response' && !message.messageId) { message.messageId = messageId }
connection.rawConnection.write(message)
}
sendFile (connection, error, fileStream, mime, length, lastModified) {
const messageId = connection.messageId
let content = ''
let response = {
error: error,
content: null,
mime: mime,
length: length,
lastModified: lastModified
}
try {
if (!error) {
fileStream.on('data', (d) => { content += d })
fileStream.on('end', () => {
response.content = content
this.sendMessage(connection, response, messageId)
})
} else {
this.sendMessage(connection, response, messageId)
}
} catch (e) {
this.log(e, 'warning')
this.sendMessage(connection, response, messageId)
}
}
goodbye (connection) {
connection.rawConnection.end()
}
compileActionheroWebsocketClientJS () {
let ahClientSource = fs.readFileSync(path.join(__dirname, '/../client/ActionheroWebsocketClient.js')).toString()
let url = this.config.clientUrl
ahClientSource = ahClientSource.replace(/%%URL%%/g, url)
let defaults = {}
for (let i in this.config.client) { defaults[i] = this.config.client[i] }
defaults.url = url
let defaultsString = util.inspect(defaults)
defaultsString = defaultsString.replace('\'window.location.origin\'', 'window.location.origin')
ahClientSource = ahClientSource.replace('%%DEFAULTS%%', 'return ' + defaultsString)
return ahClientSource
}
renderClientJS (minimize) {
if (!minimize) { minimize = false }
let libSource = api.servers.servers.websocket.server.library()
let ahClientSource = this.compileActionheroWebsocketClientJS()
ahClientSource =
';;;\r\n' +
'(function(exports){ \r\n' +
ahClientSource +
'\r\n' +
'exports.ActionheroWebsocketClient = ActionheroWebsocketClient; \r\n' +
'exports.ActionheroWebsocketClient = ActionheroWebsocketClient; \r\n' +
'})(typeof exports === \'undefined\' ? window : exports);'
if (minimize) {
return UglifyJS.minify(libSource + '\r\n\r\n\r\n' + ahClientSource).code
} else {
return (libSource + '\r\n\r\n\r\n' + ahClientSource)
}
}
writeClientJS () {
if (!api.config.general.paths['public'] || api.config.general.paths['public'].length === 0) {
return
}
if (this.config.clientJsPath && this.config.clientJsName) {
let clientJSPath = path.normalize(
api.config.general.paths['public'][0] +
path.sep +
this.config.clientJsPath +
path.sep
)
let clientJSName = this.config.clientJsName
let clientJSFullPath = clientJSPath + clientJSName
try {
if (!fs.existsSync(clientJSPath)) {
fs.mkdirSync(clientJSPath)
}
fs.writeFileSync(clientJSFullPath + '.js', this.renderClientJS(false))
api.log(`wrote ${clientJSFullPath}.js`, 'debug')
fs.writeFileSync(clientJSFullPath + '.min.js', this.renderClientJS(true))
api.log(`wrote ${clientJSFullPath}.min.js`, 'debug')
} catch (e) {
api.log('Cannot write client-side JS for websocket server:', 'warning')
api.log(e, 'warning')
throw e
}
}
}
handleConnection (rawConnection) {
const fingerprint = rawConnection.query[api.config.servers.web.fingerprintOptions.cookieKey]
let { ip, port } = api.utils.parseHeadersForClientAddress(rawConnection.headers)
this.buildConnection({
rawConnection: rawConnection,
remoteAddress: ip || rawConnection.address.ip,
remotePort: port || rawConnection.address.port,
fingerprint: fingerprint
})
}
handleDisconnection (rawConnection) {
const connections = this.connections()
for (let i in connections) {
if (connections[i] && rawConnection.id === connections[i].rawConnection.id) {
connections[i].destroy()
break
}
}
}
async handleData (connection, data) {
const verb = data.event
delete data.event
connection.messageId = data.messageId || uuid.v4()
delete data.messageId
connection.params = {}
if (verb === 'action') {
for (let v in data.params) {
connection.params[v] = data.params[v]
}
connection.error = null
connection.response = {}
return this.processAction(connection)
}
if (verb === 'file') {
connection.params = {
file: data.file
}
return this.processFile(connection)
}
let words = []
let message
if (data.room) {
words.push(data.room)
delete data.room
}
for (let i in data) { words.push(data[i]) }
const messageId = connection.messageId
try {
let data = await connection.verbs(verb, words)
message = { status: 'OK', context: 'response', data: data }
return this.sendMessage(connection, message, messageId)
} catch (error) {
let formattedError = error.toString()
message = { status: formattedError, error: formattedError, context: 'response', data: data }
return this.sendMessage(connection, message, messageId)
}
}
}
| chimmelb/actionhero | servers/websocket.js | JavaScript | apache-2.0 | 7,292 |
/**
* Copyright 2015 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS-IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
"use strict";
/**
* Adds default metadata to the given hash.
*/
/**
 * Fills in default metadata (publish/modified dates, canonical URL, logo and
 * leader images) on the given hash. Existing truthy values are left untouched.
 */
module.exports.add = function(args) {
  const host = args.config.host
  const defaults = {
    datePublished: args.timestamp,
    dateModified: args.timestamp,
    fileName: args.fileName,
    canonical: host + args.fileName,
    logo: host + '/img/favicon.png',
    logoWidth: '512',
    logoHeight: '512',
    leader: host + '/img/social.png',
    leaderWidth: '1642',
    leaderHeight: '715'
  }
  addMissingMetadata(args, defaults)
};
// Copy each property from source onto target, but only where target's current
// value is falsy — existing truthy values always win.
function addMissingMetadata(target, source) {
  Object.keys(source)
    .filter((key) => !target[key])
    .forEach((key) => { target[key] = source[key] })
}
| ampproject/amp-by-example | lib/Metadata.js | JavaScript | apache-2.0 | 1,408 |
#pragma once
#include <wx/wx.h>
#include <wx/app.h>
class Editor;
wxDECLARE_APP(Editor);
// Top-level frame for the game editor, derived from wxWidgets' wxFrame.
// Menu/command handlers are wired through the event table declared below
// (wxDECLARE_EVENT_TABLE) and defined in the corresponding .cpp.
class MainWindow : public wxFrame {
public:
// Command identifiers for the File-menu actions.
enum {
ID_New,
ID_Open,
ID_Save,
ID_SaveAs,
};
// Default-construct with a standard title, position, and size.
MainWindow() : MainWindow("Game Editor", wxPoint(50, 50), wxSize(500, 500)) { }
MainWindow(const wxString& title, const wxPoint& pos, const wxSize& size);
// Per-frame update hook; no-op in this base window, overridden by subclasses.
virtual void Update() { }
// Close handler: evt.Skip() lets wxWidgets continue default close processing.
void onClose(wxCloseEvent& evt) {
//wxGetApp.activeRenderLoop(false);
evt.Skip();
}
private:
// Menu command handlers (implemented elsewhere).
void OnHello(wxCommandEvent& event);
void OnExit(wxCommandEvent& event);
void OnAbout(wxCommandEvent& event);
wxDECLARE_EVENT_TABLE();
};
/******/ (function(modules) { // webpackBootstrap
/******/ // HMR (Node target): asynchronously read "<chunkId>.<hash>.hot-update.js"
/******/ // next to the bundle and evaluate it in this context; the evaluated chunk
/******/ // populates `chunk`, whose modules are handed to hotAddUpdateChunk. Read
/******/ // errors go to __webpack_require__.onError when defined, otherwise throw.
/******/ function hotDownloadUpdateChunk(chunkId) {
/******/ var filename = require("path").join(__dirname, "" + chunkId + "." + hotCurrentHash + ".hot-update.js");
/******/ require("fs").readFile(filename, "utf-8", function(err, content) {
/******/ if(err) {
/******/ if(__webpack_require__.onError)
/******/ return __webpack_require__.onError(err);
/******/ else
/******/ throw err;
/******/ }
/******/ var chunk = {};
/******/ require("vm").runInThisContext("(function(exports) {" + content + "\n})", filename)(chunk);
/******/ hotAddUpdateChunk(chunk.id, chunk.modules);
/******/ });
/******/ }
/******/
/******/ // HMR (Node target): read and JSON-parse "<hash>.hot-update.json".
/******/ // Calls back with (null, update) on success, with no arguments when the
/******/ // file is missing/unreadable (treated as "no update"), and with a parse
/******/ // error when the JSON is malformed.
/******/ function hotDownloadManifest(callback) {
/******/ var filename = require("path").join(__dirname, "" + hotCurrentHash + ".hot-update.json");
/******/ require("fs").readFile(filename, "utf-8", function(err, content) {
/******/ if(err) return callback();
/******/ try {
/******/ var update = JSON.parse(content);
/******/ } catch(e) {
/******/ return callback(e);
/******/ }
/******/ callback(null, update);
/******/ });
/******/ }
/******/
/******/
/******/
/******/
/******/ var hotApplyOnUpdate = true;
/******/ var hotCurrentHash = "5cb369ca91b819ced4a3";
/******/ var hotCurrentModuleData = {};
/******/ var hotCurrentParents = [];
/******/
/******/ // Wrap __webpack_require__ for a given parent module: the wrapper records
/******/ // parent/child relationships between modules (needed to propagate updates)
/******/ // and augments the chunk loader (fn.e) with HMR status bookkeeping.
/******/ function hotCreateRequire(moduleId) {
/******/ var me = installedModules[moduleId];
/******/ if(!me) return __webpack_require__;
/******/ var fn = function(request) {
/******/ if(me.hot.active) {
/******/ if(installedModules[request]) {
/******/ if(installedModules[request].parents.indexOf(moduleId) < 0)
/******/ installedModules[request].parents.push(moduleId);
/******/ if(me.children.indexOf(request) < 0)
/******/ me.children.push(request);
/******/ } else hotCurrentParents = [moduleId];
/******/ } else {
/******/ console.warn("[HMR] unexpected require(" + request + ") from disposed module " + moduleId);
/******/ hotCurrentParents = [];
/******/ }
/******/ return __webpack_require__(request);
/******/ };
/******/ // Copy every own property (e.g. module cache, public path) onto the wrapper.
/******/ for(var name in __webpack_require__) {
/******/ if(Object.prototype.hasOwnProperty.call(__webpack_require__, name)) {
/******/ fn[name] = __webpack_require__[name];
/******/ }
/******/ }
/******/ // Chunk loading that also tracks in-flight chunks during "prepare" status.
/******/ fn.e = function(chunkId, callback) {
/******/ if(hotStatus === "ready")
/******/ hotSetStatus("prepare");
/******/ hotChunksLoading++;
/******/ __webpack_require__.e(chunkId, function() {
/******/ try {
/******/ callback.call(null, fn);
/******/ } finally {
/******/ finishChunkLoading();
/******/ }
/******/ function finishChunkLoading() {
/******/ hotChunksLoading--;
/******/ if(hotStatus === "prepare") {
/******/ if(!hotWaitingFilesMap[chunkId]) {
/******/ hotEnsureUpdateChunk(chunkId);
/******/ }
/******/ if(hotChunksLoading === 0 && hotWaitingFiles === 0) {
/******/ hotUpdateDownloaded();
/******/ }
/******/ }
/******/ }
/******/ });
/******/ }
/******/ return fn;
/******/ }
/******/
/******/ // Build the per-module `module.hot` API object: accept/decline of self or
/******/ // dependencies, dispose handlers, status listeners, and the `data` object
/******/ // carried over from the previous generation's dispose handlers.
/******/ function hotCreateModule(moduleId) {
/******/ var hot = {
/******/ // private stuff
/******/ _acceptedDependencies: {},
/******/ _declinedDependencies: {},
/******/ _selfAccepted: false,
/******/ _selfDeclined: false,
/******/ _disposeHandlers: [],
/******/
/******/ // Module API
/******/ active: true,
/******/ accept: function(dep, callback) {
/******/ if(typeof dep === "undefined")
/******/ hot._selfAccepted = true;
/******/ else if(typeof dep === "function")
/******/ hot._selfAccepted = dep;
/******/ else if(typeof dep === "number")
/******/ hot._acceptedDependencies[dep] = callback;
/******/ else for(var i = 0; i < dep.length; i++)
/******/ hot._acceptedDependencies[dep[i]] = callback;
/******/ },
/******/ decline: function(dep) {
/******/ if(typeof dep === "undefined")
/******/ hot._selfDeclined = true;
/******/ else if(typeof dep === "number")
/******/ hot._declinedDependencies[dep] = true;
/******/ else for(var i = 0; i < dep.length; i++)
/******/ hot._declinedDependencies[dep[i]] = true;
/******/ },
/******/ dispose: function(callback) {
/******/ hot._disposeHandlers.push(callback);
/******/ },
/******/ addDisposeHandler: function(callback) {
/******/ hot._disposeHandlers.push(callback);
/******/ },
/******/ removeDisposeHandler: function(callback) {
/******/ var idx = hot._disposeHandlers.indexOf(callback);
/******/ if(idx >= 0) hot._disposeHandlers.splice(idx, 1);
/******/ },
/******/
/******/ // Management API
/******/ check: hotCheck,
/******/ apply: hotApply,
/******/ status: function(l) {
/******/ if(!l) return hotStatus;
/******/ hotStatusHandlers.push(l);
/******/ },
/******/ addStatusHandler: function(l) {
/******/ hotStatusHandlers.push(l);
/******/ },
/******/ removeStatusHandler: function(l) {
/******/ var idx = hotStatusHandlers.indexOf(l);
/******/ if(idx >= 0) hotStatusHandlers.splice(idx, 1);
/******/ },
/******/
/******/ //inherit from previous dispose call
/******/ data: hotCurrentModuleData[moduleId]
/******/ };
/******/ return hot;
/******/ }
/******/
/******/ var hotStatusHandlers = [];
/******/ var hotStatus = "idle";
/******/
/******/ // Transition the HMR state machine (idle/check/prepare/ready/dispose/...)
/******/ // and notify every registered status handler of the new status.
/******/ function hotSetStatus(newStatus) {
/******/ var oldStatus = hotStatus;
/******/ hotStatus = newStatus;
/******/ for(var i = 0; i < hotStatusHandlers.length; i++)
/******/ hotStatusHandlers[i].call(null, newStatus);
/******/ }
/******/
/******/ // while downloading
/******/ var hotWaitingFiles = 0;
/******/ var hotChunksLoading = 0;
/******/ var hotWaitingFilesMap = {};
/******/ var hotRequestedFilesMap = {};
/******/ var hotAvailibleFilesMap = {};
/******/ var hotCallback;
/******/
/******/ // The update info
/******/ var hotUpdate, hotUpdateNewHash;
/******/
/******/ // Entry point of an update cycle (module.hot.check). Downloads the update
/******/ // manifest; when an update exists, records which chunk files are available,
/******/ // switches to "prepare" and starts downloading the update chunks. `apply`
/******/ // may be a boolean (auto-apply flag) or, as the sole argument, the callback.
/******/ function hotCheck(apply, callback) {
/******/ if(hotStatus !== "idle") throw new Error("check() is only allowed in idle status");
/******/ if(typeof apply === "function") {
/******/ hotApplyOnUpdate = false;
/******/ callback = apply;
/******/ } else {
/******/ hotApplyOnUpdate = apply;
/******/ callback = callback || function(err) { if(err) throw err };
/******/ }
/******/ hotSetStatus("check");
/******/ hotDownloadManifest(function(err, update) {
/******/ if(err) return callback(err);
/******/ if(!update) {
/******/ // No manifest -> nothing to update; fall back to idle.
/******/ hotSetStatus("idle");
/******/ callback(null, null);
/******/ return;
/******/ }
/******/
/******/ hotRequestedFilesMap = {};
/******/ hotAvailibleFilesMap = {};
/******/ hotWaitingFilesMap = {};
/******/ for(var i = 0; i < update.c.length; i++)
/******/ hotAvailibleFilesMap[update.c[i]] = true;
/******/ hotUpdateNewHash = update.h;
/******/
/******/ hotSetStatus("prepare");
/******/ hotCallback = callback;
/******/ hotUpdate = {};
/******/ // Single-chunk build: only chunk 0 needs fetching.
/******/ var chunkId = 0; {
/******/ hotEnsureUpdateChunk(chunkId);
/******/ }
/******/ if(hotChunksLoading === 0 && hotWaitingFiles === 0) {
/******/ hotUpdateDownloaded();
/******/ }
/******/ });
/******/ }
/******/
/******/ // Called by a downloaded hot-update chunk: merge its replacement modules
/******/ // into the pending hotUpdate map, and trigger hotUpdateDownloaded once all
/******/ // requested files and in-flight chunk loads have completed.
/******/ function hotAddUpdateChunk(chunkId, moreModules) {
/******/ if(!hotAvailibleFilesMap[chunkId] || !hotRequestedFilesMap[chunkId])
/******/ return;
/******/ hotRequestedFilesMap[chunkId] = false;
/******/ for(var moduleId in moreModules) {
/******/ if(Object.prototype.hasOwnProperty.call(moreModules, moduleId)) {
/******/ hotUpdate[moduleId] = moreModules[moduleId];
/******/ }
/******/ }
/******/ if(--hotWaitingFiles === 0 && hotChunksLoading === 0) {
/******/ hotUpdateDownloaded();
/******/ }
/******/ }
/******/
/******/ // Request the hot-update file for a chunk: if the manifest says no file
/******/ // exists for it, just remember it as waiting; otherwise start the download
/******/ // and count it as an outstanding file.
/******/ function hotEnsureUpdateChunk(chunkId) {
/******/ if(!hotAvailibleFilesMap[chunkId]) {
/******/ hotWaitingFilesMap[chunkId] = true;
/******/ } else {
/******/ hotRequestedFilesMap[chunkId] = true;
/******/ hotWaitingFiles++;
/******/ hotDownloadUpdateChunk(chunkId);
/******/ }
/******/ }
/******/
/******/ // All update chunks have arrived: move to "ready" and either apply the
/******/ // update immediately (hotApplyOnUpdate) or report the list of outdated
/******/ // module ids to the caller's check() callback.
/******/ function hotUpdateDownloaded() {
/******/ hotSetStatus("ready");
/******/ var callback = hotCallback;
/******/ hotCallback = null;
/******/ if(!callback) return;
/******/ if(hotApplyOnUpdate) {
/******/ hotApply(hotApplyOnUpdate, callback);
/******/ } else {
/******/ var outdatedModules = [];
/******/ for(var id in hotUpdate) {
/******/ if(Object.prototype.hasOwnProperty.call(hotUpdate, id)) {
/******/ outdatedModules.push(+id);
/******/ }
/******/ }
/******/ callback(null, outdatedModules);
/******/ }
/******/ }
/******/
/******/ function hotApply(options, callback) {
/******/ if(hotStatus !== "ready") throw new Error("apply() is only allowed in ready status");
/******/ if(typeof options === "function") {
/******/ callback = options;
/******/ options = {};
/******/ } else if(options && typeof options === "object") {
/******/ callback = callback || function(err) { if(err) throw err };
/******/ } else {
/******/ options = {};
/******/ callback = callback || function(err) { if(err) throw err };
/******/ }
/******/
/******/ function getAffectedStuff(module) {
/******/ var outdatedModules = [module];
/******/ var outdatedDependencies = [];
/******/
/******/ var queue = outdatedModules.slice();
/******/ while(queue.length > 0) {
/******/ var moduleId = queue.pop();
/******/ var module = installedModules[moduleId];
/******/ if(!module || module.hot._selfAccepted)
/******/ continue;
/******/ if(module.hot._selfDeclined) {
/******/ return new Error("Aborted because of self decline: " + moduleId);
/******/ }
/******/ if(moduleId === 0) {
/******/ return;
/******/ }
/******/ for(var i = 0; i < module.parents.length; i++) {
/******/ var parentId = module.parents[i];
/******/ var parent = installedModules[parentId];
/******/ if(parent.hot._declinedDependencies[moduleId]) {
/******/ return new Error("Aborted because of declined dependency: " + moduleId + " in " + parentId);
/******/ }
/******/ if(outdatedModules.indexOf(parentId) >= 0) continue;
/******/ if(parent.hot._acceptedDependencies[moduleId]) {
/******/ if(!outdatedDependencies[parentId])
/******/ outdatedDependencies[parentId] = [];
/******/ addAllToSet(outdatedDependencies[parentId], [moduleId]);
/******/ continue;
/******/ }
/******/ delete outdatedDependencies[parentId];
/******/ outdatedModules.push(parentId);
/******/ queue.push(parentId);
/******/ }
/******/ }
/******/
/******/ return [outdatedModules, outdatedDependencies];
/******/ }
/******/ function addAllToSet(a, b) {
/******/ for(var i = 0; i < b.length; i++) {
/******/ var item = b[i];
/******/ if(a.indexOf(item) < 0)
/******/ a.push(item);
/******/ }
/******/ }
/******/
/******/ // at begin all updates modules are outdated
/******/ // the "outdated" status can propagate to parents if they don't accept the children
/******/ var outdatedDependencies = {};
/******/ var outdatedModules = [];
/******/ var appliedUpdate = {};
/******/ for(var id in hotUpdate) {
/******/ if(Object.prototype.hasOwnProperty.call(hotUpdate, id)) {
/******/ var moduleId = +id;
/******/ var result = getAffectedStuff(moduleId);
/******/ if(!result) {
/******/ if(options.ignoreUnaccepted)
/******/ continue;
/******/ hotSetStatus("abort");
/******/ return callback(new Error("Aborted because " + moduleId + " is not accepted"));
/******/ }
/******/ if(result instanceof Error) {
/******/ hotSetStatus("abort");
/******/ return callback(result);
/******/ }
/******/ appliedUpdate[moduleId] = hotUpdate[moduleId];
/******/ addAllToSet(outdatedModules, result[0]);
/******/ for(var moduleId in result[1]) {
/******/ if(Object.prototype.hasOwnProperty.call(result[1], moduleId)) {
/******/ if(!outdatedDependencies[moduleId])
/******/ outdatedDependencies[moduleId] = [];
/******/ addAllToSet(outdatedDependencies[moduleId], result[1][moduleId]);
/******/ }
/******/ }
/******/ }
/******/ }
/******/
/******/ // Store self accepted outdated modules to require them later by the module system
/******/ var outdatedSelfAcceptedModules = [];
/******/ for(var i = 0; i < outdatedModules.length; i++) {
/******/ var moduleId = outdatedModules[i];
/******/ if(installedModules[moduleId] && installedModules[moduleId].hot._selfAccepted)
/******/ outdatedSelfAcceptedModules.push({
/******/ module: moduleId,
/******/ errorHandler: installedModules[moduleId].hot._selfAccepted
/******/ });
/******/ }
/******/
/******/ // Now in "dispose" phase
/******/ hotSetStatus("dispose");
/******/ var queue = outdatedModules.slice();
/******/ while(queue.length > 0) {
/******/ var moduleId = queue.pop();
/******/ var module = installedModules[moduleId];
/******/ if(!module) continue;
/******/
/******/ var data = {};
/******/
/******/ // Call dispose handlers
/******/ var disposeHandlers = module.hot._disposeHandlers;
/******/ for(var j = 0; j < disposeHandlers.length; j++) {
/******/ var cb = disposeHandlers[j]
/******/ cb(data);
/******/ }
/******/ hotCurrentModuleData[moduleId] = data;
/******/
/******/ // disable module (this disables requires from this module)
/******/ module.hot.active = false;
/******/
/******/ // remove module from cache
/******/ delete installedModules[moduleId];
/******/
/******/ // remove "parents" references from all children
/******/ for(var j = 0; j < module.children.length; j++) {
/******/ var child = installedModules[module.children[j]];
/******/ if(!child) continue;
/******/ var idx = child.parents.indexOf(moduleId);
/******/ if(idx >= 0) {
/******/ child.parents.splice(idx, 1);
/******/ if(child.parents.length === 0 && child.hot && child.hot._disposeHandlers && child.hot._disposeHandlers.length > 0) {
/******/ // Child has dispose handlers and no more references, dispose it too
/******/ queue.push(child.id);
/******/ }
/******/ }
/******/ }
/******/ }
/******/
/******/ // remove outdated dependency from module children
/******/ for(var moduleId in outdatedDependencies) {
/******/ if(Object.prototype.hasOwnProperty.call(outdatedDependencies, moduleId)) {
/******/ var module = installedModules[moduleId];
/******/ var moduleOutdatedDependencies = outdatedDependencies[moduleId];
/******/ for(var j = 0; j < moduleOutdatedDependencies.length; j++) {
/******/ var dependency = moduleOutdatedDependencies[j];
/******/ var idx = module.children.indexOf(dependency);
/******/ if(idx >= 0) module.children.splice(idx, 1);
/******/ }
/******/ }
/******/ }
/******/
/******/ // Not in "apply" phase
/******/ hotSetStatus("apply");
/******/
/******/ hotCurrentHash = hotUpdateNewHash;
/******/
/******/ // insert new code
/******/ for(var moduleId in appliedUpdate) {
/******/ if(Object.prototype.hasOwnProperty.call(appliedUpdate, moduleId)) {
/******/ modules[moduleId] = appliedUpdate[moduleId];
/******/ }
/******/ }
/******/
/******/ // call accept handlers
/******/ var error = null;
/******/ for(var moduleId in outdatedDependencies) {
/******/ if(Object.prototype.hasOwnProperty.call(outdatedDependencies, moduleId)) {
/******/ var module = installedModules[moduleId];
/******/ var moduleOutdatedDependencies = outdatedDependencies[moduleId];
/******/ var callbacks = [];
/******/ for(var i = 0; i < moduleOutdatedDependencies.length; i++) {
/******/ var dependency = moduleOutdatedDependencies[i];
/******/ var cb = module.hot._acceptedDependencies[dependency];
/******/ if(callbacks.indexOf(cb) >= 0) continue;
/******/ callbacks.push(cb);
/******/ }
/******/ for(var i = 0; i < callbacks.length; i++) {
/******/ var cb = callbacks[i];
/******/ try {
/******/ cb(outdatedDependencies);
/******/ } catch(err) {
/******/ if(!error)
/******/ error = err;
/******/ }
/******/ }
/******/ }
/******/ }
/******/
/******/ // Load self accepted modules
/******/ for(var i = 0; i < outdatedSelfAcceptedModules.length; i++) {
/******/ var item = outdatedSelfAcceptedModules[i];
/******/ var moduleId = item.module;
/******/ hotCurrentParents = [moduleId];
/******/ try {
/******/ __webpack_require__(moduleId);
/******/ } catch(err) {
/******/ if(typeof item.errorHandler === "function") {
/******/ try {
/******/ item.errorHandler(err);
/******/ } catch(err) {
/******/ if(!error)
/******/ error = err;
/******/ }
/******/ } else if(!error)
/******/ error = err;
/******/ }
/******/ }
/******/
/******/ // handle errors in accept handlers and self accepted module load
/******/ if(error) {
/******/ hotSetStatus("fail");
/******/ return callback(error);
/******/ }
/******/
/******/ hotSetStatus("idle");
/******/ callback(null, outdatedModules);
/******/ }
/******/
/******/ // The module cache
/******/ var installedModules = {};
/******/
/******/ // The require function
/******/ function __webpack_require__(moduleId) {
/******/
/******/ // Check if module is in cache
/******/ if(installedModules[moduleId])
/******/ return installedModules[moduleId].exports;
/******/
/******/ // Create a new module (and put it into the cache)
/******/ var module = installedModules[moduleId] = {
/******/ exports: {},
/******/ id: moduleId,
/******/ loaded: false,
/******/ hot: hotCreateModule(moduleId),
/******/ parents: hotCurrentParents,
/******/ children: []
/******/ };
/******/
/******/ // Execute the module function
/******/ modules[moduleId].call(module.exports, module, module.exports, hotCreateRequire(moduleId));
/******/
/******/ // Flag the module as loaded
/******/ module.loaded = true;
/******/
/******/ // Return the exports of the module
/******/ return module.exports;
/******/ }
/******/
/******/
/******/ // expose the modules object (__webpack_modules__)
/******/ __webpack_require__.m = modules;
/******/
/******/ // expose the module cache
/******/ __webpack_require__.c = installedModules;
/******/
/******/ // __webpack_public_path__
/******/ __webpack_require__.p = "";
/******/
/******/ // __webpack_hash__
/******/ __webpack_require__.h = function() { return hotCurrentHash; };
/******/
/******/ // Load entry module and return exports
/******/ return hotCreateRequire(0)(0);
/******/ })
/************************************************************************/
/******/ ([
/* 0 */
/*!*************************************!*\
!*** ./loaders/val-loader/index.js ***!
\*************************************/
/***/ function(module, exports, __webpack_require__) {
it("should handle the val loader (piped with css loader) correctly", function() {
(__webpack_require__(/*! css!val!../_css/generateCss */ 1) + "").indexOf("generated").should.not.be.eql(-1);
(__webpack_require__(/*! css!val!../_css/generateCss */ 1) + "").indexOf(".rule-import2").should.not.be.eql(-1);
(__webpack_require__(/*! raw!val!../_css/generateCss */ 2) + "").indexOf("generated").should.not.be.eql(-1);
});
/***/ },
/* 1 */
/*!***********************************************************************************!*\
!*** (webpack)/~/css-loader!(webpack)/~/val-loader!./loaders/_css/generateCss.js ***!
\***********************************************************************************/
/***/ function(module, exports, __webpack_require__) {
exports = module.exports = __webpack_require__(/*! (webpack)/~/css-loader/cssToString.js */ 3)();
__webpack_require__(/*! (webpack)/~/css-loader/mergeImport.js */ 4)(exports, __webpack_require__(/*! -!(webpack)/~/css-loader!./folder/stylesheet-import1.css */ 5), "");
exports.push([module.id, "\r\n\r\n.rule-direct {\r\n\tbackground: lightgreen;\r\n}\n.generated { color: red; }", ""]);
/***/ },
/* 2 */
/*!***********************************************************************************!*\
!*** (webpack)/~/raw-loader!(webpack)/~/val-loader!./loaders/_css/generateCss.js ***!
\***********************************************************************************/
/***/ function(module, exports, __webpack_require__) {
module.exports = "@import url(folder/stylesheet-import1.css);\r\n\r\n.rule-direct {\r\n\tbackground: lightgreen;\r\n}\n.generated { color: red; }"
/***/ },
/* 3 */
/*!*********************************************!*\
!*** (webpack)/~/css-loader/cssToString.js ***!
\*********************************************/
/***/ function(module, exports, __webpack_require__) {
module.exports = function() {
var list = [];
list.toString = function toString() {
var result = [];
for(var i = 0; i < this.length; i++) {
var item = this[i];
if(item[2]) {
result.push("@media " + item[2] + "{" + item[1] + "}");
} else {
result.push(item[1]);
}
}
return result.join("");
};
return list;
}
/***/ },
/* 4 */
/*!*********************************************!*\
!*** (webpack)/~/css-loader/mergeImport.js ***!
\*********************************************/
/***/ function(module, exports, __webpack_require__) {
module.exports = function(list, importedList, media) {
for(var i = 0; i < importedList.length; i++) {
var item = importedList[i];
if(media && !item[2])
item[2] = media;
else if(media) {
item[2] = "(" + item[2] + ") and (" + media + ")";
}
list.push(item);
}
};
/***/ },
/* 5 */
/*!***************************************************************************!*\
!*** (webpack)/~/css-loader!./loaders/_css/folder/stylesheet-import1.css ***!
\***************************************************************************/
/***/ function(module, exports, __webpack_require__) {
exports = module.exports = __webpack_require__(/*! (webpack)/~/css-loader/cssToString.js */ 3)();
__webpack_require__(/*! (webpack)/~/css-loader/mergeImport.js */ 4)(exports, __webpack_require__(/*! -!(webpack)/~/css-loader!resources-module/stylesheet-import2.css */ 7), "print, screen");
__webpack_require__(/*! (webpack)/~/css-loader/mergeImport.js */ 4)(exports, __webpack_require__(/*! -!(webpack)/~/css-loader!./stylesheet-import3.css */ 6), "print and screen");
exports.push([module.id, "\r\n\r\n\r\n.rule-import1 {\r\n\tbackground: lightgreen;\r\n}\r\n", ""]);
/***/ },
/* 6 */
/*!***************************************************************************!*\
!*** (webpack)/~/css-loader!./loaders/_css/folder/stylesheet-import3.css ***!
\***************************************************************************/
/***/ function(module, exports, __webpack_require__) {
exports = module.exports = __webpack_require__(/*! (webpack)/~/css-loader/cssToString.js */ 3)();
exports.push([module.id, ".rule-import2 {\r\n\tbackground: red !important;\r\n}", ""]);
/***/ },
/* 7 */
/*!***************************************************************************************!*\
!*** (webpack)/~/css-loader!./loaders/_css/~/resources-module/stylesheet-import2.css ***!
\***************************************************************************************/
/***/ function(module, exports, __webpack_require__) {
exports = module.exports = __webpack_require__(/*! (webpack)/~/css-loader/cssToString.js */ 3)();
exports.push([module.id, ".rule-import2 {\r\n\tbackground: lightgreen;\r\n}", ""]);
/***/ }
/******/ ]) | raml-org/raml-dotnet-parser-2 | source/Raml.Parser/node_modules/raml-1-0-parser/node_modules/webpack/test/js/hot/loaders/val-loader/bundle.js | JavaScript | apache-2.0 | 24,067 |
/*
* Copyright (c) 2005, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.redhat.ceylon.langtools.source.tree;
/**
* A tree node for an 'assert' statement.
*
* For example:
* <pre>
* assert <em>condition</em> ;
*
* assert <em>condition</em> : <em>detail</em> ;
* </pre>
*
* @jls section 14.10
*
* @author Peter von der Ahé
* @author Jonathan Gibbons
* @since 1.6
*/
public interface AssertTree extends StatementTree {
ExpressionTree getCondition();
ExpressionTree getDetail();
}
| gijsleussink/ceylon | compiler-java/langtools/src/share/classes/com/redhat/ceylon/langtools/source/tree/AssertTree.java | Java | apache-2.0 | 1,666 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sqoop.core;
public final class ConfigurationConstants {
/**
* All configuration keys are prefixed with this:
* <tt>org.apache.sqoop.</tt>
*/
public static final String PREFIX_GLOBAL_CONFIG = "org.apache.sqoop.";
/**
* All logging related configuration is prefixed with this:
* <tt>org.apache.sqoop.log4j.</tt>
*/
public static final String PREFIX_LOG_CONFIG = PREFIX_GLOBAL_CONFIG
+ "log4j.";
/**
* Prefix for PropertiesConfigurationProvider implementation
*/
public static final String PREFIX_PROPERTIES_PROVIDER_CONFIG =
PREFIX_GLOBAL_CONFIG + "core.configuration.provider.properties.";
/**
* The system property that must be set for specifying the system
* configuration directory: <tt>sqoop.config.dir</tt>.
*/
public static final String SYSPROP_CONFIG_DIR = "SQOOP_CONFIG_DIR";
/**
* Bootstrap configuration property that specifies the system configuration
* provider: <tt>sqoop.config.provider</tt>.
*/
public static final String BOOTCFG_CONFIG_PROVIDER = "sqoop.config.provider";
/**
* Filename for the bootstrap configuration file:
* <tt>sqoop_bootstrap.properties</tt>.
*/
public static final String FILENAME_BOOTCFG_FILE =
"sqoop_bootstrap.properties";
public static final String FILENAME_CONNECTOR_PROPERTIES =
"sqoopconnector.properties";
public static final String FILENAME_CONNECTOR_JDBC_PROPERTIES =
"sqoopconnector-jdbc.properties";
public static final String FILENAME_CONNECTOR_HDFS_PROPERTIES =
"sqoopconnector-hdfs.properties";
public static final String FILENAME_CONNECTOR_KAFKA_PROPERTIES =
"sqoopconnector-kafka.properties";
public static final String FILENAME_CONNECTOR_KITE_PROPERTIES =
"sqoopconnector-kite.properties";
public static final String CONPROP_PROVIDER_CLASS =
"org.apache.sqoop.connector.class";
public static final String CONNPROP_CONNECTOR_NAME =
"org.apache.sqoop.connector.name";
public static final String PROPERTIES_PROVIDER_SLEEP =
PREFIX_PROPERTIES_PROVIDER_CONFIG + "sleep";
public static final String CONNECTOR_AUTO_UPGRADE =
"org.apache.sqoop.connector.autoupgrade";
public static final String DRIVER_AUTO_UPGRADE =
"org.apache.sqoop.driver.autoupgrade";
/**
# Support loading external connector jars only
# The loader will look for sqoopconnector.properties file in the jar before loading
# "/path/to/external/connectors/": Add all the connector JARs in the specified folder
*/
public static final String EXTERNAL_CONNECTOR_LOAD_PATH = "org.apache.sqoop.connector.external.loadpath";
public static final String UBER_JAR_PATH = "org.apache.sqoop.uber.jar.loadpath";
/**
* Enable Sqoop App to kill Tomcat in case that it will fail to load.
*/
public static final String KILL_TOMCAT_ON_FAILURE = "sqoop.kill_tomcat_on_load_failure";
private ConfigurationConstants() {
// Disable explicit object creation
}
}
| vybs/sqoop-on-spark | core/src/main/java/org/apache/sqoop/core/ConfigurationConstants.java | Java | apache-2.0 | 3,809 |
/*
* Copyright © 2013 Ben Bader
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System.Collections.Generic;
namespace Stiletto.Internal
{
internal class SingletonBinding : Binding
{
private readonly Binding binding;
private object instance;
private bool initialized;
public Binding DelegateBinding
{
get { return binding; }
}
public override bool IsResolved
{
get { return binding.IsResolved; }
set { binding.IsResolved = value; }
}
public override bool IsCycleFree
{
get { return binding.IsCycleFree; }
set { binding.IsCycleFree = value; }
}
public override bool IsVisiting
{
get { return binding.IsVisiting; }
set { binding.IsVisiting = value; }
}
public override bool IsLibrary
{
get { return binding.IsLibrary; }
set { binding.IsLibrary = value; }
}
public override bool IsDependedOn
{
get { return binding.IsDependedOn; }
set { binding.IsDependedOn = value; }
}
internal SingletonBinding(Binding binding)
: base(binding.ProviderKey, binding.MembersKey, true, binding.RequiredBy)
{
this.binding = binding;
}
public override void Resolve(Resolver resolver)
{
binding.Resolve(resolver);
}
public override object Get()
{
if (!initialized)
{
lock (this)
{
instance = binding.Get();
initialized = true;
}
}
return instance;
}
public override void GetDependencies(ISet<Binding> injectDependencies, ISet<Binding> propertyDependencies)
{
binding.GetDependencies(injectDependencies, propertyDependencies);
}
}
}
| benjamin-bader/stiletto | Stiletto/Internal/SingletonBinding.cs | C# | apache-2.0 | 2,531 |
using System.Collections.Generic;
using Hl7.Fhir.Publication.Specification.HierarchicalTable.Cells.Component;
using Hl7.Fhir.Publication.Specification.TableModel;
namespace Hl7.Fhir.Publication.Specification.HierarchicalTable.Cells
{
internal class BodyCell : Cell
{
private readonly IEnumerable<Piece> _pieces;
public BodyCell(IEnumerable<Piece> pieces)
: base("td")
{
_pieces = pieces;
}
protected override IEnumerable<CellComponent> CreateCellComponents()
{
if (_pieces != null)
{
foreach (Piece piece in _pieces)
{
if (!string.IsNullOrEmpty(piece.GetTag()))
{
yield return new StyledTag(piece.GetStyle(), piece.GetTag());
}
else if (!string.IsNullOrEmpty(piece.GetReference()))
{
yield return new Reference(piece.GetStyle(), piece.GetReference(), piece.GetHint(), piece.GetText());
}
else if (!string.IsNullOrEmpty(piece.GetHint()))
{
yield return new HintText(piece.GetHint(), piece.GetStyle(), piece.GetText());
}
else if (piece.GetStyle() != null)
{
yield return new StyledText(piece.GetStyle(), piece.GetText());
}
else if (piece.GetLabel() != null)
{
yield return new SpanClass(piece.GetLabel(), piece.GetText());
}
else
{
yield return new Text(piece.GetText());
}
}
}
}
}
} | InteropAdmin/FHIRGEN-DSTU2 | Fhir.Publication/Specification/HierarchicalTable/Cells/BodyCell.cs | C# | apache-2.0 | 1,437 |
package com.dfyy.dao;
import org.springframework.data.jpa.repository.JpaSpecificationExecutor;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.CrudRepository;
import org.springframework.stereotype.Repository;
import com.dfyy.bussiness.UserToken;
@Repository
public interface UserTokenDao extends CrudRepository<UserToken, String>, JpaSpecificationExecutor<UserToken> {
@Query("select u from UserToken as u where u.uid = ?1")
public UserToken findByUserID(String uid);
@Query("select count(u) from UserToken as u where u.uid = ?1")
public int findByUserIDCount(String uid);
@Query("select u from UserToken as u where u.uid = ?1 and u.token = ?2")
public UserToken findByUserIDAndToken(String uid,String token);
}
| secondflying/dfyy | src/main/java/com/dfyy/dao/UserTokenDao.java | Java | apache-2.0 | 775 |
package com.aelns;
import com.aelns.core.utils.number.RandomUtil;
import com.aelns.dao.CityMapper;
import com.aelns.model.City;
import com.aelns.model.CityExample;
import org.apache.commons.collections4.CollectionUtils;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.annotation.Rollback;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import java.util.List;
/**
* Created by aelns on 2017/3/8.
*/
@RunWith(SpringJUnit4ClassRunner.class)
@SpringBootTest
public class ApplicationTests {
@Autowired
private CityMapper cityMapper;
@Test
@Rollback
public void findByName() throws Exception {
String cityName = "Test city name";
// add data
City city = new City();
city.setId(new RandomUtil().hashCode());
city.setCityName(cityName);
cityMapper.insert(city);
// find by name
CityExample example = new CityExample();
example.createCriteria().andCityNameEqualTo(cityName);
List<City> list = cityMapper.selectByExample(example);
Assert.assertEquals(true, CollectionUtils.isNotEmpty(list));
// clean data
list.forEach(c -> cityMapper.deleteByPrimaryKey(c.getId()));
}
}
| alwaysc/aelns-api | aelns-api/src/test/java/com/aelns/ApplicationTests.java | Java | apache-2.0 | 1,415 |
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* AdUnitSize.java
*
* This file was auto-generated from WSDL
* by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
*/
package com.google.api.ads.admanager.axis.v202111;
/**
* An {@code AdUnitSize} represents the size of an ad in an ad unit.
* This also represents
* the environment and companions of a particular ad in an
* ad unit. In most cases, it is
* a simple size with just a width and a height (sometimes
* representing an aspect ratio).
*/
public class AdUnitSize implements java.io.Serializable {
/* The permissible creative size that can be served inside this
* ad unit. */
private com.google.api.ads.admanager.axis.v202111.Size size;
/* The environment type of the ad unit size. The default value
* is
* {@link EnvironmentType#BROWSER}. */
private com.google.api.ads.admanager.axis.v202111.EnvironmentType environmentType;
/* The companions for this ad unit size.
* Companions are only valid if the environment is {@link
* EnvironmentType#VIDEO_PLAYER}.
* If the environment is {@link EnvironmentType#BROWSER}
* including companions
* results in an error. */
private com.google.api.ads.admanager.axis.v202111.AdUnitSize[] companions;
/* The full (including companion sizes, if applicable) display
* string of the size,
* e.g. {@code "300x250"} or {@code "300x250v (180x150)"} */
private java.lang.String fullDisplayString;
/* Whether the inventory size is audio. If set to true, {@code
* Size} will be set to {@code "1x1"}
* and {@code EnvironmentType} will be set to {@link
* EnvironmentType#VIDEO_PLAYER} regardless of
* user input. */
private java.lang.Boolean isAudio;
public AdUnitSize() {
}
public AdUnitSize(
com.google.api.ads.admanager.axis.v202111.Size size,
com.google.api.ads.admanager.axis.v202111.EnvironmentType environmentType,
com.google.api.ads.admanager.axis.v202111.AdUnitSize[] companions,
java.lang.String fullDisplayString,
java.lang.Boolean isAudio) {
this.size = size;
this.environmentType = environmentType;
this.companions = companions;
this.fullDisplayString = fullDisplayString;
this.isAudio = isAudio;
}
@Override
public String toString() {
return com.google.common.base.MoreObjects.toStringHelper(this.getClass())
.omitNullValues()
.add("companions", getCompanions())
.add("environmentType", getEnvironmentType())
.add("fullDisplayString", getFullDisplayString())
.add("isAudio", getIsAudio())
.add("size", getSize())
.toString();
}
/**
* Gets the size value for this AdUnitSize.
*
* @return size * The permissible creative size that can be served inside this
* ad unit.
*/
public com.google.api.ads.admanager.axis.v202111.Size getSize() {
return size;
}
/**
* Sets the size value for this AdUnitSize.
*
* @param size * The permissible creative size that can be served inside this
* ad unit.
*/
public void setSize(com.google.api.ads.admanager.axis.v202111.Size size) {
this.size = size;
}
/**
* Gets the environmentType value for this AdUnitSize.
*
* @return environmentType * The environment type of the ad unit size. The default value
* is
* {@link EnvironmentType#BROWSER}.
*/
public com.google.api.ads.admanager.axis.v202111.EnvironmentType getEnvironmentType() {
return environmentType;
}
/**
* Sets the environmentType value for this AdUnitSize.
*
* @param environmentType * The environment type of the ad unit size. The default value
* is
* {@link EnvironmentType#BROWSER}.
*/
public void setEnvironmentType(com.google.api.ads.admanager.axis.v202111.EnvironmentType environmentType) {
this.environmentType = environmentType;
}
/**
* Gets the companions value for this AdUnitSize.
*
* @return companions * The companions for this ad unit size.
* Companions are only valid if the environment is {@link
* EnvironmentType#VIDEO_PLAYER}.
* If the environment is {@link EnvironmentType#BROWSER}
* including companions
* results in an error.
*/
public com.google.api.ads.admanager.axis.v202111.AdUnitSize[] getCompanions() {
return companions;
}
/**
* Sets the companions value for this AdUnitSize.
*
* @param companions * The companions for this ad unit size.
* Companions are only valid if the environment is {@link
* EnvironmentType#VIDEO_PLAYER}.
* If the environment is {@link EnvironmentType#BROWSER}
* including companions
* results in an error.
*/
public void setCompanions(com.google.api.ads.admanager.axis.v202111.AdUnitSize[] companions) {
this.companions = companions;
}
public com.google.api.ads.admanager.axis.v202111.AdUnitSize getCompanions(int i) {
return this.companions[i];
}
public void setCompanions(int i, com.google.api.ads.admanager.axis.v202111.AdUnitSize _value) {
this.companions[i] = _value;
}
/**
* Gets the fullDisplayString value for this AdUnitSize.
*
* @return fullDisplayString * The full (including companion sizes, if applicable) display
* string of the size,
* e.g. {@code "300x250"} or {@code "300x250v (180x150)"}
*/
public java.lang.String getFullDisplayString() {
return fullDisplayString;
}
/**
* Sets the fullDisplayString value for this AdUnitSize.
*
* @param fullDisplayString * The full (including companion sizes, if applicable) display
* string of the size,
* e.g. {@code "300x250"} or {@code "300x250v (180x150)"}
*/
public void setFullDisplayString(java.lang.String fullDisplayString) {
this.fullDisplayString = fullDisplayString;
}
/**
* Gets the isAudio value for this AdUnitSize.
*
* @return isAudio * Whether the inventory size is audio. If set to true, {@code
* Size} will be set to {@code "1x1"}
* and {@code EnvironmentType} will be set to {@link
* EnvironmentType#VIDEO_PLAYER} regardless of
* user input.
*/
public java.lang.Boolean getIsAudio() {
return isAudio;
}
/**
* Sets the isAudio value for this AdUnitSize.
*
* @param isAudio * Whether the inventory size is audio. If set to true, {@code
* Size} will be set to {@code "1x1"}
* and {@code EnvironmentType} will be set to {@link
* EnvironmentType#VIDEO_PLAYER} regardless of
* user input.
*/
public void setIsAudio(java.lang.Boolean isAudio) {
this.isAudio = isAudio;
}
private java.lang.Object __equalsCalc = null;
public synchronized boolean equals(java.lang.Object obj) {
if (!(obj instanceof AdUnitSize)) return false;
AdUnitSize other = (AdUnitSize) obj;
if (obj == null) return false;
if (this == obj) return true;
if (__equalsCalc != null) {
return (__equalsCalc == obj);
}
__equalsCalc = obj;
boolean _equals;
_equals = true &&
((this.size==null && other.getSize()==null) ||
(this.size!=null &&
this.size.equals(other.getSize()))) &&
((this.environmentType==null && other.getEnvironmentType()==null) ||
(this.environmentType!=null &&
this.environmentType.equals(other.getEnvironmentType()))) &&
((this.companions==null && other.getCompanions()==null) ||
(this.companions!=null &&
java.util.Arrays.equals(this.companions, other.getCompanions()))) &&
((this.fullDisplayString==null && other.getFullDisplayString()==null) ||
(this.fullDisplayString!=null &&
this.fullDisplayString.equals(other.getFullDisplayString()))) &&
((this.isAudio==null && other.getIsAudio()==null) ||
(this.isAudio!=null &&
this.isAudio.equals(other.getIsAudio())));
__equalsCalc = null;
return _equals;
}
private boolean __hashCodeCalc = false;
public synchronized int hashCode() {
if (__hashCodeCalc) {
return 0;
}
__hashCodeCalc = true;
int _hashCode = 1;
if (getSize() != null) {
_hashCode += getSize().hashCode();
}
if (getEnvironmentType() != null) {
_hashCode += getEnvironmentType().hashCode();
}
if (getCompanions() != null) {
for (int i=0;
i<java.lang.reflect.Array.getLength(getCompanions());
i++) {
java.lang.Object obj = java.lang.reflect.Array.get(getCompanions(), i);
if (obj != null &&
!obj.getClass().isArray()) {
_hashCode += obj.hashCode();
}
}
}
if (getFullDisplayString() != null) {
_hashCode += getFullDisplayString().hashCode();
}
if (getIsAudio() != null) {
_hashCode += getIsAudio().hashCode();
}
__hashCodeCalc = false;
return _hashCode;
}
    // Type metadata: Axis descriptor mapping AdUnitSize to its XML schema
    // type; consulted by the Bean(De)Serializer returned below.
    private static org.apache.axis.description.TypeDesc typeDesc =
        new org.apache.axis.description.TypeDesc(AdUnitSize.class, true);

    static {
        // Register the XML type, then one ElementDesc per serialized field
        // (field name, XML element name, XML type, occurrence constraints).
        typeDesc.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "AdUnitSize"));
        org.apache.axis.description.ElementDesc elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("size");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "size"));
        elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "Size"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("environmentType");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "environmentType"));
        elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "EnvironmentType"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("companions");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "companions"));
        elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "AdUnitSize"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        // companions is a repeated element (array field).
        elemField.setMaxOccursUnbounded(true);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("fullDisplayString");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "fullDisplayString"));
        elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "string"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("isAudio");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "isAudio"));
        elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "boolean"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
    }

    /**
     * Return type metadata object
     *
     * @return the shared, statically-initialized TypeDesc for this bean
     */
    public static org.apache.axis.description.TypeDesc getTypeDesc() {
        return typeDesc;
    }
    /**
     * Get Custom Serializer
     *
     * @param mechType serialization mechanism identifier (not referenced in
     *        the body; required by the Axis factory-method signature)
     * @param _javaType the Java class being serialized
     * @param _xmlType the XML schema type to serialize to
     * @return a BeanSerializer driven by this class's type metadata
     */
    public static org.apache.axis.encoding.Serializer getSerializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new org.apache.axis.encoding.ser.BeanSerializer(
            _javaType, _xmlType, typeDesc);
    }
    /**
     * Get Custom Deserializer
     *
     * @param mechType serialization mechanism identifier (not referenced in
     *        the body; required by the Axis factory-method signature)
     * @param _javaType the Java class to deserialize into
     * @param _xmlType the XML schema type to deserialize from
     * @return a BeanDeserializer driven by this class's type metadata
     */
    public static org.apache.axis.encoding.Deserializer getDeserializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new org.apache.axis.encoding.ser.BeanDeserializer(
            _javaType, _xmlType, typeDesc);
    }
}
| googleads/googleads-java-lib | modules/dfp_axis/src/main/java/com/google/api/ads/admanager/axis/v202111/AdUnitSize.java | Java | apache-2.0 | 14,102 |
// Generated from /POI/java/org/apache/poi/ss/formula/ptg/IntersectionPtg.java
#pragma once
#include <fwd-POI.hpp>
#include <java/io/fwd-POI.hpp>
#include <java/lang/fwd-POI.hpp>
#include <org/apache/poi/ss/formula/ptg/fwd-POI.hpp>
#include <org/apache/poi/util/fwd-POI.hpp>
#include <org/apache/poi/ss/formula/ptg/OperationPtg.hpp>
// Forward declaration of the transpiler's array helper template: models a
// Java array of ComponentType that is assignable to the listed base arrays.
template<typename ComponentType, typename... Bases> struct SubArray;
namespace java
{
    namespace io
    {
        // Equivalent of java.io.Serializable[].
        typedef ::SubArray< ::java::io::Serializable, ::java::lang::ObjectArray > SerializableArray;
    } // io
    namespace lang
    {
        typedef ::SubArray< ::java::lang::CharSequence, ObjectArray > CharSequenceArray;
        typedef ::SubArray< ::java::lang::Comparable, ObjectArray > ComparableArray;
        // Equivalent of java.lang.String[]; String is Serializable,
        // Comparable and CharSequence, hence the extra base arrays.
        typedef ::SubArray< ::java::lang::String, ObjectArray, ::java::io::SerializableArray, ComparableArray, CharSequenceArray > StringArray;
    } // lang
} // java
struct default_init_tag;

// Transpiled counterpart of org.apache.poi.ss.formula.ptg.IntersectionPtg —
// a formula parse token (Ptg) subclass of OperationPtg. Final class; the
// shared instance is reached through the static instance() accessor.
class poi::ss::formula::ptg::IntersectionPtg final
    : public OperationPtg
{

public:
    typedef OperationPtg super;
    // Token type id for this Ptg in the serialized formula stream.
    static constexpr int8_t sid { int8_t(15) };

private:
    // Lazily/statically initialized singleton (see clinit()/instance()).
    static OperationPtg* instance_;
protected:
    void ctor();

public:
    bool isBaseToken() override;
    int32_t getSize() override;
    void write(::poi::util::LittleEndianOutput* out) override;
    ::java::lang::String* toFormulaString() override;
    ::java::lang::String* toFormulaString(::java::lang::StringArray* operands) override;
    int32_t getNumberOfOperands() override;

    // Generated
private:
    IntersectionPtg();
protected:
    // Tag-dispatch constructor used by the transpiler to skip Java-style
    // field initialization.
    IntersectionPtg(const ::default_init_tag&);

public:
    static ::java::lang::Class *class_();
    static void clinit();
    static OperationPtg*& instance();

private:
    virtual ::java::lang::Class* getClass0();
};
| pebble2015/cpoi | src/org/apache/poi/ss/formula/ptg/IntersectionPtg.hpp | C++ | apache-2.0 | 1,772 |
/*
* Copyright (C) 2014 The Calrissian Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.calrissian.insight;
import org.junit.Test;
/**
 * Test for the AssertBuilderCollector class.
 */
public class AssertBuilderCollectorTest {

    /**
     * Adding a null AssertBuilder must be rejected immediately.
     */
    @Test(expected = NullPointerException.class)
    public void testAddNull() {
        new AssertBuilderCollector().add(null);
    }

    /**
     * A collector holding only passing builders must not throw.
     */
    @Test
    public void testAddPass() {
        final AssertBuilderCollector assertBuilder = new AssertBuilderCollector();
        assertBuilder.add(pass());
        assertBuilder.runAssert();
    }

    /**
     * A collector holding a failing builder must propagate the failure.
     */
    @Test(expected = AssertionError.class)
    public void testAddFailure() {
        final AssertBuilderCollector assertBuilder = new AssertBuilderCollector();
        assertBuilder.add(fail());
        assertBuilder.runAssert();
    }

    /**
     * reset() must discard previously added builders.
     */
    @Test
    public void testReset() {
        final AssertBuilderCollector assertBuilder = new AssertBuilderCollector();
        assertBuilder.add(fail());
        assertBuilder.reset();
        assertBuilder.runAssert();
    }

    /**
     * runAndReset() must run the collected builders (throwing for the failing
     * one) and then leave the collector empty.
     *
     * Fixed: the original threw a sentinel AssertionError("failed") inside
     * the try block when runAndReset() did not throw, but that sentinel was
     * itself swallowed by the catch (AssertionError) clause, so the test
     * passed vacuously even if runAndReset() never threw. A boolean flag
     * checked outside the try/catch restores the intended verification.
     */
    @Test
    public void testRunAndReset() {
        final AssertBuilderCollector assertBuilder = new AssertBuilderCollector();
        assertBuilder.add(fail());
        boolean thrown = false;
        try {
            assertBuilder.runAndReset();
        } catch (final AssertionError e) {
            thrown = true;
        }
        if (!thrown) {
            throw new AssertionError(
                "runAndReset() should have rethrown the collected failure");
        }
        // After runAndReset() the collector must be empty again.
        assertBuilder.runAssert();
    }

    /**
     * AssertBuilder that will always fail.
     *
     * @return an AssertBuilder that will fail.
     */
    private static AssertBuilder fail() {
        return new AssertBuilder() {
            @Override
            public void runAssert() {
                throw new AssertionError("failed");
            }
        };
    }

    /**
     * AssertBuilder that will always pass.
     *
     * @return an AssertBuilder that will pass.
     */
    private static AssertBuilder pass() {
        return new AssertBuilder() {
            @Override
            public void runAssert() {}
        };
    }
} | calrissian/insight | src/test/java/org/calrissian/insight/AssertBuilderCollectorTest.java | Java | apache-2.0 | 2,561 |
package com.hue.model;
import java.util.Set;
import org.apache.tinkerpop.gremlin.structure.Vertex;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.google.common.collect.Sets;
import com.hue.graph.Graphable;
/**
 * A SQL expression backing a derived field, together with the set of tables
 * the expression references. Identity (equals/hashCode) is based solely on
 * the trimmed SQL text.
 */
public class FieldExpression implements Graphable{

    // The trimmed SQL text of the expression.
    private String sql;

    // Tables referenced by the expression.
    private Set<Table> tables = Sets.newHashSet();

    // Backing graph vertex; excluded from JSON serialization.
    @JsonIgnore
    private Vertex v;

    public FieldExpression() {
    }

    /**
     * @param sql the SQL text (trimmed before storing)
     * @param tables the tables referenced by the expression
     */
    public FieldExpression(String sql, Set<Table> tables) {
        setSql(sql);
        setTables(tables);
    }

    public String getSql() {
        return sql;
    }

    /**
     * Stores the trimmed SQL text. A null argument is ignored and leaves the
     * current value unchanged.
     */
    public void setSql(String sql) {
        if (sql != null)
            this.sql = sql.trim();
    }

    @Override
    public Vertex v() {
        return v;
    }

    @Override
    public void v(Vertex v) {
        this.v = v;
    }

    /**
     * The display name of this expression is its string form ("exp:" + sql).
     */
    @Override
    public String getName() {
        return this.toString();
    }

    public Set<Table> getTables() {
        return tables;
    }

    public void setTables(Set<Table> tables) {
        this.tables = tables;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((sql == null) ? 0 : sql.hashCode());
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        FieldExpression other = (FieldExpression) obj;
        if (sql == null) {
            if (other.sql != null)
                return false;
        } else if (!sql.equals(other.sql))
            return false;
        return true;
    }

    @Override
    public String toString() {
        return "exp:" + sql;
    }

    /**
     * @param t the table to look for
     * @return true if the given table is referenced by this expression
     */
    public boolean hasTable(Table t) {
        // anyMatch short-circuits and is the idiomatic, equivalent form of
        // filter(...).findFirst().isPresent().
        return getTables()
                .stream()
                .anyMatch(tt -> tt.equals(t));
    }
}
| ajoabraham/hue | src/main/java/com/hue/model/FieldExpression.java | Java | apache-2.0 | 1,683 |
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
namespace DefaultReverseEngineering
{
    /// <summary>
    /// Entity mapped to the Purchasing.ProductVendor table: one row per
    /// product/vendor pairing, carrying ordering constraints and pricing.
    /// Reverse-engineered partial class; extend it in a separate file.
    /// </summary>
    [Table("ProductVendor", Schema = "Purchasing")]
    public partial class ProductVendor
    {
        // Identifies the product side of the pairing.
        public int ProductID { get; set; }
        // Identifies the vendor (business entity) side of the pairing.
        public int BusinessEntityID { get; set; }
        public int AverageLeadTime { get; set; }
        public decimal? LastReceiptCost { get; set; }
        public DateTime? LastReceiptDate { get; set; }
        public int MaxOrderQty { get; set; }
        public int MinOrderQty { get; set; }
        public DateTime ModifiedDate { get; set; }
        public int? OnOrderQty { get; set; }
        public decimal StandardPrice { get; set; }
        // Three-character unit-of-measure code (required, max length 3).
        [Required]
        [MaxLength(3)]
        public string UnitMeasureCode { get; set; }
        // Navigation: owning vendor (FK BusinessEntityID).
        [ForeignKey("BusinessEntityID")]
        [InverseProperty("ProductVendor")]
        public virtual Vendor BusinessEntity { get; set; }
        // Navigation: referenced product (FK ProductID).
        [ForeignKey("ProductID")]
        [InverseProperty("ProductVendor")]
        public virtual Product Product { get; set; }
        // Navigation: referenced unit of measure (FK UnitMeasureCode).
        [ForeignKey("UnitMeasureCode")]
        [InverseProperty("ProductVendor")]
        public virtual UnitMeasure UnitMeasureCodeNavigation { get; set; }
    }
}
| fsprojects/FSharp.EntityFramework.MicrosoftSqlServer | tests/ReverseEngineering.Annotations/ProductVendor.cs | C# | apache-2.0 | 1,303 |
/*
* Jitsi, the OpenSource Java VoIP and Instant Messaging client.
*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.java.sip.communicator.plugin.sipaccregwizz;
import java.awt.*;
import java.util.*;
import java.util.List;
import javax.swing.*;
import net.java.sip.communicator.plugin.desktoputil.*;
import net.java.sip.communicator.plugin.desktoputil.wizard.*;
import net.java.sip.communicator.service.protocol.*;
import net.java.sip.communicator.service.protocol.sip.*;
import org.apache.commons.lang3.StringUtils;
import org.jitsi.utils.*;
/**
 * The <tt>SIPAccountRegistrationForm</tt>: the Swing form through which a SIP
 * account is created or modified. It aggregates the account, connection,
 * security, presence and encodings panels (as tabs in advanced mode) and
 * moves values between them and a {@link SIPAccountRegistration}.
 *
 * @author Yana Stamcheva
 * @author Grogorii Balutsel
 * @author Pawel Domas
 */
public class SIPAccountRegistrationForm
    extends TransparentPanel
{
    /**
     * Serial version UID.
     */
    private static final long serialVersionUID = 0L;

    // The individual configuration panels composing this form.
    private final AccountPanel accountPanel;
    private final ConnectionPanel connectionPanel;
    private final SecurityPanel securityPanel;
    private final PresencePanel presencePanel;

    /**
     * The panel for encoding settings
     */
    private final EncodingsPanel encodingsPanel;

    // True when this form edits an existing account rather than creating one.
    private boolean isModification;

    // Parent wizard; also the source of the registration model and labels.
    private final SIPAccountRegistrationWizard wizard;

    private final JTabbedPane tabbedPane = new SIPCommTabbedPane();

    /**
     * The panels which value needs validation before we continue.
     */
    private List<ValidatingPanel> validatingPanels =
        new ArrayList<ValidatingPanel>();

    /**
     * Creates an instance of <tt>SIPAccountRegistrationForm</tt>.
     * @param wizard the parent wizard
     */
    public SIPAccountRegistrationForm(SIPAccountRegistrationWizard wizard)
    {
        super(new BorderLayout());
        this.wizard = wizard;

        accountPanel = new AccountPanel(this);

        connectionPanel = new ConnectionPanel(this);

        securityPanel = new SecurityPanel(
            this.getRegistration().getSecurityRegistration(),
            true);

        presencePanel = new PresencePanel(this);

        encodingsPanel = new EncodingsPanel();
    }

    /**
     * Initializes all panels, buttons, etc.
     */
    void init()
    {
        this.setBorder(BorderFactory.createEmptyBorder(10, 10, 10, 10));

        accountPanel.initAdvancedForm();

        SIPAccountCreationFormService createService = getCreateAccountService();
        if (createService != null)
            createService.clear();

        // In advanced mode each panel becomes a tab; otherwise only the
        // account panel is shown.
        if (!SIPAccRegWizzActivator.isAdvancedAccountConfigDisabled())
        {
            if (accountPanel.getParent() != tabbedPane)
                tabbedPane.addTab( Resources.getString("service.gui.ACCOUNT"),
                                    accountPanel);

            if (connectionPanel.getParent() != tabbedPane)
                tabbedPane.addTab(Resources.getString("service.gui.CONNECTION"),
                                    connectionPanel);

            if (securityPanel.getParent() != tabbedPane)
                tabbedPane.addTab(Resources.getString("service.gui.SECURITY"),
                                    securityPanel);

            if (presencePanel.getParent() != tabbedPane)
                tabbedPane.addTab(Resources.getString("service.gui.PRESENCE"),
                                    presencePanel);

            if (encodingsPanel.getParent() != tabbedPane)
                tabbedPane.addTab(
                        Resources.getString("plugin.jabberaccregwizz.ENCODINGS"),
                        encodingsPanel);

            if (tabbedPane.getParent() != this)
                this.add(tabbedPane, BorderLayout.NORTH);

            tabbedPane.setSelectedIndex(0);
        }
        else
            add(accountPanel, BorderLayout.NORTH);
    }

    /**
     * Parse the server part from the sip id and set it to server as default
     * value. If Advanced option is enabled Do nothing.
     * @param userName the account user name
     * @return the server address
     */
    String setServerFieldAccordingToUIN(String userName)
    {
        String serverAddress = SipAccountID.getServerFromUserName(userName);

        connectionPanel.setServerFieldAccordingToUIN(serverAddress);

        return serverAddress;
    }

    /**
     * Enables/disables the next/finish button of the parent wizard.
     * @param isEnabled <tt>true</tt> to enable the next button, <tt>false</tt>
     * otherwise
     */
    private void setNextFinishButtonEnabled(boolean isEnabled)
    {
        SIPAccRegWizzActivator.getUIService().getAccountRegWizardContainer()
            .setNextFinishButtonEnabled(isEnabled);
    }

    /**
     * Call this to trigger revalidation of all the input values
     * and change the state of next/finish button.
     */
    void reValidateInput()
    {
        // One invalid panel is enough to disable the next/finish button.
        for(ValidatingPanel panel : validatingPanels)
        {
            if(!panel.isValidated())
            {
                setNextFinishButtonEnabled(false);
                return;
            }
        }

        setNextFinishButtonEnabled(true);
    }

    /**
     * Adds panel to the list of panels with values which need validation.
     * @param panel ValidatingPanel.
     */
    public void addValidatingPanel(ValidatingPanel panel)
    {
        validatingPanels.add(panel);
    }

    /**
     * Return the server part of the sip user name.
     *
     * NOTE(review): duplicates SipAccountID.getServerFromUserName (which the
     * rest of this class uses) and is not referenced anywhere in this class —
     * confirm it is unused package-wide before removing.
     *
     * @param userName the username.
     * @return the server part of the sip user name.
     */
    static String getServerFromUserName(String userName)
    {
        int delimIndex = userName.indexOf("@");
        if (delimIndex != -1)
        {
            return userName.substring(delimIndex + 1);
        }

        return null;
    }

    /**
     * Indicates if this wizard is modifying an existing account or is creating
     * a new one.
     *
     * @return <code>true</code> to indicate that this wizard is currently in
     * modification mode, <code>false</code> - otherwise.
     */
    public boolean isModification()
    {
        return isModification;
    }

    /**
     * Saves the user input when the "Next" wizard buttons is clicked.
     *
     * @param registration the SIPAccountRegistration
     * @return true when the page was committed; false when in-line account
     * creation was requested but failed
     */
    public boolean commitPage(SIPAccountRegistration registration)
    {
        String userID = null;
        char[] password = null;
        String serverAddress = null;
        String proxyAddress = null;
        String xcapRoot = null;
        // Two sources of credentials: a freshly created account (web signup
        // service) or the values typed into the panels.
        if (accountPanel.isCreateAccount())
        {
            NewAccount newAccount
                = getCreateAccountService().createAccount();

            if (newAccount != null)
            {
                userID = newAccount.getUserName();
                password = newAccount.getPassword();
                serverAddress = newAccount.getServerAddress();
                proxyAddress = newAccount.getProxyAddress();
                xcapRoot = newAccount.getXcapRoot();

                if (serverAddress == null)
                    serverAddress = setServerFieldAccordingToUIN(userID);

                if (proxyAddress == null)
                    proxyAddress = serverAddress;
            }
            else
            {
                // If we didn't succeed to create our new account, we have
                // nothing more to do here.
                return false;
            }
        }
        else
        {
            userID = accountPanel.getUserID();

            if(SipAccountID.getServerFromUserName(userID) == null
                && registration.getDefaultDomain() != null)
            {
                // we have only a username and we want to add
                // a defautl domain
                userID = userID + "@" + registration.getDefaultDomain();
                setServerFieldAccordingToUIN(userID);
            }

            password = accountPanel.getPassword();
            serverAddress = connectionPanel.getServerAddress();
            proxyAddress = connectionPanel.getProxy();
        }

        if(userID == null || userID.trim().length() == 0)
            throw new IllegalStateException("No user ID provided.");

        registration.setUserID(userID);

        registration.setRememberPassword(accountPanel.isRememberPassword());

        registration.setPassword(new String(password));

        registration.setServerAddress(serverAddress);

        registration.setProxy(proxyAddress);

        String displayName = accountPanel.getDisplayName();
        registration.setDisplayName(displayName);

        String authName = connectionPanel.getAuthenticationName();
        if(authName != null && authName.length() > 0)
            registration.setAuthorizationName(authName);

        registration.setServerPort(connectionPanel.getServerPort());
        registration.setProxyPort(connectionPanel.getProxyPort());
        registration.setPreferredTransport(
            connectionPanel.getSelectedTransport());

        registration.setProxyAutoConfigure(
            connectionPanel.isProxyAutoConfigureEnabled());

        registration.setProxyForceBypassConfigure(
            connectionPanel.isProxyForceBypassConfigureEnabled());

        registration.setEnablePresence(
            presencePanel.isPresenceEnabled());
        registration.setForceP2PMode(
            presencePanel.isForcePeerToPeerMode());
        registration.setTlsClientCertificate(
            connectionPanel.getCertificateId());
        registration.setPollingPeriod(
            presencePanel.getPollPeriod());
        registration.setSubscriptionExpiration(
            presencePanel.getSubscriptionExpiration());

        registration.setKeepAliveMethod(
                connectionPanel.getKeepAliveMethod());

        registration.setKeepAliveInterval(
                connectionPanel.getKeepAliveInterval());

        registration.setDTMFMethod(
            connectionPanel.getDTMFMethod());
        registration.setDtmfMinimalToneDuration(
            connectionPanel.getDtmfMinimalToneDuration());

        SIPAccRegWizzActivator.getUIService().getAccountRegWizardContainer()
            .setBackButtonEnabled(true);

        securityPanel.commitPanel(registration.getSecurityRegistration());

        // A non-null xcapRoot (only possible on the create-account path)
        // implies XCAP contact-list storage.
        if(xcapRoot != null)
        {
            registration.setXCapEnable(true);
            registration.setClistOptionServerUri(xcapRoot);
        }
        else
        {
            registration.setXCapEnable(presencePanel.isXCapEnable());
            registration.setXiVOEnable(presencePanel.isXiVOEnable());
            registration.setClistOptionServerUri(
                    presencePanel.getClistOptionServerUri());
        }

        registration.setClistOptionUseSipCredentials(
                presencePanel.isClistOptionUseSipCredentials());
        registration.setClistOptionUser(presencePanel.getClistOptionUser());
        registration.setClistOptionPassword(
                new String(presencePanel.getClistOptionPassword()));
        registration.setMessageWaitingIndications(
            connectionPanel.isMessageWaitingEnabled());
        registration.setVoicemailURI(connectionPanel.getVoicemailURI());
        registration.setVoicemailCheckURI(connectionPanel.getVoicemailCheckURI());

        encodingsPanel.commitPanel(registration.getEncodingsRegistration());

        return true;
    }

    /**
     * Loads given account registration object.
     * @param sipAccReg the account registration object to load.
     */
    public void loadAccount(SIPAccountRegistration sipAccReg)
    {
        String password = sipAccReg.getPassword();

        String serverAddress = sipAccReg.getServerAddress();

        String displayName = sipAccReg.getAccountPropertyString(
                ProtocolProviderFactory.DISPLAY_NAME);

        String authName = sipAccReg.getAuthorizationName();

        String serverPort = sipAccReg.getServerPort();

        String proxyAddress = sipAccReg.getProxy();

        String proxyPort = sipAccReg.getProxyPort();

        String preferredTransport = sipAccReg.getPreferredTransport();

        boolean enablePresence = sipAccReg.isEnablePresence();

        boolean forceP2P = sipAccReg.isForceP2PMode();

        String clientTlsCertificateId = sipAccReg.getTlsClientCertificate();

        boolean proxyAutoConfigureEnabled =
                sipAccReg.isProxyAutoConfigure();

        boolean proxyForceBypassConfigureEnabled =
                sipAccReg.isProxyForceBypassConfigure();

        String pollingPeriod = sipAccReg.getPollingPeriod();

        String subscriptionPeriod = sipAccReg.getSubscriptionExpiration();

        String keepAliveMethod = sipAccReg.getKeepAliveMethod();

        String keepAliveInterval = sipAccReg.getKeepAliveInterval();

        String dtmfMethod = sipAccReg.getDTMFMethod();
        String dtmfMinimalToneDuration = sipAccReg.getDtmfMinimalToneDuration();

        String voicemailURI = sipAccReg.getVoicemailURI();

        String voicemailCheckURI = sipAccReg.getVoicemailCheckURI();

        boolean xCapEnable = sipAccReg.isXCapEnable();
        boolean xivoEnable = sipAccReg.isXiVOEnable();

        boolean isServerOverridden = sipAccReg.isServerOverridden();

        connectionPanel.setServerOverridden(isServerOverridden);

        // The user ID cannot be edited when modifying an existing account.
        accountPanel.setUserIDEnabled(false);
        accountPanel.setUserID(sipAccReg.getId());

        if (password != null)
        {
            accountPanel.setPassword(password);
            accountPanel.setRememberPassword(true);
        }
        else
        {
            accountPanel.setPassword("");
            accountPanel.setRememberPassword(false);
        }

        connectionPanel.setServerAddress(serverAddress);
        connectionPanel.setServerEnabled(isServerOverridden);

        accountPanel.setDisplayName(displayName);

        if(authName != null && authName.length() > 0)
            connectionPanel.setAuthenticationName(authName);
        connectionPanel.setCertificateId(clientTlsCertificateId);

        connectionPanel.enablesProxyAutoConfigure(
                proxyAutoConfigureEnabled);
        connectionPanel.enablesProxyForceBypassConfigure(
                proxyForceBypassConfigureEnabled);
        connectionPanel.setServerPort(serverPort);
        connectionPanel.setProxy(proxyAddress);

        // The order of the next two fields is important, as a change listener
        // of the transportCombo sets the proxyPortField to its default
        connectionPanel.setSelectedTransport(preferredTransport);
        connectionPanel.setProxyPort(proxyPort);

        securityPanel.loadAccount(sipAccReg.getSecurityRegistration());

        presencePanel.reinit();
        presencePanel.setPresenceEnabled(enablePresence);
        presencePanel.setForcePeerToPeerMode(forceP2P);
        presencePanel.setPollPeriod(pollingPeriod);
        presencePanel.setSubscriptionExpiration(subscriptionPeriod);

        if (!enablePresence)
        {
            presencePanel.setPresenceOptionsEnabled(enablePresence);
        }

        connectionPanel.setKeepAliveMethod(keepAliveMethod);

        connectionPanel.setKeepAliveInterval(keepAliveInterval);

        connectionPanel.setDTMFMethod(dtmfMethod);
        connectionPanel.setDtmfMinimalToneDuration(dtmfMinimalToneDuration);

        boolean mwiEnabled = sipAccReg.isMessageWaitingIndicationsEnabled();
        connectionPanel.setMessageWaitingIndications(mwiEnabled);

        if(StringUtils.isNotEmpty(voicemailURI))
            connectionPanel.setVoicemailURI(voicemailURI);

        if(StringUtils.isNotEmpty(voicemailCheckURI))
            connectionPanel.setVoicemailCheckURI(voicemailCheckURI);

        if(xCapEnable)
        {
            presencePanel.setXCapEnable(xCapEnable);
            presencePanel.setClistOptionEnableEnabled(xCapEnable);
        }
        else if(xivoEnable)
        {
            presencePanel.setXiVOEnable(xivoEnable);
            presencePanel.setClistOptionEnableEnabled(xivoEnable);
        }

        boolean clistUseSipCredentials
                = sipAccReg.isClistOptionUseSipCredentials();

        presencePanel.setClistOptionUseSipCredentials(
                clistUseSipCredentials);
        presencePanel.setClistOptionUseSipCredentialsEnabled(
                clistUseSipCredentials);
        presencePanel.setClistOptionServerUri(
                sipAccReg.getClistOptionServerUri());
        presencePanel.setClistOptionUser(
                sipAccReg.getClistOptionUser());
        presencePanel.setClistOptionPassword(
                sipAccReg.getClistOptionPassword());

        encodingsPanel.loadAccount(sipAccReg.getEncodingsRegistration());
    }

    /**
     * Returns a simple version of this registration form.
     * @return the simple form component
     */
    public Component getSimpleForm()
    {
        SIPAccountCreationFormService createAccountService
            = getCreateAccountService();

        if (createAccountService != null)
            createAccountService.clear();

        // Indicate that this panel is opened in a simple form.
        accountPanel.setSimpleForm(true);

        return accountPanel;
    }

    /**
     * Sets the isModification property.
     * @param isModification indicates if this form is created for modification
     */
    public void setModification(boolean isModification)
    {
        this.isModification = isModification;
    }

    /**
     * Returns the username example.
     * @return the username example string
     */
    public String getUsernameExample()
    {
        return wizard.getUserNameExample();
    }

    /**
     * Sign ups through the web.
     */
    public void webSignup()
    {
        wizard.webSignup();
    }

    /**
     * Returns the sign up link name.
     * @return the sign up link name
     */
    public String getWebSignupLinkName()
    {
        return wizard.getWebSignupLinkName();
    }

    /**
     * Returns the forgot password link name.
     *
     * @return the forgot password link name
     */
    public String getForgotPasswordLinkName()
    {
        return wizard.getForgotPasswordLinkName();
    }

    /**
     * Returns the forgot password link.
     *
     * @return the forgot password link
     */
    public String getForgotPasswordLink()
    {
        return wizard.getForgotPasswordLink();
    }

    /**
     * Returns an instance of <tt>CreateAccountService</tt> through which the
     * user could create an account. This method is meant to be implemented by
     * specific protocol provider wizards.
     * @return an instance of <tt>CreateAccountService</tt>
     */
    public SIPAccountCreationFormService getCreateAccountService()
    {
        return wizard.getCreateAccountService();
    }

    /**
     * Returns the display label used for the sip id field.
     * @return the sip id display label string.
     */
    protected String getUsernameLabel()
    {
        return wizard.getUsernameLabel();
    }

    /**
     * Returns the current sip registration holding all values.
     * @return sip registration.
     */
    public SIPAccountRegistration getRegistration()
    {
        return wizard.getRegistration();
    }

    /**
     * Return the string for add existing account button.
     * @return the string for add existing account button.
     */
    protected String getExistingAccountLabel()
    {
        return wizard.getExistingAccountLabel();
    }

    /**
     * Return the string for create new account button.
     * @return the string for create new account button.
     */
    protected String getCreateAccountLabel()
    {
        return wizard.getCreateAccountLabel();
    }

    /**
     * Selects the create account button.
     */
    void setCreateButtonSelected()
    {
        accountPanel.setCreateButtonSelected();
    }
}
| jitsi/jitsi | src/net/java/sip/communicator/plugin/sipaccregwizz/SIPAccountRegistrationForm.java | Java | apache-2.0 | 20,283 |
/*
* Copyright Terracotta, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ehcache.core.statistics;
/**
 * CachingTierOperationOutcomes: operation-outcome enums observed on a
 * caching tier, one nested enum per operation, used for statistics.
 */
public interface CachingTierOperationOutcomes {

  /**
   * the getOrComputeIfAbsent outcomes
   */
  enum GetOrComputeIfAbsentOutcome implements CachingTierOperationOutcomes {
    /**
     * hit in the tier
     */
    HIT,
    /**
     * fault from lower tier
     */
    FAULTED,
    /**
     * fault failed
     */
    FAULT_FAILED,
    /**
     * fault missed
     */
    FAULT_FAILED_MISS,
    /**
     * miss
     */
    MISS
  }

  /**
   * the invalidate outcomes
   */
  enum InvalidateOutcome implements CachingTierOperationOutcomes {
    /**
     * entry invalidated
     */
    REMOVED,
    /**
     * miss
     */
    MISS
  }

  /**
   * the invalidateAll outcomes
   */
  enum InvalidateAllOutcome implements CachingTierOperationOutcomes {
    /**
     * entries invalidated, without errors
     */
    SUCCESS,
    /**
     * entries invalidated, with errors
     */
    FAILURE
  }
}
| rishabhmonga/ehcache3 | core/src/main/java/org/ehcache/core/statistics/CachingTierOperationOutcomes.java | Java | apache-2.0 | 1,576 |
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_interface.java
// Do not modify
package org.projectfloodlight.openflow.protocol.bsntlv;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.stat.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.oxs.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import java.util.Set;
import io.netty.buffer.ByteBuf;
/**
 * Big Switch Networks TLV carrying a set of user-defined-field (UDF) modes.
 * Generated by LoxiGen ("Do not modify"); instances are immutable and built
 * through the nested {@link Builder}.
 */
public interface OFBsnTlvUdfCapability extends OFObject, OFBsnTlv {
    // TLV type code.
    int getType();
    // The advertised UDF modes.
    Set<OFBsnUdfMode> getValue();
    OFVersion getVersion();
    void writeTo(ByteBuf channelBuffer);
    Builder createBuilder();

    /** Builder for OFBsnTlvUdfCapability instances. */
    public interface Builder extends OFBsnTlv.Builder {
        OFBsnTlvUdfCapability build();
        int getType();
        Set<OFBsnUdfMode> getValue();
        Builder setValue(Set<OFBsnUdfMode> value);
        OFVersion getVersion();
    }
}
| floodlight/loxigen-artifacts | openflowj/gen-src/main/java/org/projectfloodlight/openflow/protocol/bsntlv/OFBsnTlvUdfCapability.java | Java | apache-2.0 | 1,894 |
/**
* @license Copyright 2019 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
'use strict';
const Audit = require('../audit.js');
const ComputedTBT = require('../../computed/metrics/total-blocking-time.js');
const i18n = require('../../lib/i18n/i18n.js');
/** Localizable strings for this audit; rendered in the Lighthouse report. */
const UIStrings = {
  /** Description of the Total Blocking Time (TBT) metric, which calculates the total duration of blocking time for a web page. Blocking times are time periods when the page would be blocked (prevented) from responding to user input (clicks, taps, and keypresses will feel slow to respond). This is displayed within a tooltip when the user hovers on the metric name to see more. No character length limits.*/
  description: 'Sum of all time periods between FCP and Time to Interactive, ' +
      'when task length exceeded 50ms, expressed in milliseconds. [Learn more](https://web.dev/lighthouse-total-blocking-time/).',
};

// Message formatter bound to this file's strings for i18n lookup.
const str_ = i18n.createMessageInstanceIdFn(__filename, UIStrings);
class TotalBlockingTime extends Audit {
  /**
   * Audit metadata: id, localized title/description, scoring mode, and the
   * artifacts this audit consumes.
   * @return {LH.Audit.Meta}
   */
  static get meta() {
    return {
      id: 'total-blocking-time',
      title: str_(i18n.UIStrings.totalBlockingTimeMetric),
      description: str_(UIStrings.description),
      scoreDisplayMode: Audit.SCORING_MODES.NUMERIC,
      requiredArtifacts: ['traces', 'devtoolsLogs', 'TestedAsMobileDevice'],
    };
  }

  /**
   * Log-normal score curve control points, per form factor.
   * @return {{mobile: {scoring: LH.Audit.ScoreOptions}, desktop: {scoring: LH.Audit.ScoreOptions}}}
   */
  static get defaultOptions() {
    // Mobile: a cluster telemetry run over the top 10k mobile sites gave
    // 5th pct = 0ms, 25th pct = 270ms, median = 895ms (404 pages included).
    // A strict 25/75-th fit would score very harshly (a single 350ms task
    // after FCP would yield .5), so these coefficients were semi-arbitrarily
    // picked to give 600ms of jank a score of .5 and 100ms a score of .999.
    // See https://www.desmos.com/calculator/bbsv8fedg5
    const mobileScoring = {p10: 287, median: 600};

    // Desktop: chosen from HTTP Archive desktop results to approximate the
    // curve easing described above:
    //   SELECT
    //     APPROX_QUANTILES(tbtValue, 100)[OFFSET(40)] AS p40_tbt,
    //     APPROX_QUANTILES(tbtValue, 100)[OFFSET(60)] AS p60_tbt
    //   FROM (
    //     SELECT CAST(JSON_EXTRACT_SCALAR(payload, '$._TotalBlockingTime') AS NUMERIC) AS tbtValue
    //     FROM `httparchive.pages.2020_04_01_desktop`
    //   )
    const desktopScoring = {p10: 150, median: 350};

    return {
      mobile: {scoring: mobileScoring},
      desktop: {scoring: desktopScoring},
    };
  }

  /**
   * Audits the page to calculate Total Blocking Time.
   *
   * Blocking Time is any portion of a main-thread task beyond 50ms (a 110ms
   * task contributes 60ms). Total Blocking Time is the sum of all Blocking
   * Time between First Contentful Paint and Time to Interactive (TTI).
   *
   * @param {LH.Artifacts} artifacts
   * @param {LH.Audit.Context} context
   * @return {Promise<LH.Audit.Product>}
   */
  static async audit(artifacts, context) {
    const computationData = {
      trace: artifacts.traces[Audit.DEFAULT_PASS],
      devtoolsLog: artifacts.devtoolsLogs[Audit.DEFAULT_PASS],
      settings: context.settings,
    };
    const metricResult = await ComputedTBT.request(computationData, context);

    // Score against the curve matching the device the page was tested as.
    const scoreOptions = artifacts.TestedAsMobileDevice === false
      ? context.options.desktop
      : context.options.mobile;

    return {
      score: Audit.computeLogNormalScore(scoreOptions.scoring, metricResult.timing),
      numericValue: metricResult.timing,
      numericUnit: 'millisecond',
      displayValue: str_(i18n.UIStrings.ms, {timeInMs: metricResult.timing}),
    };
  }
}
// Export the audit class; UIStrings is attached so the i18n pipeline can
// collect this file's strings.
module.exports = TotalBlockingTime;
module.exports.UIStrings = UIStrings;
| umaar/lighthouse | lighthouse-core/audits/metrics/total-blocking-time.js | JavaScript | apache-2.0 | 4,644 |
/*******************************************************************************
*
* Copyright (C) 2015-2021 the BBoxDB project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*******************************************************************************/
package org.bboxdb.distribution.zookeeper;
import org.bboxdb.distribution.membership.BBoxDBInstance;
import org.bboxdb.distribution.membership.BBoxDBInstanceState;
import org.bboxdb.distribution.membership.ZookeeperBBoxDBInstanceAdapter;
import org.bboxdb.misc.BBoxDBService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Registers the local BBoxDB instance in zookeeper on startup and marks it
 * as failed on shutdown.
 */
public class ZookeeperInstanceRegisterer implements BBoxDBService {

	/**
	 * The instance that is registered in zookeeper
	 */
	private final BBoxDBInstance instance;

	/**
	 * The zookeeper client
	 */
	private final ZookeeperClient zookeeperClient;

	public ZookeeperInstanceRegisterer() {
		this.instance = ZookeeperClientFactory.getLocalInstanceName();
		this.zookeeperClient = ZookeeperClientFactory.getZookeeperClient();
	}

	public ZookeeperInstanceRegisterer(final BBoxDBInstance instance, final ZookeeperClient zookeeperClient) {
		this.instance = instance;
		this.zookeeperClient = zookeeperClient;
	}

	/**
	 * The logger
	 */
	// Fixed: the logger was created for ZookeeperClient.class, which attributed
	// this class' log output to the wrong logger category.
	private final static Logger logger = LoggerFactory.getLogger(ZookeeperInstanceRegisterer.class);

	@Override
	public void init() {
		if (instance == null) {
			logger.error("Unable to determine local instance name");
			return;
		}

		try {
			// Publish the node details and the instance state to zookeeper
			final ZookeeperBBoxDBInstanceAdapter zookeeperBBoxDBInstanceAdapter
				= new ZookeeperBBoxDBInstanceAdapter(zookeeperClient);

			zookeeperBBoxDBInstanceAdapter.updateNodeInfo(instance);
			zookeeperBBoxDBInstanceAdapter.updateStateData(instance);
		} catch (ZookeeperException e) {
			logger.error("Exception while registering instance", e);
		}
	}

	@Override
	public void shutdown() {
		try {
			// Mark the instance as failed so other nodes stop routing to it
			final ZookeeperBBoxDBInstanceAdapter zookeeperBBoxDBInstanceAdapter
				= new ZookeeperBBoxDBInstanceAdapter(zookeeperClient);

			instance.setState(BBoxDBInstanceState.FAILED);
			zookeeperBBoxDBInstanceAdapter.updateStateData(instance);
		} catch (Exception e) {
			logger.error("Exception while updating instance", e);
		}
	}

	@Override
	public String getServicename() {
		return "Instance registerer for: " + instance;
	}
}
| jnidzwetzki/scalephant | bboxdb-server/src/main/java/org/bboxdb/distribution/zookeeper/ZookeeperInstanceRegisterer.java | Java | apache-2.0 | 2,853 |
using Lucene.Net.Analysis.Core;
using Lucene.Net.Support;
using NUnit.Framework;
using System.IO;
using System.Text;
using System.Text.RegularExpressions;
namespace Lucene.Net.Analysis.Miscellaneous
{
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/// <summary>
/// Verifies the behavior of PatternAnalyzer.
/// </summary>
#pragma warning disable 612, 618
    public class PatternAnalyzerTest : BaseTokenStreamTestCase
    {
        /// <summary>
        /// PatternAnalyzer configured with a non-word pattern behaves much like
        /// SimpleAnalyzer (depending upon the lowercase/stopword options).
        /// </summary>
        [Test]
        public virtual void TestNonWordPattern()
        {
            // Split on non-letters; keep original case, keep stop words.
            PatternAnalyzer caseSensitive = new PatternAnalyzer(TEST_VERSION_CURRENT, PatternAnalyzer.NON_WORD_PATTERN, false, null);
            Check(caseSensitive, "The quick brown Fox,the abcd1234 (56.78) dc.", new string[] { "The", "quick", "brown", "Fox", "the", "abcd", "dc" });

            // Same pattern, but lowercased with English stop words removed.
            PatternAnalyzer withStopWords = new PatternAnalyzer(TEST_VERSION_CURRENT, PatternAnalyzer.NON_WORD_PATTERN, true, StopAnalyzer.ENGLISH_STOP_WORDS_SET);
            Check(withStopWords, "The quick brown Fox,the abcd1234 (56.78) dc.", new string[] { "quick", "brown", "fox", "abcd", "dc" });
        }

        /// <summary>
        /// PatternAnalyzer configured with a whitespace pattern behaves much like
        /// WhitespaceAnalyzer (depending upon the lowercase/stopword options).
        /// </summary>
        [Test]
        public virtual void TestWhitespacePattern()
        {
            // Split on whitespace; keep original case, keep stop words.
            PatternAnalyzer caseSensitive = new PatternAnalyzer(TEST_VERSION_CURRENT, PatternAnalyzer.WHITESPACE_PATTERN, false, null);
            Check(caseSensitive, "The quick brown Fox,the abcd1234 (56.78) dc.", new string[] { "The", "quick", "brown", "Fox,the", "abcd1234", "(56.78)", "dc." });

            // Same pattern, but lowercased with English stop words removed.
            PatternAnalyzer withStopWords = new PatternAnalyzer(TEST_VERSION_CURRENT, PatternAnalyzer.WHITESPACE_PATTERN, true, StopAnalyzer.ENGLISH_STOP_WORDS_SET);
            Check(withStopWords, "The quick brown Fox,the abcd1234 (56.78) dc.", new string[] { "quick", "brown", "fox,the", "abcd1234", "(56.78)", "dc." });
        }

        /// <summary>
        /// PatternAnalyzer with a custom pattern; here, text is tokenized on the
        /// comma ",".
        /// </summary>
        [Test]
        public virtual void TestCustomPattern()
        {
            // Split on commas; keep original case, keep stop words.
            PatternAnalyzer caseSensitive = new PatternAnalyzer(TEST_VERSION_CURRENT, new Regex(",", RegexOptions.Compiled), false, null);
            Check(caseSensitive, "Here,Are,some,Comma,separated,words,", new string[] { "Here", "Are", "some", "Comma", "separated", "words" });

            // Same pattern, lowercased with English stop words removed ("Are" is dropped).
            PatternAnalyzer withStopWords = new PatternAnalyzer(TEST_VERSION_CURRENT, new Regex(",", RegexOptions.Compiled), true, StopAnalyzer.ENGLISH_STOP_WORDS_SET);
            Check(withStopWords, "Here,Are,some,Comma,separated,words,", new string[] { "here", "some", "comma", "separated", "words" });
        }

        /// <summary>
        /// PatternAnalyzer against a large document made of two very long tokens.
        /// </summary>
        [Test]
        public virtual void TestHugeDocument()
        {
            // 5000 a's, a single space, then 2000 b's.
            string wordA = new string('a', 5000);
            string wordB = new string('b', 2000);
            string document = wordA + " " + wordB;

            // Split on whitespace; keep original case, keep stop words.
            PatternAnalyzer analyzer = new PatternAnalyzer(TEST_VERSION_CURRENT, PatternAnalyzer.WHITESPACE_PATTERN, false, null);
            Check(analyzer, document, new string[] { wordA, wordB });
        }

        /// <summary>
        /// Verifies the analyzer produces the expected tokens via each supported
        /// input path:
        /// <ul>
        /// <li>Analysis with a normal Reader
        /// <li>Analysis with a FastStringReader
        /// <li>Analysis with a String
        /// </ul>
        /// </summary>
        private void Check(PatternAnalyzer analyzer, string document, string[] expected)
        {
            // Ordinary analysis of a Reader.
            AssertAnalyzesTo(analyzer, document, expected);

            // Analysis with PatternAnalyzer's own "FastStringReader".
            TokenStream fastReaderStream = analyzer.GetTokenStream("dummy", new PatternAnalyzer.FastStringReader(document));
            AssertTokenStreamContents(fastReaderStream, expected);

            // Analysis of a plain string via a StringReader.
            TokenStream stringReaderStream = analyzer.GetTokenStream("dummy", new StringReader(document));
            AssertTokenStreamContents(stringReaderStream, expected);
        }

        /// <summary>
        /// Blast some random strings through the analyzer.
        /// </summary>
        [Test]
        public virtual void TestRandomStrings()
        {
            Analyzer analyzer = new PatternAnalyzer(TEST_VERSION_CURRENT, new Regex(",", RegexOptions.Compiled), true, StopAnalyzer.ENGLISH_STOP_WORDS_SET);
            CheckRandomData(Random(), analyzer, 10000 * RANDOM_MULTIPLIER);
        }
    }
#pragma warning restore 612, 618
} | laimis/lucenenet | src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/PatternAnalyzerTest.cs | C# | apache-2.0 | 6,564 |
/*
* Copyright 2015 Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.ui.impl;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.eclipse.jetty.websocket.WebSocket;
import org.onlab.osgi.ServiceDirectory;
import org.onlab.osgi.ServiceNotFoundException;
import org.onosproject.cluster.ClusterService;
import org.onosproject.cluster.ControllerNode;
import org.onosproject.ui.UiConnection;
import org.onosproject.ui.UiExtensionService;
import org.onosproject.ui.UiMessageHandlerFactory;
import org.onosproject.ui.UiMessageHandler;
import org.onosproject.ui.UiTopoOverlayFactory;
import org.onosproject.ui.topo.TopoConstants;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
/**
* Web socket capable of interacting with the GUI.
*/
/**
 * Web socket capable of interacting with the GUI.
 */
public class UiWebSocket
        implements UiConnection, WebSocket.OnTextMessage, WebSocket.OnControl {

    private static final Logger log = LoggerFactory.getLogger(UiWebSocket.class);

    // Connections quiet for longer than this are considered idle.
    private static final long MAX_AGE_MS = 30_000;

    // WebSocket control-frame op-codes (RFC 6455).
    private static final byte PING = 0x9;
    private static final byte PONG = 0xA;
    private static final byte[] PING_DATA = new byte[]{(byte) 0xde, (byte) 0xad};

    private final ServiceDirectory directory;

    private Connection connection;
    private FrameConnection control;

    private final ObjectMapper mapper = new ObjectMapper();

    // Timestamp of the last inbound frame; used for idle detection.
    private long lastActive = System.currentTimeMillis();

    // Message handlers keyed by event type, plus the topology overlay cache.
    private Map<String, UiMessageHandler> handlers;
    private TopoOverlayCache overlayCache;

    /**
     * Creates a new web-socket for serving data to GUI.
     *
     * @param directory service directory
     */
    public UiWebSocket(ServiceDirectory directory) {
        this.directory = directory;
    }

    /**
     * Issues a close on the connection.
     */
    synchronized void close() {
        destroyHandlersAndOverlays();
        // Fixed: the connection may be null if onOpen() failed (it nulls the
        // reference) or never ran; guard to avoid a NullPointerException.
        if (connection != null && connection.isOpen()) {
            connection.close();
        }
    }

    /**
     * Indicates if this connection is idle.
     *
     * @return true if idle or closed
     */
    synchronized boolean isIdle() {
        long quietFor = System.currentTimeMillis() - lastActive;
        boolean idle = quietFor > MAX_AGE_MS;
        if (idle || (connection != null && !connection.isOpen())) {
            log.debug("IDLE (or closed) websocket [{} ms]", quietFor);
            return true;
        } else if (connection != null) {
            // Probe the peer; the PONG reply refreshes lastActive via onControl().
            try {
                control.sendControl(PING, PING_DATA, 0, PING_DATA.length);
            } catch (IOException e) {
                log.warn("Unable to send ping message due to: ", e);
            }
        }
        return false;
    }

    @Override
    public synchronized void onOpen(Connection connection) {
        this.connection = connection;
        this.control = (FrameConnection) connection;
        try {
            createHandlersAndOverlays();
            sendInstanceData();
            log.info("GUI client connected");

        } catch (ServiceNotFoundException e) {
            // Services are gone (e.g. shutdown in progress); refuse the connection.
            log.warn("Unable to open GUI connection; services have been shut-down", e);
            this.connection.close();
            this.connection = null;
            this.control = null;
        }
    }

    @Override
    public synchronized void onClose(int closeCode, String message) {
        destroyHandlersAndOverlays();
        log.info("GUI client disconnected [close-code={}, message={}]",
                 closeCode, message);
    }

    @Override
    public boolean onControl(byte controlCode, byte[] data, int offset, int length) {
        // Any control frame (including PONG replies) counts as activity.
        lastActive = System.currentTimeMillis();
        return true;
    }

    @Override
    public void onMessage(String data) {
        log.debug("onMessage: {}", data);
        lastActive = System.currentTimeMillis();
        try {
            // Dispatch on the "event" field to the matching message handler.
            ObjectNode message = (ObjectNode) mapper.reader().readTree(data);
            String type = message.path("event").asText("unknown");
            UiMessageHandler handler = handlers.get(type);
            if (handler != null) {
                handler.process(message);
            } else {
                log.warn("No GUI message handler for type {}", type);
            }
        } catch (Exception e) {
            log.warn("Unable to parse GUI message {} due to {}", data, e);
            log.debug("Boom!!!", e);
        }
    }

    @Override
    public synchronized void sendMessage(ObjectNode message) {
        try {
            if (connection.isOpen()) {
                connection.sendMessage(message.toString());
            }
        } catch (IOException e) {
            log.warn("Unable to send message {} to GUI due to {}", message, e);
            log.debug("Boom!!!", e);
        }
    }

    @Override
    public synchronized void sendMessage(String type, long sid, ObjectNode payload) {
        // Envelope format: {event, [sid], payload}.
        ObjectNode message = mapper.createObjectNode();
        message.put("event", type);
        if (sid > 0) {
            message.put("sid", sid);
        }
        message.set("payload", payload);
        sendMessage(message);
    }

    // Creates new message handlers.
    private synchronized void createHandlersAndOverlays() {
        log.debug("creating handlers and overlays...");
        handlers = new HashMap<>();
        overlayCache = new TopoOverlayCache();

        UiExtensionService service = directory.get(UiExtensionService.class);
        service.getExtensions().forEach(ext -> {
            UiMessageHandlerFactory factory = ext.messageHandlerFactory();
            if (factory != null) {
                factory.newHandlers().forEach(handler -> {
                    try {
                        handler.init(this, directory);
                        handler.messageTypes().forEach(type -> handlers.put(type, handler));

                        // need to inject the overlay cache into topology message handler
                        if (handler instanceof TopologyViewMessageHandler) {
                            ((TopologyViewMessageHandler) handler).setOverlayCache(overlayCache);
                        }
                    } catch (Exception e) {
                        // One broken handler must not prevent the others from loading.
                        log.warn("Unable to setup handler {} due to", handler, e);
                    }
                });
            }

            UiTopoOverlayFactory overlayFactory = ext.topoOverlayFactory();
            if (overlayFactory != null) {
                overlayFactory.newOverlays().forEach(overlayCache::add);
            }
        });
        log.debug("#handlers = {}, #overlays = {}", handlers.size(),
                  overlayCache.size());
    }

    // Destroys message handlers.
    private synchronized void destroyHandlersAndOverlays() {
        log.debug("destroying handlers and overlays...");
        handlers.forEach((type, handler) -> handler.destroy());
        handlers.clear();

        if (overlayCache != null) {
            overlayCache.destroy();
            overlayCache = null;
        }
    }

    // Sends cluster node/instance information to allow GUI to fail-over.
    private void sendInstanceData() {
        ClusterService service = directory.get(ClusterService.class);
        ArrayNode instances = mapper.createArrayNode();

        for (ControllerNode node : service.getNodes()) {
            ObjectNode instance = mapper.createObjectNode()
                    .put("id", node.id().toString())
                    .put("ip", node.ip().toString())
                    .put(TopoConstants.Glyphs.UI_ATTACHED,
                         node.equals(service.getLocalNode()));
            instances.add(instance);
        }

        ObjectNode payload = mapper.createObjectNode();
        payload.set("clusterNodes", instances);
        sendMessage("bootstrap", 0, payload);
    }
}
| sonu283304/onos | web/gui/src/main/java/org/onosproject/ui/impl/UiWebSocket.java | Java | apache-2.0 | 8,454 |
/* Copyright 2016 Braden Farmer
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.farmerbb.taskbar.fragment;
import android.content.ActivityNotFoundException;
import android.content.Intent;
import android.content.SharedPreferences;
import android.graphics.drawable.Drawable;
import android.net.Uri;
import android.os.Bundle;
import android.preference.Preference;
import androidx.appcompat.app.ActionBar;
import androidx.appcompat.app.AlertDialog;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.content.ContextCompat;
import com.farmerbb.taskbar.BuildConfig;
import com.farmerbb.taskbar.R;
import com.farmerbb.taskbar.activity.MainActivity;
import com.farmerbb.taskbar.util.U;
import java.text.NumberFormat;
import java.util.Calendar;
import java.util.Currency;
import java.util.Locale;
import java.util.TimeZone;
import static com.farmerbb.taskbar.util.Constants.*;
/**
 * Top-level "About" settings screen: wires up navigation to the other
 * preference screens and (on free Play Store builds) a donate entry.
 */
public class AboutFragment extends SettingsFragment {

    // Number of times the user declined the donate dialog; after the third
    // decline the donate category is hidden permanently.
    private int noThanksCount = 0;

    @Override
    protected void loadPrefs() {
        // Add preferences
        addPreferencesFromResource(R.xml.tb_pref_base);
        addPreferencesFromResource(R.xml.tb_pref_about);

        boolean isLibrary = U.isLibrary(getActivity());

        if(!isLibrary) {
            SharedPreferences pref = U.getSharedPreferences(getActivity());

            // Only show the donate entry on the free Play Store build, while the
            // user has not opted out of seeing it.
            if(getActivity().getPackageName().equals(BuildConfig.BASE_APPLICATION_ID)
                    && U.isPlayStoreInstalled(getActivity())
                    && U.isPlayStoreRelease(getActivity())
                    && !U.isSystemApp(getActivity())
                    && !pref.getBoolean(PREF_HIDE_DONATE, false)) {
                findPreference(PREF_DONATE).setOnPreferenceClickListener(this);
            } else
                getPreferenceScreen().removePreference(findPreference("donate_category"));
        }

        // Set OnClickListeners for certain preferences
        if(U.canEnableFreeform(getActivity()))
            findPreference(PREF_PREF_SCREEN_FREEFORM).setOnPreferenceClickListener(this);
        else
            getPreferenceScreen().removePreference(findPreference(PREF_PREF_SCREEN_FREEFORM));

        if(U.isDesktopModeSupported(getActivity()) && !isLibrary) {
            findPreference(PREF_PREF_SCREEN_DESKTOP_MODE).setOnPreferenceClickListener(this);
            findPreference(PREF_PREF_SCREEN_DESKTOP_MODE).setIcon(getDesktopModeDrawable());
        } else
            getPreferenceScreen().removePreference(findPreference(PREF_PREF_SCREEN_DESKTOP_MODE));

        findPreference(PREF_PREF_SCREEN_GENERAL).setOnPreferenceClickListener(this);
        findPreference(PREF_PREF_SCREEN_APPEARANCE).setOnPreferenceClickListener(this);
        findPreference(PREF_PREF_SCREEN_RECENT_APPS).setOnPreferenceClickListener(this);
        findPreference(PREF_PREF_SCREEN_ADVANCED).setOnPreferenceClickListener(this);

        if(!isLibrary) {
            // Copyright year is derived from the build timestamp (US Mountain time).
            Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("America/Denver"));
            calendar.setTimeInMillis(BuildConfig.TIMESTAMP);

            int year = calendar.get(Calendar.YEAR);

            if(U.isConsumerBuild(getActivity())) {
                // U+1F601 ("beaming face with smiling eyes")
                String emoji = new String(Character.toChars(0x1F601));
                findPreference(PREF_ABOUT).setSummary(getString(R.string.tb_pref_about_description, year, emoji));
                findPreference(PREF_ABOUT).setOnPreferenceClickListener(this);
            } else
                findPreference(PREF_ABOUT).setSummary(getString(R.string.tb_pref_about_description_alt, year));
        }
    }

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);

        // Title and back arrow are delegated to the hosting MainActivity.
        AppCompatActivity activity = (AppCompatActivity) getActivity();
        activity.setTitle(((MainActivity) getActivity()).getAboutFragmentTitle());

        ActionBar actionBar = activity.getSupportActionBar();
        if(actionBar != null)
            actionBar.setDisplayHomeAsUpEnabled(((MainActivity) getActivity()).getAboutFragmentBackArrow());
    }

    @Override
    public boolean onPreferenceClick(final Preference p) {
        final SharedPreferences pref = U.getSharedPreferences(getActivity());

        switch(p.getKey()) {
            case PREF_ABOUT:
                U.checkForUpdates(getActivity());
                break;
            case PREF_DONATE:
                // Offer the paid app; the price shown is formatted as USD.
                NumberFormat format = NumberFormat.getCurrencyInstance();
                format.setCurrency(Currency.getInstance(Locale.US));

                AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
                builder.setTitle(R.string.tb_pref_title_donate)
                        .setMessage(getString(R.string.tb_dialog_donate_message, format.format(1.99)))
                        .setPositiveButton(R.string.tb_action_ok, (dialog, which) -> {
                            Intent intent2 = new Intent(Intent.ACTION_VIEW);
                            intent2.setData(Uri.parse("https://play.google.com/store/apps/details?id=" + BuildConfig.PAID_APPLICATION_ID));
                            intent2.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);

                            try {
                                startActivity(intent2);
                            } catch (ActivityNotFoundException ignored) {}
                        })
                        .setNegativeButton(noThanksCount == 2 ? R.string.tb_action_dont_show_again : R.string.tb_action_no_thanks, (dialog, which) -> {
                            noThanksCount++;

                            // Third decline: persist the opt-out and hide the category.
                            if(noThanksCount == 3) {
                                pref.edit().putBoolean(PREF_HIDE_DONATE, true).apply();
                                getPreferenceScreen().removePreference(findPreference("donate_category"));
                            }
                        });

                AlertDialog dialog = builder.create();
                dialog.show();
                break;
            case PREF_PREF_SCREEN_GENERAL:
                navigateTo(new GeneralFragment());
                break;
            case PREF_PREF_SCREEN_APPEARANCE:
                navigateTo(new AppearanceFragment());
                break;
            case PREF_PREF_SCREEN_RECENT_APPS:
                navigateTo(new RecentAppsFragment());
                break;
            case PREF_PREF_SCREEN_FREEFORM:
                navigateTo(new FreeformModeFragment());
                break;
            case PREF_PREF_SCREEN_DESKTOP_MODE:
                navigateTo(new DesktopModeFragment());
                break;
            case PREF_PREF_SCREEN_ADVANCED:
                navigateTo(new AdvancedFragment());
                break;
        }

        return super.onPreferenceClick(p);
    }

    // Loads the desktop-mode icon and scales it to the settings icon size.
    private Drawable getDesktopModeDrawable() {
        Drawable loadedIcon = ContextCompat.getDrawable(getActivity(), R.drawable.tb_desktop_mode);
        if(loadedIcon == null) return null;

        return U.resizeDrawable(getActivity(), loadedIcon, R.dimen.tb_settings_icon_size);
    }
} | farmerbb/Taskbar | app/src/main/java/com/farmerbb/taskbar/fragment/AboutFragment.java | Java | apache-2.0 | 7,578 |
package io.arabesque.utils.collection;
import io.arabesque.utils.pool.IntArrayListPool;
import net.openhft.koloboke.collect.IntCollection;
import net.openhft.koloboke.collect.IntCursor;
import net.openhft.koloboke.collect.IntIterator;
import net.openhft.koloboke.function.IntConsumer;
import net.openhft.koloboke.function.IntPredicate;
import org.apache.hadoop.io.Writable;
import javax.annotation.Nonnull;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.ConcurrentModificationException;
import java.util.NoSuchElementException;
/**
 * Growable list of primitive ints backed by a plain array. Supports Hadoop
 * {@link Writable} serialization and pooling via {@link IntArrayListPool}.
 */
public class IntArrayList implements ReclaimableIntCollection, Writable {
    /** Default capacity used by the no-arg constructor. */
    private static final int INITIAL_SIZE = 16;

    /** Backing storage; only the first {@code numElements} entries are valid. */
    private int[] backingArray;
    private int numElements;

    /** When true, {@code reclaim()} is a no-op and the list never returns to the pool. */
    private boolean preventReclaim = false;

    /** Lazily created adapter used by {@code addAll(IntCollection)}. */
    private IntConsumer intAdder;

    public IntArrayList() {
        // Fixed: use the declared INITIAL_SIZE constant instead of repeating
        // the literal 16 (the constant was previously declared but unused).
        this(INITIAL_SIZE);
    }

    public IntArrayList(int capacity) {
        ensureCapacity(capacity);
        this.numElements = 0;
    }

    public IntArrayList(boolean preventReclaim) {
        this();
        this.preventReclaim = preventReclaim;
    }

    public IntArrayList(Collection<Integer> collection) {
        this(collection.size());
        addAll(collection);
    }

    /** Copy constructor: the new list gets its own backing array. */
    public IntArrayList(IntArrayList intArrayList) {
        this(intArrayList.backingArray, intArrayList.numElements);
    }

    /** Copies the first {@code numElements} entries of {@code intArray}. */
    public IntArrayList(int[] intArray, int numElements) {
        this.numElements = numElements;
        backingArray = Arrays.copyOf(intArray, numElements);
    }
    /** @return the number of valid elements in the list */
    public int getSize() {
        return numElements;
    }

    /** @return the allocated length of the backing array */
    public int getCapacity() {
        return backingArray.length;
    }

    /** @return how many more elements fit before the backing array must grow */
    public int getRemaining() {
        return getCapacity() - getSize();
    }

    @Override
    public int size() {
        return numElements;
    }

    @Override
    public long sizeAsLong() {
        return numElements;
    }

    /**
     * Grows the backing array so it can hold at least {@code l} elements.
     *
     * @return true if the array was (re)allocated, false if already large enough
     * @throws UnsupportedOperationException if more than Integer.MAX_VALUE
     *         elements are requested
     */
    @Override
    public boolean ensureCapacity(long l) {
        if (l > Integer.MAX_VALUE) {
            throw new UnsupportedOperationException("IntArrayList does not support long sizes yet");
        }

        int minimumSize = (int) l;

        if (backingArray == null) {
            backingArray = new int[minimumSize];
        }
        else if (minimumSize > backingArray.length) {
            // Double the capacity until sufficient; if doubling overflows to a
            // negative value, fall back to exactly the requested size.
            int targetLength = Math.max(backingArray.length, 1);

            while (targetLength < minimumSize) {
                targetLength = targetLength << 1;

                if (targetLength < 0) {
                    targetLength = minimumSize;
                    break;
                }
            }

            backingArray = Arrays.copyOf(backingArray, targetLength);
        }
        else {
            return false;
        }

        return true;
    }

    /**
     * Trims the backing array to exactly the current element count.
     *
     * @return true if the array was reallocated, false if already trimmed
     */
    @Override
    public boolean shrink() {
        if (backingArray.length == numElements) {
            return false;
        }

        backingArray = Arrays.copyOf(backingArray, numElements);
        return true;
    }
    @Override
    public boolean isEmpty() {
        return size() == 0;
    }

    // Unboxes and delegates; a non-Integer or null argument will throw
    // (acceptable for this int-specialised collection).
    @Override
    public boolean contains(Object o) {
        return contains((int) o);
    }

    /** Linear scan over the valid prefix of the backing array. */
    @Override
    public boolean contains(int element) {
        for (int i = 0; i < numElements; ++i) {
            if (backingArray[i] == element) {
                return true;
            }
        }

        return false;
    }

    @Nonnull
    @Override
    public Object[] toArray() {
        return toArray(new Integer[numElements]);
    }

    /**
     * Copies the boxed elements into {@code ts}, growing it if needed; when the
     * target is longer, a trailing null marks the end of the data.
     */
    @Nonnull
    @Override
    public <T> T[] toArray(@Nonnull T[] ts) {
        if (ts.length < numElements) {
            ts = Arrays.copyOf(ts, numElements);
        }

        for (int i = 0; i < numElements; ++i) {
            ts[i] = (T) Integer.valueOf(backingArray[i]);
        }

        if (ts.length > numElements) {
            ts[numElements] = null;
        }

        return ts;
    }

    @Nonnull
    @Override
    public int[] toIntArray() {
        return toArray(new int[numElements]);
    }

    /** Copies the elements into {@code ints}, allocating a new array if too small. */
    @Nonnull
    @Override
    public int[] toArray(@Nonnull int[] ints) {
        if (ints.length < numElements) {
            return Arrays.copyOf(backingArray, numElements);
        }

        System.arraycopy(backingArray, 0, ints, 0, numElements);

        return ints;
    }
    /**
     * Removes all elements from the collection that are smaller than the provided value.
     * @param value Reference value.
     *
     * WARNING: This assumes the array is ordered (sort was called just before).
     */
    public void removeSmaller(int value) {
        // binarySearch returns (-insertionPoint - 1) when the value is absent.
        int targetPosition = Arrays.binarySearch(backingArray, 0, numElements, value);

        if (targetPosition < 0) {
            targetPosition = -targetPosition - 1;
        }

        numElements -= targetPosition;

        // Shift the surviving tail down to index 0.
        if (targetPosition != 0 && numElements > 0) {
            System.arraycopy(backingArray, targetPosition, backingArray, 0, numElements);
        }
    }

    /**
     * Removes all elements from the collection that are bigger than the provided value.
     * @param value Reference value.
     *
     * WARNING: This assumes the array is ordered (sort was called just before).
     */
    public void removeBigger(int value) {
        int targetPosition = Arrays.binarySearch(backingArray, 0, numElements, value);

        if (targetPosition < 0) {
            targetPosition = -targetPosition - 1;
        }

        // Truncation suffices: everything from targetPosition onward is dropped.
        numElements = targetPosition;
    }
    /** Serializes the list as the element count followed by each element. */
    @Override
    public void write(DataOutput dataOutput) throws IOException {
        dataOutput.writeInt(numElements);

        for (int i = 0; i < numElements; ++i) {
            dataOutput.writeInt(backingArray[i]);
        }
    }

    /** Replaces the current contents with a list serialized by {@link #write}. */
    @Override
    public void readFields(DataInput dataInput) throws IOException {
        clear();

        numElements = dataInput.readInt();
        ensureCanAddNElements(numElements);

        for (int i = 0; i < numElements; ++i) {
            backingArray[i] = dataInput.readInt();
        }
    }

    /** Returns this instance to the shared pool, unless reclaiming is disabled. */
    @Override
    public void reclaim() {
        if (preventReclaim) {
            return;
        }

        IntArrayListPool.instance().reclaimObject(this);
    }
    /** Koloboke-style cursor over this list. */
    private class IntArrayListCursor implements IntCursor {
        // Index of the cursor's current element; -1 before the first moveNext().
        private int index;

        public IntArrayListCursor() {
            this.index = -1;
        }

        @Override
        public void forEachForward(@Nonnull IntConsumer intConsumer) {
            // NOTE(review): iteration starts at the *current* index, so calling
            // this before the first moveNext() would read backingArray[-1] —
            // confirm against the Koloboke Cursor.forEachForward contract
            // (which describes acting on elements relative to the current
            // position) before relying on this behavior.
            int localNumElements = numElements;

            for (int i = index; i < localNumElements; ++i) {
                intConsumer.accept(backingArray[i]);
            }

            // Fail fast if the consumer structurally modified the list.
            if(localNumElements != numElements) {
                throw new ConcurrentModificationException();
            } else {
                this.index = numElements;
            }
        }

        @Override
        public int elem() {
            if (index < 0 || index >= numElements) {
                throw new IllegalStateException();
            }

            return backingArray[index];
        }

        @Override
        public boolean moveNext() {
            ++index;

            return index >= 0 && index < numElements;
        }

        @Override
        public void remove() {
            throw new UnsupportedOperationException();
        }
    }
private class IntArrayListIterator implements IntIterator {
private int index;
public IntArrayListIterator() {
this.index = -1;
}
@Override
public int nextInt() {
if (index >= numElements - 1) {
throw new NoSuchElementException();
}
return backingArray[++index];
}
@Override
public void forEachRemaining(@Nonnull IntConsumer intConsumer) {
int localNumElements = numElements;
for (int i = index + 1; i < localNumElements - 1; ++i) {
intConsumer.accept(backingArray[i]);
}
if (localNumElements != numElements) {
throw new ConcurrentModificationException();
} else {
index = numElements - 1;
}
}
@Override
public boolean hasNext() {
return index < numElements - 1;
}
@Override
public Integer next() {
return nextInt();
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
}
@Nonnull
@Override
public IntCursor cursor() {
return new IntArrayListCursor();
}
@Nonnull
@Override
public IntIterator iterator() {
return new IntArrayListIterator();
}
@Override
public void forEach(@Nonnull IntConsumer intConsumer) {
for (int i = 0; i < numElements; ++i) {
intConsumer.accept(backingArray[i]);
}
}
@Override
public boolean forEachWhile(@Nonnull IntPredicate intPredicate) {
for (int i = 0; i < numElements; ++i) {
if (!intPredicate.test(backingArray[i])) {
return false;
}
}
return true;
}
    @Override
    public boolean add(@Nonnull Integer integer) {
        return add((int) integer);
    }

    /** Appends {@code newValue}, growing the backing array if required. Always true. */
    public boolean add(int newValue) {
        ensureCanAddNewElement();
        backingArray[numElements++] = newValue;
        return true;
    }

    // Unboxes and removes the first occurrence, if any.
    @Override
    public boolean remove(Object o) {
        return removeInt((int) o);
    }

    @Override
    public boolean containsAll(Collection<?> c) {
        for (Object o : c) {
            if (!contains(o)) {
                return false;
            }
        }

        return true;
    }

    /** @return true if at least one element was appended */
    @Override
    public boolean addAll(Collection<? extends Integer> c) {
        if (c == null || c.size() == 0) {
            return false;
        }

        // Reserve all required space up front instead of growing incrementally.
        ensureCanAddNElements(c.size());

        for (int o : c) {
            add(o);
        }

        return true;
    }

    /** Primitive-specialised bulk add that avoids boxing the source elements. */
    public boolean addAll(IntCollection c) {
        if (c == null || c.size() == 0) {
            return false;
        }

        ensureCanAddNElements(c.size());

        // Lazily create a reusable adapter that funnels elements into add(int).
        if (intAdder == null) {
            intAdder = new IntConsumer() {
                @Override
                public void accept(int i) {
                    add(i);
                }
            };
        }

        c.forEach(intAdder);

        return true;
    }
    @Override
    public boolean removeAll(Collection<?> c) {
        return removeBasedOnCollection(c, true);
    }

    @Override
    public boolean retainAll(Collection<?> c) {
        return removeBasedOnCollection(c, false);
    }

    /**
     * Removes every element whose membership in {@code c} equals
     * {@code ifPresent} (true = removeAll semantics, false = retainAll).
     * Iterates backwards so left-shifting removals cannot skip elements.
     */
    private boolean removeBasedOnCollection(Collection<?> c, boolean ifPresent) {
        boolean removedAtLeastOne = false;

        for (int i = numElements - 1; i >= 0; --i) {
            int e = backingArray[i];

            boolean collectionContainsE = c.contains(e);

            if (ifPresent == collectionContainsE) {
                remove(i);
                removedAtLeastOne = true;
            }
        }

        return removedAtLeastOne;
    }

    /** Removes the first occurrence of {@code targetValue}, if present. */
    @Override
    public boolean removeInt(int targetValue) {
        for (int i = 0; i < numElements; ++i) {
            int e = backingArray[i];

            if (e == targetValue) {
                remove(i);
                return true;
            }
        }

        return false;
    }
@Override
public boolean removeIf(@Nonnull IntPredicate intPredicate) {
boolean removedAtLeastOne = false;
for (int i = 0; i < numElements; ++i) {
if (intPredicate.test(backingArray[i])) {
removedAtLeastOne = true;
remove(i);
}
}
return removedAtLeastOne;
}
    /**
     * Removes the element at {@code index}, shifting later elements one slot left.
     * NOTE(review): throws IllegalArgumentException (not IndexOutOfBoundsException)
     * for a bad index — callers may depend on this.
     * @return the removed element
     */
    public int remove(int index) {
        if (index < 0 || index >= numElements) {
            throw new IllegalArgumentException();
        }
        int removedElement = backingArray[index];
        --numElements;
        if (index != numElements) {
            // Close the gap; no-op when removing the last element.
            System.arraycopy(backingArray, index + 1, backingArray, index, numElements - index);
        }
        return removedElement;
    }
    /** @return the element at {@code index}; throws ArrayIndexOutOfBoundsException if out of range. */
    public int get(int index) {
        checkIndex(index);
        return getUnchecked(index);
    }
    /** Unvalidated read — caller guarantees {@code index} is in range. */
    public int getUnchecked(int index) {
        return backingArray[index];
    }
    /** Overwrites the element at {@code index}; throws ArrayIndexOutOfBoundsException if out of range. */
    public void set(int index, int newValue) {
        checkIndex(index);
        setUnchecked(index, newValue);
    }
    /** Unvalidated write — caller guarantees {@code index} is in range. */
    public void setUnchecked(int index, int newValue) {
        backingArray[index] = newValue;
    }
    /** Logical clear: resets the size; the backing array keeps its capacity and old values. */
    public void clear() {
        numElements = 0;
    }
    /** Exposes the internal array directly (may be longer than size()); mutations are visible to this list. */
    public int[] getBackingArray() {
        return backingArray;
    }
    /** Sorts the live prefix [0, numElements) in ascending order, in place. */
    public void sort() {
        Arrays.sort(backingArray, 0, numElements);
    }
    /** Convenience overload; delegates to the long-based ensureCapacity defined elsewhere in this class. */
    public boolean ensureCapacity(int targetCapacity) {
        return ensureCapacity((long) targetCapacity);
    }
@Override
public String toString() {
StringBuilder strBuilder = new StringBuilder();
strBuilder.append("IntArrayList{");
strBuilder.append("backingArray=");
boolean first = true;
for (int i = 0; i < numElements; ++i) {
if (!first) {
strBuilder.append(", ");
}
strBuilder.append(backingArray[i]);
first = false;
}
strBuilder.append(", numElements=");
strBuilder.append(numElements);
strBuilder.append("}");
return strBuilder.toString();
}
    /** Throws ArrayIndexOutOfBoundsException unless {@code 0 <= index < numElements}. */
    private void checkIndex(int index) {
        if (index < 0 || index >= numElements) {
            throw new ArrayIndexOutOfBoundsException(index);
        }
    }
    /** Guarantees capacity for one more element. */
    private void ensureCanAddNewElement() {
        ensureCanAddNElements(1);
    }
private void ensureCanAddNElements(int numNewElements) {
int newTargetSize;
if (backingArray == null) {
newTargetSize = Math.max(numNewElements, INITIAL_SIZE);
} else if (backingArray.length < numElements + numNewElements) {
newTargetSize = getSizeWithPaddingWithoutOverflow(numNewElements, numElements + numNewElements);
} else {
return;
}
ensureCapacity(newTargetSize);
}
    /**
     * Returns a capacity >= {@code targetSize}, obtained by repeatedly
     * doubling {@code currentSize}. If {@code currentSize} already exceeds
     * the target it is returned unchanged; if doubling overflows int, falls
     * back to the exact {@code targetSize}.
     */
    private int getSizeWithPaddingWithoutOverflow(int targetSize, int currentSize) {
        if (currentSize > targetSize) {
            return currentSize;
        }
        // Start from at least 1 so doubling makes progress when currentSize == 0.
        int sizeWithPadding = Math.max(currentSize, 1);
        while (true) {
            int previousSizeWithPadding = sizeWithPadding;
            // Multiply by 2
            sizeWithPadding <<= 1;
            // If we saw an overflow, return simple targetSize
            if (previousSizeWithPadding > sizeWithPadding) {
                return targetSize;
            }
            if (sizeWithPadding >= targetSize) {
                return sizeWithPadding;
            }
        }
    }
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
IntArrayList integers = (IntArrayList) o;
return equals(integers);
}
public boolean equals(IntArrayList intArrayList) {
if (this == intArrayList) return true;
if (intArrayList == null) return false;
if (numElements != intArrayList.numElements) return false;
for (int i = 0; i < numElements; ++i) {
if (backingArray[i] != intArrayList.backingArray[i]) {
return false;
}
}
return true;
}
public boolean equalsCollection(Collection<Integer> intCollection) {
if (this == intCollection) return true;
if (intCollection == null) return false;
if (numElements != intCollection.size()) return false;
int i = 0;
for (Integer e : intCollection) {
if (backingArray[i] != e) {
return false;
}
++i;
}
return true;
}
    /**
     * Element-wise equality against a primitive collection, compared in the
     * cursor's iteration order, without boxing.
     */
    public boolean equalsIntCollection(IntCollection intCollection) {
        if (this == intCollection) return true;
        if (intCollection == null) return false;
        if (numElements != intCollection.size()) return false;
        IntCursor intCursor = intCollection.cursor();
        int i = 0;
        while (intCursor.moveNext()) {
            if (backingArray[i] != intCursor.elem()) {
                return false;
            }
            ++i;
        }
        return true;
    }
    /** Hash over the live prefix only, ArrayList-style (31 * h + element). */
    @Override
    public int hashCode() {
        int result = numElements;
        for (int i = 0; i < numElements; ++i) {
            result = 31 * result + backingArray[i];
        }
        return result;
    }
    /**
     * Removes and returns the last element.
     * On an empty list this becomes remove(-1) which throws IllegalArgumentException.
     */
    public int pop() {
        return remove(numElements - 1);
    }
    /** Drops the last element (no-op when empty). */
    public void removeLast() {
        removeLast(1);
    }
    /**
     * Drops the last {@code n} elements, clamping at empty.
     * NOTE(review): a negative n would *grow* numElements and expose stale
     * slots — assumed n >= 0, confirm at call sites.
     */
    public void removeLast(int n) {
        numElements = Math.max(0, numElements - n);
    }
    /** @return the last element; throws ArrayIndexOutOfBoundsException(-1) when empty. */
    public int getLast() {
        int index = numElements - 1;
        if (index >= 0) {
            return backingArray[index];
        }
        else {
            throw new ArrayIndexOutOfBoundsException(index);
        }
    }
    /** @return the last element, or {@code def} when the list is empty. */
    public int getLastOrDefault(int def) {
        int index = numElements - 1;
        if (index >= 0) {
            return backingArray[index];
        }
        else {
            return def;
        }
    }
public int findLargestCommonPrefixEnd(IntArrayList other) {
if (other == null) {
return 0;
}
int pos;
int minPos = Math.min(size(), other.size());
for (pos = 0; pos < minPos; ++pos) {
if (getUnchecked(pos) != other.getUnchecked(pos)) {
return pos;
}
}
return pos;
}
} | Qatar-Computing-Research-Institute/Arabesque | src/main/java/io/arabesque/utils/collection/IntArrayList.java | Java | apache-2.0 | 17,867 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.plugins.index.p2;
import java.util.List;
import javax.annotation.Nonnull;
import org.apache.jackrabbit.oak.spi.query.QueryIndex;
import org.apache.jackrabbit.oak.spi.query.QueryIndexProvider;
import org.apache.jackrabbit.oak.spi.state.NodeState;
import com.google.common.collect.ImmutableList;
/**
* A provider for property indexes.
* <p>
* Even if there are multiple index definitions, there is only actually one
* PropertyIndex instance, which is used for all indexes.
*
* @see Property2Index
*
*/
public class Property2IndexProvider implements QueryIndexProvider {

    @Override @Nonnull
    public List<QueryIndex> getQueryIndexes(NodeState state) {
        // A single stateless Property2Index serves all property index
        // definitions; the node state is not needed to build it.
        QueryIndex propertyIndex = new Property2Index();
        return ImmutableList.of(propertyIndex);
    }

}
| tteofili/jackrabbit-oak | oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/p2/Property2IndexProvider.java | Java | apache-2.0 | 1,581 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.util;
import com.google.common.net.InetAddresses;
import org.apache.commons.lang.SystemUtils;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.net.NetUtils;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.net.URI;
import java.net.URISyntaxException;
import java.text.DateFormat;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* General string utils
*/
@InterfaceAudience.Private
@InterfaceStability.Unstable
public class StringUtils {
  /**
   * Priority of the StringUtils shutdown hook.
   * 0 is the lowest priority, so this hook runs after higher-priority hooks.
   */
  public static final int SHUTDOWN_HOOK_PRIORITY = 0;
  /**
   * Shell environment variables: $ followed by one letter or _ followed by
   * multiple letters, numbers, or underscores. The group captures the
   * environment variable name without the leading $.
   */
  public static final Pattern SHELL_ENV_VAR_PATTERN =
    Pattern.compile("\\$([A-Za-z_]{1}[A-Za-z0-9_]*)");
  /**
   * Windows environment variables: surrounded by %. The group captures the
   * environment variable name without the leading and trailing %.
   */
  public static final Pattern WIN_ENV_VAR_PATTERN = Pattern.compile("%(.*?)%");
  /**
   * Regular expression that matches and captures environment variable names
   * according to platform-specific rules (chosen once at class load time).
   */
  public static final Pattern ENV_VAR_PATTERN = Shell.WINDOWS ?
    WIN_ENV_VAR_PATTERN : SHELL_ENV_VAR_PATTERN;
/**
* Make a string representation of the exception.
* @param e The exception to stringify
* @return A string with exception name and call stack.
*/
public static String stringifyException(Throwable e) {
StringWriter stm = new StringWriter();
PrintWriter wrt = new PrintWriter(stm);
e.printStackTrace(wrt);
wrt.close();
return stm.toString();
}
  /**
   * Given a full hostname, return the word upto the first dot.
   * IP address literals (as recognized by Guava's InetAddresses) are returned
   * unchanged, so "10.0.0.1" is not truncated to "10".
   * @param fullHostname the full hostname
   * @return the hostname to the first dot
   */
  public static String simpleHostname(String fullHostname) {
    if (InetAddresses.isInetAddress(fullHostname)) {
      return fullHostname;
    }
    int offset = fullHostname.indexOf('.');
    if (offset != -1) {
      return fullHostname.substring(0, offset);
    }
    // No dot at all: already a simple hostname.
    return fullHostname;
  }
  /**
   * Given an integer, return a string that is in an approximate, but human
   * readable format.
   * @param number the number to format
   * @return a human readable form of the integer
   *
   * @deprecated use {@link TraditionalBinaryPrefix#long2String(long, String, int)}.
   */
  @Deprecated
  public static String humanReadableInt(long number) {
    // No unit suffix, one decimal place.
    return TraditionalBinaryPrefix.long2String(number, "", 1);
  }
/** The same as String.format(Locale.ENGLISH, format, objects). */
public static String format(final String format, final Object... objects) {
return String.format(Locale.ENGLISH, format, objects);
}
/**
* Format a percentage for presentation to the user.
* @param fraction the percentage as a fraction, e.g. 0.1 = 10%
* @param decimalPlaces the number of decimal places
* @return a string representation of the percentage
*/
public static String formatPercent(double fraction, int decimalPlaces) {
return format("%." + decimalPlaces + "f%%", fraction*100);
}
/**
* Given an array of strings, return a comma-separated list of its elements.
* @param strs Array of strings
* @return Empty string if strs.length is 0, comma separated list of strings
* otherwise
*/
public static String arrayToString(String[] strs) {
if (strs.length == 0) { return ""; }
StringBuilder sbuf = new StringBuilder();
sbuf.append(strs[0]);
for (int idx = 1; idx < strs.length; idx++) {
sbuf.append(",");
sbuf.append(strs[idx]);
}
return sbuf.toString();
}
/**
* Given an array of bytes it will convert the bytes to a hex string
* representation of the bytes
* @param bytes
* @param start start index, inclusively
* @param end end index, exclusively
* @return hex string representation of the byte array
*/
public static String byteToHexString(byte[] bytes, int start, int end) {
if (bytes == null) {
throw new IllegalArgumentException("bytes == null");
}
StringBuilder s = new StringBuilder();
for(int i = start; i < end; i++) {
s.append(format("%02x", bytes[i]));
}
return s.toString();
}
/** Same as byteToHexString(bytes, 0, bytes.length). */
public static String byteToHexString(byte bytes[]) {
return byteToHexString(bytes, 0, bytes.length);
}
/**
* Given a hexstring this will return the byte array corresponding to the
* string
* @param hex the hex String array
* @return a byte array that is a hex string representation of the given
* string. The size of the byte array is therefore hex.length/2
*/
public static byte[] hexStringToByte(String hex) {
byte[] bts = new byte[hex.length() / 2];
for (int i = 0; i < bts.length; i++) {
bts[i] = (byte) Integer.parseInt(hex.substring(2 * i, 2 * i + 2), 16);
}
return bts;
}
/**
*
* @param uris
*/
public static String uriToString(URI[] uris){
if (uris == null) {
return null;
}
StringBuilder ret = new StringBuilder(uris[0].toString());
for(int i = 1; i < uris.length;i++){
ret.append(",");
ret.append(uris[i].toString());
}
return ret.toString();
}
/**
* @param str
* The string array to be parsed into an URI array.
* @return <tt>null</tt> if str is <tt>null</tt>, else the URI array
* equivalent to str.
* @throws IllegalArgumentException
* If any string in str violates RFC 2396.
*/
public static URI[] stringToURI(String[] str){
if (str == null)
return null;
URI[] uris = new URI[str.length];
for (int i = 0; i < str.length;i++){
try{
uris[i] = new URI(str[i]);
}catch(URISyntaxException ur){
throw new IllegalArgumentException(
"Failed to create uri for " + str[i], ur);
}
}
return uris;
}
  /**
   * Converts each string into a Hadoop {@link Path}.
   * @param str the path strings; null yields null
   * @return the corresponding Path array, or null
   */
  public static Path[] stringToPath(String[] str){
    if (str == null) {
      return null;
    }
    Path[] p = new Path[str.length];
    for (int i = 0; i < str.length;i++){
      p[i] = new Path(str[i]);
    }
    return p;
  }
/**
*
* Given a finish and start time in long milliseconds, returns a
* String in the format Xhrs, Ymins, Z sec, for the time difference between two times.
* If finish time comes before start time then negative valeus of X, Y and Z wil return.
*
* @param finishTime finish time
* @param startTime start time
*/
public static String formatTimeDiff(long finishTime, long startTime){
long timeDiff = finishTime - startTime;
return formatTime(timeDiff);
}
/**
*
* Given the time in long milliseconds, returns a
* String in the format Xhrs, Ymins, Z sec.
*
* @param timeDiff The time difference to format
*/
public static String formatTime(long timeDiff){
StringBuilder buf = new StringBuilder();
long hours = timeDiff / (60*60*1000);
long rem = (timeDiff % (60*60*1000));
long minutes = rem / (60*1000);
rem = rem % (60*1000);
long seconds = rem / 1000;
if (hours != 0){
buf.append(hours);
buf.append("hrs, ");
}
if (minutes != 0){
buf.append(minutes);
buf.append("mins, ");
}
// return "0sec if no difference
buf.append(seconds);
buf.append("sec");
return buf.toString();
}
/**
* Formats time in ms and appends difference (finishTime - startTime)
* as returned by formatTimeDiff().
* If finish time is 0, empty string is returned, if start time is 0
* then difference is not appended to return value.
* @param dateFormat date format to use
* @param finishTime fnish time
* @param startTime start time
* @return formatted value.
*/
public static String getFormattedTimeWithDiff(DateFormat dateFormat,
long finishTime, long startTime){
StringBuilder buf = new StringBuilder();
if (0 != finishTime) {
buf.append(dateFormat.format(new Date(finishTime)));
if (0 != startTime){
buf.append(" (" + formatTimeDiff(finishTime , startTime) + ")");
}
}
return buf.toString();
}
/**
* Returns an arraylist of strings.
* @param str the comma seperated string values
* @return the arraylist of the comma seperated string values
*/
public static String[] getStrings(String str){
Collection<String> values = getStringCollection(str);
if(values.size() == 0) {
return null;
}
return values.toArray(new String[values.size()]);
}
/**
* Returns a collection of strings.
* @param str comma seperated string values
* @return an <code>ArrayList</code> of string values
*/
public static Collection<String> getStringCollection(String str){
String delim = ",";
// String delim = ";";
return getStringCollection(str, delim);
}
/**
* Returns a collection of strings.
*
* @param str
* String to parse
* @param delim
* delimiter to separate the values
* @return Collection of parsed elements.
*/
public static Collection<String> getStringCollection(String str, String delim) {
List<String> values = new ArrayList<String>();
if (str == null)
return values;
StringTokenizer tokenizer = new StringTokenizer(str, delim);
while (tokenizer.hasMoreTokens()) {
values.add(tokenizer.nextToken());
}
return values;
}
/**
* Splits a comma separated value <code>String</code>, trimming leading and trailing whitespace on each value.
* Duplicate and empty values are removed.
* @param str a comma separated <String> with values
* @return a <code>Collection</code> of <code>String</code> values
*/
public static Collection<String> getTrimmedStringCollection(String str){
Set<String> set = new LinkedHashSet<String>(
Arrays.asList(getTrimmedStrings(str)));
set.remove("");
return set;
}
/**
* Splits a comma separated value <code>String</code>, trimming leading and trailing whitespace on each value.
* @param str a comma separated <String> with values
* @return an array of <code>String</code> values
*/
public static String[] getTrimmedStrings(String str){
if (null == str || str.trim().isEmpty()) {
return emptyStringArray;
}
return str.trim().split("\\s*,\\s*");
// return str.trim().split("\\s*;\\s*");
}
public static String[] getMyTrimmedStrings(String str){
if (null == str || str.trim().isEmpty()) {
return emptyStringArray;
}
// return str.trim().split("\\s*,\\s*");
return str.trim().split("\\s*;\\s*");
}
final public static String[] emptyStringArray = {};
final public static char COMMA = ',';
final public static String COMMA_STR = ",";
final public static char ESCAPE_CHAR = '\\';
/**
* Split a string using the default separator
* @param str a string that may have escaped separator
* @return an array of strings
*/
public static String[] split(String str) {
return split(str, ESCAPE_CHAR, COMMA);
}
/**
* Split a string using the given separator
* @param str a string that may have escaped separator
* @param escapeChar a char that be used to escape the separator
* @param separator a separator char
* @return an array of strings
*/
public static String[] split(
String str, char escapeChar, char separator) {
if (str==null) {
return null;
}
ArrayList<String> strList = new ArrayList<String>();
StringBuilder split = new StringBuilder();
int index = 0;
while ((index = findNext(str, separator, escapeChar, index, split)) >= 0) {
++index; // move over the separator for next search
strList.add(split.toString());
split.setLength(0); // reset the buffer
}
strList.add(split.toString());
// remove trailing empty split(s)
int last = strList.size(); // last split
while (--last>=0 && "".equals(strList.get(last))) {
strList.remove(last);
}
return strList.toArray(new String[strList.size()]);
}
/**
* Split a string using the given separator, with no escaping performed.
* @param str a string to be split. Note that this may not be null.
* @param separator a separator char
* @return an array of strings
*/
public static String[] split(
String str, char separator) {
// String.split returns a single empty result for splitting the empty
// string.
if (str.isEmpty()) {
return new String[]{""};
}
ArrayList<String> strList = new ArrayList<String>();
int startIndex = 0;
int nextIndex = 0;
while ((nextIndex = str.indexOf(separator, startIndex)) != -1) {
strList.add(str.substring(startIndex, nextIndex));
startIndex = nextIndex + 1;
}
strList.add(str.substring(startIndex));
// remove trailing empty split(s)
int last = strList.size(); // last split
while (--last>=0 && "".equals(strList.get(last))) {
strList.remove(last);
}
return strList.toArray(new String[strList.size()]);
}
/**
* Finds the first occurrence of the separator character ignoring the escaped
* separators starting from the index. Note the substring between the index
* and the position of the separator is passed.
* @param str the source string
* @param separator the character to find
* @param escapeChar character used to escape
* @param start from where to search
* @param split used to pass back the extracted string
*/
public static int findNext(String str, char separator, char escapeChar,
int start, StringBuilder split) {
int numPreEscapes = 0;
for (int i = start; i < str.length(); i++) {
char curChar = str.charAt(i);
if (numPreEscapes == 0 && curChar == separator) { // separator
return i;
} else {
split.append(curChar);
numPreEscapes = (curChar == escapeChar)
? (++numPreEscapes) % 2
: 0;
}
}
return -1;
}
/**
* Escape commas in the string using the default escape char
* @param str a string
* @return an escaped string
*/
public static String escapeString(String str) {
return escapeString(str, ESCAPE_CHAR, COMMA);
}
/**
* Escape <code>charToEscape</code> in the string
* with the escape char <code>escapeChar</code>
*
* @param str string
* @param escapeChar escape char
* @param charToEscape the char to be escaped
* @return an escaped string
*/
public static String escapeString(
String str, char escapeChar, char charToEscape) {
return escapeString(str, escapeChar, new char[] {charToEscape});
}
// check if the character array has the character
private static boolean hasChar(char[] chars, char character) {
for (char target : chars) {
if (character == target) {
return true;
}
}
return false;
}
/**
* @param charsToEscape array of characters to be escaped
*/
public static String escapeString(String str, char escapeChar,
char[] charsToEscape) {
if (str == null) {
return null;
}
StringBuilder result = new StringBuilder();
for (int i=0; i<str.length(); i++) {
char curChar = str.charAt(i);
if (curChar == escapeChar || hasChar(charsToEscape, curChar)) {
// special char
result.append(escapeChar);
}
result.append(curChar);
}
return result.toString();
}
  /**
   * Unescape commas in the string using the default escape char
   * @param str a string
   * @return an unescaped string
   */
  public static String unEscapeString(String str) {
    return unEscapeString(str, ESCAPE_CHAR, COMMA);
  }
  /**
   * Unescape <code>charToEscape</code> in the string
   * with the escape char <code>escapeChar</code>
   *
   * @param str string
   * @param escapeChar escape char
   * @param charToEscape the escaped char
   * @return an unescaped string
   */
  public static String unEscapeString(
      String str, char escapeChar, char charToEscape) {
    return unEscapeString(str, escapeChar, new char[] {charToEscape});
  }
  /**
   * Removes escape characters, validating that every escape precedes an
   * escapable char and that no escapable char appears unescaped.
   * @param charsToEscape array of characters to unescape
   */
  public static String unEscapeString(String str, char escapeChar,
                                      char[] charsToEscape) {
    if (str == null) {
      return null;
    }
    StringBuilder result = new StringBuilder(str.length());
    // Tracks whether the previous char was an (unconsumed) escape char.
    boolean hasPreEscape = false;
    for (int i=0; i<str.length(); i++) {
      char curChar = str.charAt(i);
      if (hasPreEscape) {
        if (curChar != escapeChar && !hasChar(charsToEscape, curChar)) {
          // no special char
          throw new IllegalArgumentException("Illegal escaped string " + str +
              " unescaped " + escapeChar + " at " + (i-1));
        }
        // otherwise discard the escape char
        result.append(curChar);
        hasPreEscape = false;
      } else {
        if (hasChar(charsToEscape, curChar)) {
          // An escapable char must always be preceded by the escape char.
          throw new IllegalArgumentException("Illegal escaped string " + str +
              " unescaped " + curChar + " at " + i);
        } else if (curChar == escapeChar) {
          hasPreEscape = true;
        } else {
          result.append(curChar);
        }
      }
    }
    if (hasPreEscape ) {
      // A trailing escape char has nothing to escape.
      throw new IllegalArgumentException("Illegal escaped string " + str +
          ", not expecting " + escapeChar + " in the end." );
    }
    return result.toString();
  }
/**
* Return a message for logging.
* @param prefix prefix keyword for the message
* @param msg content of the message
* @return a message for logging
*/
private static String toStartupShutdownString(String prefix, String [] msg) {
StringBuilder b = new StringBuilder(prefix);
b.append("\n/************************************************************");
for(String s : msg)
b.append("\n" + prefix + s);
b.append("\n************************************************************/");
return b.toString();
}
  /**
   * Print a log message for starting up and shutting down
   * @param clazz the class of the server
   * @param args arguments
   * @param LOG the target log object
   */
  public static void startupShutdownMessage(Class<?> clazz, String[] args,
                                     final org.apache.commons.logging.Log LOG) {
    final String hostname = NetUtils.getHostname();
    final String classname = clazz.getSimpleName();
    LOG.info(
        toStartupShutdownString("STARTUP_MSG: ", new String[] {
            "Starting " + classname,
            " host = " + hostname,
            " args = " + Arrays.asList(args),
            " version = " + VersionInfo.getVersion(),
            " classpath = " + System.getProperty("java.class.path"),
            " build = " + VersionInfo.getUrl() + " -r "
                         + VersionInfo.getRevision()
                         + "; compiled by '" + VersionInfo.getUser()
                         + "' on " + VersionInfo.getDate(),
            " java = " + System.getProperty("java.version") }
        )
      );
    // Signal loggers are UNIX-only; failure to register is non-fatal.
    if (SystemUtils.IS_OS_UNIX) {
      try {
        SignalLogger.INSTANCE.register(LOG);
      } catch (Throwable t) {
        LOG.warn("failed to register any UNIX signal loggers: ", t);
      }
    }
    // Emit the matching SHUTDOWN_MSG banner when the JVM exits.
    ShutdownHookManager.get().addShutdownHook(
      new Runnable() {
        @Override
        public void run() {
          LOG.info(toStartupShutdownString("SHUTDOWN_MSG: ", new String[]{
            "Shutting down " + classname + " at " + hostname}));
        }
      }, SHUTDOWN_HOOK_PRIORITY);
  }
  /**
   * The traditional binary prefixes, kilo, mega, ..., exa,
   * which can be represented by a 64-bit integer.
   * TraditionalBinaryPrefix symbol are case insensitive.
   */
  public static enum TraditionalBinaryPrefix {
    // Each constant shifts 10 bits further: KILO = 2^10, MEGA = 2^20, ... EXA = 2^60.
    KILO(10),
    MEGA(KILO.bitShift + 10),
    GIGA(MEGA.bitShift + 10),
    TERA(GIGA.bitShift + 10),
    PETA(TERA.bitShift + 10),
    EXA (PETA.bitShift + 10);
    public final long value;    // 2^bitShift, e.g. 1024 for KILO
    public final char symbol;   // first letter of the constant name, e.g. 'K'
    public final int bitShift;  // how far to shift 1L left to get value
    public final long bitMask;  // value - 1: the low bitShift bits set
    private TraditionalBinaryPrefix(int bitShift) {
      this.bitShift = bitShift;
      this.value = 1L << bitShift;
      this.bitMask = this.value - 1L;
      this.symbol = toString().charAt(0);
    }
    /**
     * @return The TraditionalBinaryPrefix object corresponding to the symbol.
     */
    public static TraditionalBinaryPrefix valueOf(char symbol) {
      symbol = Character.toUpperCase(symbol);
      for(TraditionalBinaryPrefix prefix : TraditionalBinaryPrefix.values()) {
        if (symbol == prefix.symbol) {
          return prefix;
        }
      }
      throw new IllegalArgumentException("Unknown symbol '" + symbol + "'");
    }
    /**
     * Convert a string to long.
     * The input string is first be trimmed
     * and then it is parsed with traditional binary prefix.
     *
     * For example,
     * "-1230k" will be converted to -1230 * 1024 = -1259520;
     * "891g" will be converted to 891 * 1024^3 = 956703965184;
     *
     * @param s input string
     * @return a long value represented by the input string.
     */
    public static long string2long(String s) {
      s = s.trim();
      final int lastpos = s.length() - 1;
      final char lastchar = s.charAt(lastpos);
      if (Character.isDigit(lastchar))
        return Long.parseLong(s);
      else {
        long prefix;
        try {
          prefix = TraditionalBinaryPrefix.valueOf(lastchar).value;
        } catch (IllegalArgumentException e) {
          throw new IllegalArgumentException("Invalid size prefix '" + lastchar
              + "' in '" + s
              + "'. Allowed prefixes are k, m, g, t, p, e(case insensitive)");
        }
        long num = Long.parseLong(s.substring(0, lastpos));
        // Reject values whose product would overflow a long.
        if (num > (Long.MAX_VALUE/prefix) || num < (Long.MIN_VALUE/prefix)) {
          throw new IllegalArgumentException(s + " does not fit in a Long");
        }
        return num * prefix;
      }
    }
    /**
     * Convert a long integer to a string with traditional binary prefix.
     *
     * @param n the value to be converted
     * @param unit The unit, e.g. "B" for bytes.
     * @param decimalPlaces The number of decimal places.
     * @return a string with traditional binary prefix.
     */
    public static String long2String(long n, String unit, int decimalPlaces) {
      if (unit == null) {
        unit = "";
      }
      //take care a special case: -Long.MIN_VALUE overflows, handle it directly
      if (n == Long.MIN_VALUE) {
        return "-8 " + EXA.symbol + unit;
      }
      final StringBuilder b = new StringBuilder();
      //take care negative numbers
      if (n < 0) {
        b.append('-');
        n = -n;
      }
      if (n < KILO.value) {
        //no prefix
        b.append(n);
        return (unit.isEmpty()? b: b.append(" ").append(unit)).toString();
      } else {
        //find traditional binary prefix: largest prefix whose value <= n
        int i = 0;
        for(; i < values().length && n >= values()[i].value; i++);
        TraditionalBinaryPrefix prefix = values()[i - 1];
        if ((n & prefix.bitMask) == 0) {
          //exact division
          b.append(n >> prefix.bitShift);
        } else {
          final String format = "%." + decimalPlaces + "f";
          String s = format(format, n/(double)prefix.value);
          //check a special rounding up case: "1024" should promote to the next prefix
          if (s.startsWith("1024")) {
            prefix = values()[i];
            s = format(format, n/(double)prefix.value);
          }
          b.append(s);
        }
        return b.append(' ').append(prefix.symbol).append(unit).toString();
      }
    }
  }
/**
* Escapes HTML Special characters present in the string.
* @param string
* @return HTML Escaped String representation
*/
public static String escapeHTML(String string) {
if(string == null) {
return null;
}
StringBuilder sb = new StringBuilder();
boolean lastCharacterWasSpace = false;
char[] chars = string.toCharArray();
for(char c : chars) {
if(c == ' ') {
if(lastCharacterWasSpace){
lastCharacterWasSpace = false;
sb.append(" ");
}else {
lastCharacterWasSpace=true;
sb.append(" ");
}
}else {
lastCharacterWasSpace = false;
switch(c) {
case '<': sb.append("<"); break;
case '>': sb.append(">"); break;
case '&': sb.append("&"); break;
case '"': sb.append("""); break;
default : sb.append(c);break;
}
}
}
return sb.toString();
}
  /**
   * @return a byte description of the given long interger value,
   *         e.g. "1 KB", with two decimal places.
   */
  public static String byteDesc(long len) {
    return TraditionalBinaryPrefix.long2String(len, "B", 2);
  }
  /** @deprecated use StringUtils.format("%.2f", d). */
  @Deprecated
  public static String limitDecimalTo2(double d) {
    return format("%.2f", d);
  }
/**
* Concatenates strings, using a separator.
*
* @param separator Separator to join with.
* @param strings Strings to join.
*/
public static String join(CharSequence separator, Iterable<?> strings) {
Iterator<?> i = strings.iterator();
if (!i.hasNext()) {
return "";
}
StringBuilder sb = new StringBuilder(i.next().toString());
while (i.hasNext()) {
sb.append(separator);
sb.append(i.next().toString());
}
return sb.toString();
}
/**
* Concatenates strings, using a separator.
*
* @param separator to join with
* @param strings to join
* @return the joined string
*/
public static String join(CharSequence separator, String[] strings) {
// Ideally we don't have to duplicate the code here if array is iterable.
StringBuilder sb = new StringBuilder();
boolean first = true;
for (String s : strings) {
if (first) {
first = false;
} else {
sb.append(separator);
}
sb.append(s);
}
return sb.toString();
}
  /**
   * Convert SOME_STUFF to SomeStuff
   * Lower-cases the input (US locale), splits on '_' honoring the default
   * escape char, and capitalizes each word via commons-lang.
   * @param s input string
   * @return camelized string
   */
  public static String camelize(String s) {
    StringBuilder sb = new StringBuilder();
    String[] words = split(s.toLowerCase(Locale.US), ESCAPE_CHAR,  '_');
    for (String word : words)
      sb.append(org.apache.commons.lang.StringUtils.capitalize(word));
    return sb.toString();
  }
/**
* Matches a template string against a pattern, replaces matched tokens with
* the supplied replacements, and returns the result. The regular expression
* must use a capturing group. The value of the first capturing group is used
* to look up the replacement. If no replacement is found for the token, then
* it is replaced with the empty string.
*
* For example, assume template is "%foo%_%bar%_%baz%", pattern is "%(.*?)%",
* and replacements contains 2 entries, mapping "foo" to "zoo" and "baz" to
* "zaz". The result returned would be "zoo__zaz".
*
* @param template String template to receive replacements
* @param pattern Pattern to match for identifying tokens, must use a capturing
* group
* @param replacements Map<String, String> mapping tokens identified by the
* capturing group to their replacement values
* @return String template with replacements
*/
public static String replaceTokens(String template, Pattern pattern,
Map<String, String> replacements) {
StringBuffer sb = new StringBuffer();
Matcher matcher = pattern.matcher(template);
while (matcher.find()) {
String replacement = replacements.get(matcher.group(1));
if (replacement == null) {
replacement = "";
}
matcher.appendReplacement(sb, Matcher.quoteReplacement(replacement));
}
matcher.appendTail(sb);
return sb.toString();
}
/**
* Get stack trace for a given thread.
*/
public static String getStackTrace(Thread t) {
final StackTraceElement[] stackTrace = t.getStackTrace();
StringBuilder str = new StringBuilder();
for (StackTraceElement e : stackTrace) {
str.append(e.toString() + "\n");
}
return str.toString();
}
/**
* From a list of command-line arguments, remove both an option and the
* next argument.
*
* @param name Name of the option to remove. Example: -foo.
* @param args List of arguments.
* @return null if the option was not found; the value of the
* option otherwise.
* @throws IllegalArgumentException if the option's argument is not present
*/
public static String popOptionWithArgument(String name, List<String> args)
throws IllegalArgumentException {
String val = null;
for (Iterator<String> iter = args.iterator(); iter.hasNext(); ) {
String cur = iter.next();
if (cur.equals("--")) {
// stop parsing arguments when you see --
break;
} else if (cur.equals(name)) {
iter.remove();
if (!iter.hasNext()) {
throw new IllegalArgumentException("option " + name + " requires 1 " +
"argument.");
}
val = iter.next();
iter.remove();
break;
}
}
return val;
}
/**
* From a list of command-line arguments, remove an option.
*
* @param name Name of the option to remove. Example: -foo.
* @param args List of arguments.
* @return true if the option was found and removed; false otherwise.
*/
public static boolean popOption(String name, List<String> args) {
for (Iterator<String> iter = args.iterator(); iter.hasNext(); ) {
String cur = iter.next();
if (cur.equals("--")) {
// stop parsing arguments when you see --
break;
} else if (cur.equals(name)) {
iter.remove();
return true;
}
}
return false;
}
/**
* From a list of command-line arguments, return the first non-option
* argument. Non-option arguments are those which either come after
* a double dash (--) or do not start with a dash.
*
* @param args List of arguments.
* @return The first non-option argument, or null if there were none.
*/
public static String popFirstNonOption(List<String> args) {
for (Iterator<String> iter = args.iterator(); iter.hasNext(); ) {
String cur = iter.next();
if (cur.equals("--")) {
if (!iter.hasNext()) {
return null;
}
cur = iter.next();
iter.remove();
return cur;
} else if (!cur.startsWith("-")) {
iter.remove();
return cur;
}
}
return null;
}
}
| supermy/nutch2 | src/java/org/apache/hadoop/util/StringUtils.java | Java | apache-2.0 | 32,124 |
<?php
/**
* This file is part of the SevenShores/NetSuite library
* AND originally from the NetSuite PHP Toolkit.
*
* New content:
* @package ryanwinchester/netsuite-php
* @copyright Copyright (c) Ryan Winchester
* @license http://www.apache.org/licenses/LICENSE-2.0 Apache-2.0
* @link https://github.com/ryanwinchester/netsuite-php
*
* Original content:
* @copyright Copyright (c) NetSuite Inc.
* @license https://raw.githubusercontent.com/ryanwinchester/netsuite-php/master/original/NetSuite%20Application%20Developer%20License%20Agreement.txt
* @link http://www.netsuite.com/portal/developers/resources/suitetalk-sample-applications.shtml
*
* generated: 2020-04-10 09:56:55 PM UTC
*/
namespace NetSuite\Classes;
// NOTE(review): generated SOAP data class -- regenerate from the NetSuite
// schema rather than hand-editing if the web-service definition changes.
class VendorPaymentApplyList {
    /**
     * @var \NetSuite\Classes\VendorPaymentApply[]
     */
    public $apply;
    /**
     * @var boolean
     */
    public $replaceAll;
    // Maps property names to their SOAP types; presumably consumed by the
    // library's serializer when building requests -- TODO confirm against
    // the toolkit's marshalling code.
    static $paramtypesmap = array(
        "apply" => "VendorPaymentApply[]",
        "replaceAll" => "boolean",
    );
}
| RyanWinchester/netsuite-php | src/Classes/VendorPaymentApplyList.php | PHP | apache-2.0 | 1,056 |
/*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package software.amazon.awssdk.http.apache.internal.conn;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLSocket;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
// Verifies that SdkTlsSocketFactory.prepareSocket() leaves a socket's enabled
// TLS protocol list untouched for the default protocol sets reported by
// several Amazon Corretto JDK releases, and for null protocol arrays.
class SdkTlsSocketFactoryTest {
    // Factory under test, built on the JVM-default SSLContext; the second
    // constructor argument is passed as null here -- presumably an optional
    // collaborator (TODO confirm against SdkTlsSocketFactory's constructor).
    SdkTlsSocketFactory factory;
    // Mockito mock; each test stubs getSupportedProtocols()/getEnabledProtocols().
    SSLSocket socket;
    @BeforeEach
    public void before() throws Exception {
        factory = new SdkTlsSocketFactory(SSLContext.getDefault(), null);
        socket = Mockito.mock(SSLSocket.class);
    }
    // A socket reporting null protocol arrays must not be reconfigured.
    @Test
    void nullProtocols() {
        when(socket.getSupportedProtocols()).thenReturn(null);
        when(socket.getEnabledProtocols()).thenReturn(null);
        factory.prepareSocket(socket);
        verify(socket, never()).setEnabledProtocols(any());
    }
    // Corretto 8u292 defaults (TLSv1.2 and below enabled): left unchanged.
    @Test
    void amazonCorretto_8_0_292_defaultEnabledProtocols() {
        when(socket.getSupportedProtocols()).thenReturn(new String[] {
            "TLSv1.3", "TLSv1.2", "TLSv1.1", "TLSv1", "SSLv3", "SSLv2Hello"
        });
        when(socket.getEnabledProtocols()).thenReturn(new String[] {
            "TLSv1.2", "TLSv1.1", "TLSv1"
        });
        factory.prepareSocket(socket);
        verify(socket, never()).setEnabledProtocols(any());
    }
    // Corretto 11.0.8 defaults (TLSv1.3 through TLSv1 enabled): left unchanged.
    @Test
    void amazonCorretto_11_0_08_defaultEnabledProtocols() {
        when(socket.getSupportedProtocols()).thenReturn(new String[] {
            "TLSv1.3", "TLSv1.2", "TLSv1.1", "TLSv1", "SSLv3", "SSLv2Hello"
        });
        when(socket.getEnabledProtocols()).thenReturn(new String[] {
            "TLSv1.3", "TLSv1.2", "TLSv1.1", "TLSv1"
        });
        factory.prepareSocket(socket);
        verify(socket, never()).setEnabledProtocols(any());
    }
    // Corretto 17.0.1 defaults (only TLSv1.3/TLSv1.2 enabled): left unchanged.
    @Test
    void amazonCorretto_17_0_1_defaultEnabledProtocols() {
        when(socket.getSupportedProtocols()).thenReturn(new String[] {
            "TLSv1.3", "TLSv1.2", "TLSv1.1", "TLSv1", "SSLv3", "SSLv2Hello"
        });
        when(socket.getEnabledProtocols()).thenReturn(new String[] {
            "TLSv1.3", "TLSv1.2"
        });
        factory.prepareSocket(socket);
        verify(socket, never()).setEnabledProtocols(any());
    }
}
| aws/aws-sdk-java-v2 | http-clients/apache-client/src/test/java/software/amazon/awssdk/http/apache/internal/conn/SdkTlsSocketFactoryTest.java | Java | apache-2.0 | 2,906 |
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
@Path("/arearect")
public class AreaOfRect {
@GET
@Produces("application/xml")
public String rectanglearea() {
Double sideA = 10.0;
Double sideB = 5.0;
Double area;
area = (sideA * sideB);
String result = "@Produces(\"application/xml\") \n Area of Rectangle: " + area;
return "<areaofrectangle>" + "<sideA>" + sideA + "</sideA>"+ "<sideB>" + sideB + "</sideB>" + "<area>" + result + "</area>" + "</areaofrectangle>";
}
@Path("/{l}/{b}")
@GET
@Produces("application/xml")
public String rectangleareainput(@PathParam("l") Double l, @PathParam("b") Double b){
Double area;
Double sideA =l;
Double sideB =b;
area = (sideA * sideB);
String result = "@Produces(\"application/xml\") \n Area of Rectangle: " + area;
return "<areaofrectangle>" + "<sideA>" + sideA + "</sideA>"+ "<sideB>" + sideB + "</sideB>" + "<area>" + result + "</area>" + "</areaofrectangle>";
}
} | ankiw/Lab-Assignment-ASE | LAB8/Source/src/AreaOfRect.java | Java | apache-2.0 | 1,036 |
package com.ibm.smartnurse.crawler;
import static org.junit.Assert.*;
import org.junit.Test;
import com.google.gson.JsonObject;
public class TestCrawler {
	// Smoke test: invokes the full crawl via the CrawlerManage singleton and
	// fails only if crawler() throws.  No assertions are made on the result.
	@Test
	public void testGetInfoFromTJK() throws Exception {
		CrawlerManage.INSTANCE.crawler();
	}
	// TODO(review): empty placeholder test -- either implement or remove.
	@Test
	public void testGetPageInfo() {
	}
}
| anphoenix/data_crawler_generic | src/test/java/com/ibm/smartnurse/crawler/TestCrawler.java | Java | apache-2.0 | 309 |
package org.bakasoft.dbchord.schema;
import java.util.List;
import org.bakasoft.dbchord.ApplicationException;
/**
 * A database table: a named collection of columns plus its primary-key and
 * unique-key constraints.
 */
public class Table extends NamedElement<Schema> {
  // Columns of this table, owned and indexed by name via the Container.
  private final Container<Table, Column> columns;
  // Primary key, or null while none has been assigned.
  private PrimaryKey primaryKey;
  // Unique-key constraints declared on this table.
  private final Container<Table, UniqueKey> uniqueKeys;

  public Table() {
    this.columns = new Container<>(this);
    this.uniqueKeys = new Container<>(this);
  }

  public void add(final Column column) {
    this.columns.add(column);
  }

  public void addUniqueKey(final UniqueKey uniqueKey) {
    // Reject keys that were created for a different table.
    if (this != uniqueKey.getTable()) {
      throw new ApplicationException("unique key is not of this table");
    }
    this.uniqueKeys.add(uniqueKey);
  }

  public Column getColumn(final String name) {
    return this.columns.getEntity(name);
  }

  public List<Column> getColumns() {
    return this.columns.getEntities();
  }

  public PrimaryKey getPrimaryKey() {
    return this.primaryKey;
  }

  public List<UniqueKey> getUniqueKeys() {
    return this.uniqueKeys.getEntities();
  }

  public void setPrimaryKey(final PrimaryKey primaryKey) {
    // Reject keys that were created for a different table.
    if (this != primaryKey.getTable()) {
      throw new ApplicationException("primary key is not of this table");
    }
    this.primaryKey = primaryKey;
  }

  @Override
  public String toString() {
    return getName();
  }
}
| bakasoft/dbchord | dbchord/src/org/bakasoft/dbchord/schema/Table.java | Java | apache-2.0 | 1,237 |
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.Serialization;
using System.Text;
using System.Threading.Tasks;
using Windows.ApplicationModel;
using Windows.Storage;
using Windows.Storage.Streams;
using Windows.UI.Xaml;
using Windows.UI.Xaml.Controls;
namespace StoryTeller.Common
{
    /// <summary>
    /// SuspensionManager captures global session state to simplify process lifetime management
    /// for an application. Note that session state will be automatically cleared under a variety
    /// of conditions and should only be used to store information that would be convenient to
    /// carry across sessions, but that should be discarded when an application crashes or is
    /// upgraded.
    /// </summary>
    internal sealed class SuspensionManager
    {
        // Backing store for SessionState; serialized wholesale by SaveAsync
        // and replaced by RestoreAsync.
        private static Dictionary<string, object> _sessionState = new Dictionary<string, object>();
        // Backing store for KnownTypes (extra types for the DataContractSerializer).
        private static List<Type> _knownTypes = new List<Type>();
        // Name of the file in the app's LocalFolder that holds the serialized state.
        private const string sessionStateFilename = "_sessionState.xml";
        /// <summary>
        /// Provides access to global session state for the current session. This state is
        /// serialized by <see cref="SaveAsync"/> and restored by
        /// <see cref="RestoreAsync"/>, so values must be serializable by
        /// <see cref="DataContractSerializer"/> and should be as compact as possible. Strings
        /// and other self-contained data types are strongly recommended.
        /// </summary>
        public static Dictionary<string, object> SessionState
        {
            get { return _sessionState; }
        }
        /// <summary>
        /// List of custom types provided to the <see cref="DataContractSerializer"/> when
        /// reading and writing session state. Initially empty, additional types may be
        /// added to customize the serialization process.
        /// </summary>
        public static List<Type> KnownTypes
        {
            get { return _knownTypes; }
        }
        /// <summary>
        /// Save the current <see cref="SessionState"/>. Any <see cref="Frame"/> instances
        /// registered with <see cref="RegisterFrame"/> will also preserve their current
        /// navigation stack, which in turn gives their active <see cref="Page"/> an opportunity
        /// to save its state.
        /// </summary>
        /// <returns>An asynchronous task that reflects when session state has been saved.</returns>
        public static async Task SaveAsync()
        {
            try
            {
                // Save the navigation state for all registered frames
                foreach (var weakFrameReference in _registeredFrames)
                {
                    Frame frame;
                    if (weakFrameReference.TryGetTarget(out frame))
                    {
                        SaveFrameNavigationState(frame);
                    }
                }
                // Serialize the session state synchronously to avoid asynchronous access to shared
                // state
                MemoryStream sessionData = new MemoryStream();
                DataContractSerializer serializer = new DataContractSerializer(typeof(Dictionary<string, object>), _knownTypes);
                serializer.WriteObject(sessionData, _sessionState);
                // Get an output stream for the SessionState file and write the state asynchronously
                StorageFile file = await ApplicationData.Current.LocalFolder.CreateFileAsync(sessionStateFilename, CreationCollisionOption.ReplaceExisting);
                using (Stream fileStream = await file.OpenStreamForWriteAsync())
                {
                    // Rewind before copying: WriteObject left the position at the end.
                    sessionData.Seek(0, SeekOrigin.Begin);
                    await sessionData.CopyToAsync(fileStream);
                }
            }
            catch (Exception e)
            {
                // Any failure (serialization or file I/O) is surfaced uniformly.
                throw new SuspensionManagerException(e);
            }
        }
        /// <summary>
        /// Restores previously saved <see cref="SessionState"/>. Any <see cref="Frame"/> instances
        /// registered with <see cref="RegisterFrame"/> will also restore their prior navigation
        /// state, which in turn gives their active <see cref="Page"/> an opportunity restore its
        /// state.
        /// </summary>
        /// <param name="sessionBaseKey">An optional key that identifies the type of session.
        /// This can be used to distinguish between multiple application launch scenarios.</param>
        /// <returns>An asynchronous task that reflects when session state has been read. The
        /// content of <see cref="SessionState"/> should not be relied upon until this task
        /// completes.</returns>
        public static async Task RestoreAsync(String sessionBaseKey = null)
        {
            _sessionState = new Dictionary<String, Object>();
            try
            {
                // Get the input stream for the SessionState file
                StorageFile file = await ApplicationData.Current.LocalFolder.GetFileAsync(sessionStateFilename);
                using (IInputStream inStream = await file.OpenSequentialReadAsync())
                {
                    // Deserialize the Session State
                    DataContractSerializer serializer = new DataContractSerializer(typeof(Dictionary<string, object>), _knownTypes);
                    _sessionState = (Dictionary<string, object>)serializer.ReadObject(inStream.AsStreamForRead());
                }
                // Restore any registered frames to their saved state
                foreach (var weakFrameReference in _registeredFrames)
                {
                    Frame frame;
                    // Only frames registered under the same session base key participate.
                    if (weakFrameReference.TryGetTarget(out frame) && (string)frame.GetValue(FrameSessionBaseKeyProperty) == sessionBaseKey)
                    {
                        frame.ClearValue(FrameSessionStateProperty);
                        RestoreFrameNavigationState(frame);
                    }
                }
            }
            catch (Exception e)
            {
                throw new SuspensionManagerException(e);
            }
        }
        // Attached properties associate session-state bookkeeping with Frame
        // instances without requiring a Frame subclass.
        private static DependencyProperty FrameSessionStateKeyProperty =
            DependencyProperty.RegisterAttached("_FrameSessionStateKey", typeof(String), typeof(SuspensionManager), null);
        private static DependencyProperty FrameSessionBaseKeyProperty =
            DependencyProperty.RegisterAttached("_FrameSessionBaseKeyParams", typeof(String), typeof(SuspensionManager), null);
        private static DependencyProperty FrameSessionStateProperty =
            DependencyProperty.RegisterAttached("_FrameSessionState", typeof(Dictionary<String, Object>), typeof(SuspensionManager), null);
        // Weak references so that registration does not keep Frames alive.
        private static List<WeakReference<Frame>> _registeredFrames = new List<WeakReference<Frame>>();
        /// <summary>
        /// Registers a <see cref="Frame"/> instance to allow its navigation history to be saved to
        /// and restored from <see cref="SessionState"/>. Frames should be registered once
        /// immediately after creation if they will participate in session state management. Upon
        /// registration if state has already been restored for the specified key
        /// the navigation history will immediately be restored. Subsequent invocations of
        /// <see cref="RestoreAsync"/> will also restore navigation history.
        /// </summary>
        /// <param name="frame">An instance whose navigation history should be managed by
        /// <see cref="SuspensionManager"/></param>
        /// <param name="sessionStateKey">A unique key into <see cref="SessionState"/> used to
        /// store navigation-related information.</param>
        /// <param name="sessionBaseKey">An optional key that identifies the type of session.
        /// This can be used to distinguish between multiple application launch scenarios.</param>
        public static void RegisterFrame(Frame frame, String sessionStateKey, String sessionBaseKey = null)
        {
            if (frame.GetValue(FrameSessionStateKeyProperty) != null)
            {
                throw new InvalidOperationException("Frames can only be registered to one session state key");
            }
            if (frame.GetValue(FrameSessionStateProperty) != null)
            {
                throw new InvalidOperationException("Frames must be either be registered before accessing frame session state, or not registered at all");
            }
            if (!string.IsNullOrEmpty(sessionBaseKey))
            {
                // Scope the state key by the session base key so different launch
                // scenarios do not collide.
                frame.SetValue(FrameSessionBaseKeyProperty, sessionBaseKey);
                sessionStateKey = sessionBaseKey + "_" + sessionStateKey;
            }
            // Use a dependency property to associate the session key with a frame, and keep a list of frames whose
            // navigation state should be managed
            frame.SetValue(FrameSessionStateKeyProperty, sessionStateKey);
            _registeredFrames.Add(new WeakReference<Frame>(frame));
            // Check to see if navigation state can be restored
            RestoreFrameNavigationState(frame);
        }
        /// <summary>
        /// Disassociates a <see cref="Frame"/> previously registered by <see cref="RegisterFrame"/>
        /// from <see cref="SessionState"/>. Any navigation state previously captured will be
        /// removed.
        /// </summary>
        /// <param name="frame">An instance whose navigation history should no longer be
        /// managed.</param>
        public static void UnregisterFrame(Frame frame)
        {
            // Remove session state and remove the frame from the list of frames whose navigation
            // state will be saved (along with any weak references that are no longer reachable)
            SessionState.Remove((String)frame.GetValue(FrameSessionStateKeyProperty));
            _registeredFrames.RemoveAll((weakFrameReference) =>
            {
                Frame testFrame;
                return !weakFrameReference.TryGetTarget(out testFrame) || testFrame == frame;
            });
        }
        /// <summary>
        /// Provides storage for session state associated with the specified <see cref="Frame"/>.
        /// Frames that have been previously registered with <see cref="RegisterFrame"/> have
        /// their session state saved and restored automatically as a part of the global
        /// <see cref="SessionState"/>. Frames that are not registered have transient state
        /// that can still be useful when restoring pages that have been discarded from the
        /// navigation cache.
        /// </summary>
        /// <remarks>Apps may choose to rely on <see cref="NavigationHelper"/> to manage
        /// page-specific state instead of working with frame session state directly.</remarks>
        /// <param name="frame">The instance for which session state is desired.</param>
        /// <returns>A collection of state subject to the same serialization mechanism as
        /// <see cref="SessionState"/>.</returns>
        public static Dictionary<String, Object> SessionStateForFrame(Frame frame)
        {
            var frameState = (Dictionary<String, Object>)frame.GetValue(FrameSessionStateProperty);
            if (frameState == null)
            {
                var frameSessionKey = (String)frame.GetValue(FrameSessionStateKeyProperty);
                if (frameSessionKey != null)
                {
                    // Registered frames reflect the corresponding session state
                    if (!_sessionState.ContainsKey(frameSessionKey))
                    {
                        _sessionState[frameSessionKey] = new Dictionary<String, Object>();
                    }
                    frameState = (Dictionary<String, Object>)_sessionState[frameSessionKey];
                }
                else
                {
                    // Frames that aren't registered have transient state
                    frameState = new Dictionary<String, Object>();
                }
                frame.SetValue(FrameSessionStateProperty, frameState);
            }
            return frameState;
        }
        // Re-applies a frame's saved navigation stack, if one was captured under
        // the "Navigation" key of its session-state bucket.
        private static void RestoreFrameNavigationState(Frame frame)
        {
            var frameState = SessionStateForFrame(frame);
            if (frameState.ContainsKey("Navigation"))
            {
                frame.SetNavigationState((String)frameState["Navigation"]);
            }
        }
        // Captures the frame's current navigation stack into its session-state
        // bucket under the "Navigation" key.
        private static void SaveFrameNavigationState(Frame frame)
        {
            var frameState = SessionStateForFrame(frame);
            frameState["Navigation"] = frame.GetNavigationState();
        }
    }
public class SuspensionManagerException : Exception
{
public SuspensionManagerException()
{
}
public SuspensionManagerException(Exception e)
: base("SuspensionManager failed", e)
{
}
}
}
| kasparov/StoryTeller | StoryTeller/Common/SuspensionManager.cs | C# | apache-2.0 | 13,050 |
<?php
/*-----------------------------------------------------------------
!!!!警告!!!!
以下为系统文件,请勿修改
-----------------------------------------------------------------*/
namespace ginkgo;
// 不能非法包含或直接执行
if (!defined('IN_GINKGO')) {
return 'Access denied';
}
// 语言处理类
class Lang {
public $lang; // 语言数据
public $config = array(); // 语言配置
public $current; // 当前语言
public $clientLang; // 客户端语言
public $range = ''; // 作用域
protected static $instance; // 当前实例
private $configThis = array( //语言
'switch' => false, //语言开关
'default' => 'zh_CN', //默认语言
);
// 构造函数
protected function __construct($config = array()) {
$this->config($config);
$this->getCurrent(); // 获取当前语言
$this->init(); // 初始化
}
protected function __clone() { }
/** 实例化
* instance function.
*
* @access public
* @static
* @return 当前类的实例
*/
public static function instance($config = array()) {
if (Func::isEmpty(self::$instance)) {
self::$instance = new static($config);
}
return self::$instance;
}
// 配置 since 0.2.0
public function config($config = array()) {
$_arr_config = Config::get('lang'); // 取得图片配置
$_arr_configDo = $this->configThis;
if (is_array($_arr_config) && Func::notEmpty($_arr_config)) {
$_arr_configDo = array_replace_recursive($_arr_configDo, $_arr_config); // 合并配置
}
if (is_array($this->config) && Func::notEmpty($this->config)) {
$_arr_configDo = array_replace_recursive($_arr_configDo, $this->config); // 合并配置
}
if (is_array($config) && Func::notEmpty($config)) {
$_arr_configDo = array_replace_recursive($_arr_configDo, $config); // 合并配置
}
$this->config = $_arr_configDo;
}
/** 设置, 获取作用域
* range function.
*
* @access public
* @param string $range (default: '') 作用域
* @return 如果参数为空则返回当前作用域, 否则无返回
*/
public function range($range = '') {
if (Func::isEmpty($range)) {
return $this->range;
} else {
$this->range = $range;
}
}
/** 获取当前语言
* getCurrent function.
*
* @access public
* @param bool $lower (default: false) 是否转换为小写
* @param string $separator (default: '') 语言、国家分隔符
* @param bool $client (default: false) 是否以客户端语言为准
* @return void
*/
public function getCurrent($lower = false, $separator = '', $client = false) {
if ($client) {
$_str_current = $this->clientLang;
} else {
$_str_current = $this->current;
}
if ($lower === true) {
$_str_current = strtolower($_str_current);
}
if ((Func::notEmpty($separator) && is_string($separator)) || $separator == '-') {
$_str_current = str_replace('_', $separator, $_str_current);
}
return $_str_current;
}
/** 设置当前语言
* setCurrent function.
*
* @access public
* @param string $lang (default: '') 语言代码
* @return void
*/
public function setCurrent($lang = '') {
$this->current = $lang;
}
/** 添加变量 (冲突不覆盖)
* add function.
*
* @access public
* @param string $name 变量名
* @param string $value (default: '') 值
* @param string $range (default: '') 作用域
* @return void
*/
public function add($name, $value = '', $range = '') {
$_mix_range = $this->rangeProcess($range);
if (Func::isEmpty($_mix_range)) {
if (!isset($this->lang[$name])) {
$this->lang[$name] = $value;
}
} else if (is_array($_mix_range)) {
if (isset($_mix_range[1])) {
if (!isset($this->lang[$_mix_range[0]][$_mix_range[1]][$name])) {
$this->lang[$_mix_range[0]][$_mix_range[1]][$name] = $value;
}
} else if (isset($_mix_range[0])) {
if (!isset($this->lang[$_mix_range[0]][$name])) {
$this->lang[$_mix_range[0]][$name] = $value;
}
}
} else if (is_string($_mix_range)) {
if (!isset($this->lang[$_mix_range][$name])) {
$this->lang[$_mix_range][$name] = $value;
}
}
}
/** 设置变量 (冲突覆盖)
* set function.
*
* @access public
* @param mixed $name 变量名
* @param string $value (default: '') 值
* @param string $range (default: '') 作用域
* @return void
*/
public function set($name, $value = '', $range = '') { //设置语言字段
$_mix_range = $this->rangeProcess($range);
/*print_r($name);
print_r('<br>');*/
if (Func::isEmpty($_mix_range)) {
if (is_array($name)) {
$this->lang = array_replace_recursive($this->lang, $name);
} else if (is_string($name)) {
if (isset($this->lang[$name]) && is_array($this->lang[$name]) && is_array($value)) {
$this->lang[$name] = array_replace_recursive($this->lang[$name], $value);
} else {
$this->lang[$name] = $value;
}
}
} else if (is_array($_mix_range)) {
if (is_array($name)) {
if (isset($_mix_range[1])) {
if (isset($this->lang[$_mix_range[0]][$_mix_range[1]]) && is_array($this->lang[$_mix_range[0]][$_mix_range[1]])) {
$this->lang[$_mix_range[0]][$_mix_range[1]] = array_replace_recursive($this->lang[$_mix_range[0]][$_mix_range[1]], $name);
} else {
$this->lang[$_mix_range[0]][$_mix_range[1]] = $name;
}
} else if (isset($_mix_range[0])) {
if (isset($this->lang[$_mix_range[0]]) && is_array($this->lang[$_mix_range[0]])) {
$this->lang[$_mix_range[0]] = array_replace_recursive($this->lang[$_mix_range[0]], $name);
} else {
$this->lang[$_mix_range[0]] = $name;
}
}
} else if (is_string($name)) {
if (isset($_mix_range[1])) {
if (isset($this->lang[$_mix_range[0]][$_mix_range[1]][$name]) && is_array($this->lang[$_mix_range[0]][$_mix_range[1]][$name]) && is_array($value)) {
$this->lang[$_mix_range[0]][$_mix_range[1]][$name] = array_replace_recursive($this->lang[$_mix_range[0]][$_mix_range[1]][$name], $value);
} else {
$this->lang[$_mix_range[0]][$_mix_range[1]][$name] = $value;
}
} else if (isset($_mix_range[0])) {
if (isset($this->lang[$_mix_range[0]][$name]) && is_array($this->lang[$_mix_range[0]][$name]) && is_array($value)) {
$this->lang[$_mix_range[0]][$name] = array_replace_recursive($this->lang[$_mix_range[0]][$name], $value);
} else {
$this->lang[$_mix_range[0]][$name] = $value;
}
}
}
} else if (is_string($_mix_range)) {
if (is_array($name)) {
if (isset($this->lang[$_mix_range]) && is_array($this->lang[$_mix_range])) {
$this->lang[$_mix_range] = array_replace_recursive($this->lang[$_mix_range], $name);
} else {
$this->lang[$_mix_range] = $name;
}
} else {
if (isset($this->lang[$_mix_range][$name]) && is_array($this->lang[$_mix_range][$name]) && is_array($value)) {
$this->lang[$_mix_range][$name] = array_replace_recursive($this->lang[$_mix_range][$name], $value);
} else {
$this->lang[$_mix_range][$name] = $value;
}
}
}
//print_r($this->lang);
}
/** 获取语言变量
* get function.
*
* @access public
* @param string $name 变量名
* @param string $range (default: '') 作用域
* @param array $replace (default: array()) 输出替换
* @param bool $show_src (default: true) 如果变量不存在, 是否显示变量名
* @return void
*/
public function get($name, $range = '', $replace = array(), $show_src = true) { //获取语言字段
$name = (string)$name;
$_mix_range = $this->rangeProcess($range);
/*print_r($name);
print_r(' ||| ');
print_r($_mix_range);
print_r('<br>');*/
if ($show_src) {
$_str_return = $name;
} else {
$_str_return = '';
}
if (Func::isEmpty($_mix_range)) {
if (isset($this->lang[$name])) {
$_str_return = $this->lang[$name];
}
} else if (is_array($_mix_range)) {
if (isset($_mix_range[1])) {
if (isset($this->lang[$_mix_range[0]][$_mix_range[1]][$name])) {
$_str_return = $this->lang[$_mix_range[0]][$_mix_range[1]][$name];
}
} else if (isset($_mix_range[0])) {
if (isset($this->lang[$_mix_range[0]][$name])) {
$_str_return = $this->lang[$_mix_range[0]][$name];
}
}
} else if (is_string($_mix_range)) {
if (isset($this->lang[$_mix_range][$name])) {
$_str_return = $this->lang[$_mix_range][$name];
}
}
if (is_array($replace) && Func::notEmpty($replace)) {
$_arr_replace = array_keys($replace);
foreach ($_arr_replace as $_key=>&$_value) {
$_value = '{:' . $_value . '}';
}
$_str_return = str_replace($_arr_replace, $replace, $_str_return);
}
return $_str_return;
}
/** 载入语言包
* load function.
*
* @access public
* @param string $path 路径
* @param string $range (default: '') 作用域
* @return void
*/
public function load($path, $range = '') {
$_arr_lang = array();
if (File::fileHas($path)) {
$_arr_lang = Loader::load($path, 'include');
}
/*print_r($range);
print_r('<br>');*/
$this->set($_arr_lang, '', $range); // 设置变量
return $_arr_lang;
}
/** 初始化
* init function.
*
* @access private
* @return void
*/
private function init() {
$_str_current = $this->current;
if ($this->config['switch'] === true || $this->config['switch'] === 'true') { // 语言开关为开
if (isset($_SERVER['HTTP_ACCEPT_LANGUAGE'])) {
$this->clientLang = $_SERVER['HTTP_ACCEPT_LANGUAGE'];
}
}
if (Func::isEmpty($_str_current)) {
$_str_current = $this->config['default'];
}
if (function_exists('mb_internal_encoding')) {
mb_internal_encoding('UTF-8'); // 设置内部字符编码
}
setlocale(LC_ALL, $_str_current . '.UTF-8'); // 设置区域格式,主要针对 csv 处理
$this->current = $_str_current;
$_str_pathSys = GK_PATH_LANG . $_str_current . GK_EXT_LANG;
if (File::fileHas($_str_pathSys)) {
$this->lang['__ginkgo__'] = Loader::load($_str_pathSys, 'include'); // 载入框架语言包
}
}
/** 作用域处理
* rangeProcess function.
*
* @access private
* @static
* @param string $range (default: '') 作用域
* @return 作用域数组
*/
private function rangeProcess($range) {
if (Func::isEmpty($range)) {
$_str_range = $this->range;
} else {
$_str_range = $range;
}
if (!is_string($_str_range)) {
$_str_range = '';
}
$_mix_return = '';
if (strpos($_str_range, '.')) {
$_mix_return = explode('.', $_str_range);
} else {
$_mix_return = $_str_range;
}
return $_mix_return;
}
}
| baigoStudio/baigoADS | ginkgo/core/lang.class.php | PHP | apache-2.0 | 11,265 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.physical.impl.join;
import org.apache.drill.shaded.guava.com.google.common.base.Preconditions;
import org.apache.drill.common.map.CaseInsensitiveMap;
import org.apache.drill.exec.record.RecordBatch;
import org.junit.Assert;
import org.junit.Test;
public class TestBuildSidePartitioningImpl {
  // First spill cycle without hashing: no probe batch space is reserved yet
  // (see the shared helper below).
  @Test
  public void testSimpleReserveMemoryCalculationNoHashFirstCycle() {
    testSimpleReserveMemoryCalculationNoHashHelper(true);
  }
  // Later spill cycle without hashing: probe batch space (10 bytes here) is
  // included in the reservation (see the shared helper below).
  @Test
  public void testSimpleReserveMemoryCalculationNoHashNotFirstCycle() {
    testSimpleReserveMemoryCalculationNoHashHelper(false);
  }
  // Shared body for the two no-hash tests above.  Builds a calculator with
  // fixed fragmentation (2.0) and safety (1.5) factors, initializes it for
  // 2 partitions, then checks both the reserved-memory figure and the
  // partition count.  The memory budget passed to initialize() is 190 plus the
  // probe batch size, which is only accounted for after the first cycle.
  private void testSimpleReserveMemoryCalculationNoHashHelper(final boolean firstCycle) {
    final int maxBatchNumRecords = 20;
    final double fragmentationFactor = 2.0;
    final double safetyFactor = 1.5;
    final HashJoinMemoryCalculatorImpl.BuildSidePartitioningImpl calc =
      new HashJoinMemoryCalculatorImpl.BuildSidePartitioningImpl(
        BatchSizePredictorImpl.Factory.INSTANCE,
        new HashTableSizeCalculatorConservativeImpl(RecordBatch.MAX_BATCH_ROW_COUNT, HashTableSizeCalculatorConservativeImpl.HASHTABLE_DOUBLING_FACTOR),
        HashJoinHelperSizeCalculatorImpl.INSTANCE,
        fragmentationFactor,
        safetyFactor, false);
    final CaseInsensitiveMap<Long> keySizes = CaseInsensitiveMap.newHashMap();
    // Probe batch memory is only reserved after the first cycle.
    final long accountedProbeBatchSize = firstCycle? 0: 10;
    calc.initialize(firstCycle,
      false,
      keySizes,
      190 + accountedProbeBatchSize,
      2,
      false,
      new MockBatchSizePredictor(20, 20, fragmentationFactor, safetyFactor),
      new MockBatchSizePredictor(10, 10, fragmentationFactor, safetyFactor),
      10,
      5,
      maxBatchNumRecords,
      maxBatchNumRecords,
      16000,
      .75);
    final HashJoinMemoryCalculator.PartitionStatSet partitionStatSet =
      new HashJoinMemoryCalculator.PartitionStatSet(new PartitionStatImpl(), new PartitionStatImpl());
    calc.setPartitionStatSet(partitionStatSet);
    long expectedReservedMemory = 60 // Max incoming batch size
      + 2 * 30 // build side batch for each spilled partition
      + accountedProbeBatchSize; // Max incoming probe batch size
    long actualReservedMemory = calc.getBuildReservedMemory();
    Assert.assertEquals(expectedReservedMemory, actualReservedMemory);
    Assert.assertEquals(2, calc.getNumPartitions());
  }
  // With hashing enabled (second initialize() flag), each partition's
  // reservation additionally includes space for its hash-value vector, so the
  // memory budget and expected reservation are larger than in the no-hash case.
  @Test
  public void testSimpleReserveMemoryCalculationHash() {
    final int maxBatchNumRecords = 20;
    final double fragmentationFactor = 2.0;
    final double safetyFactor = 1.5;
    final HashJoinMemoryCalculatorImpl.BuildSidePartitioningImpl calc =
      new HashJoinMemoryCalculatorImpl.BuildSidePartitioningImpl(
        BatchSizePredictorImpl.Factory.INSTANCE,
        new HashTableSizeCalculatorConservativeImpl(RecordBatch.MAX_BATCH_ROW_COUNT, HashTableSizeCalculatorConservativeImpl.HASHTABLE_DOUBLING_FACTOR),
        HashJoinHelperSizeCalculatorImpl.INSTANCE,
        fragmentationFactor,
        safetyFactor, false);
    final CaseInsensitiveMap<Long> keySizes = CaseInsensitiveMap.newHashMap();
    calc.initialize(false,
      true,
      keySizes,
      350,
      2,
      false,
      new MockBatchSizePredictor(20, 20, fragmentationFactor, safetyFactor),
      new MockBatchSizePredictor(10, 10, fragmentationFactor, safetyFactor),
      10,
      5,
      maxBatchNumRecords,
      maxBatchNumRecords,
      16000,
      .75);
    final HashJoinMemoryCalculator.PartitionStatSet partitionStatSet =
      new HashJoinMemoryCalculator.PartitionStatSet(new PartitionStatImpl(), new PartitionStatImpl());
    calc.setPartitionStatSet(partitionStatSet);
    long expectedReservedMemory = 60 // Max incoming batch size
      + 2 * (/* data size for batch */ 30 + /* Space reserved for hash value vector */ 10 * 4 * 2) // build side batch for each spilled partition
      + 10; // Max incoming probe batch size
    long actualReservedMemory = calc.getBuildReservedMemory();
    Assert.assertEquals(expectedReservedMemory, actualReservedMemory);
    Assert.assertEquals(2, calc.getNumPartitions());
  }
@Test
public void testAdjustInitialPartitions() {
final int maxBatchNumRecords = 20;
final double fragmentationFactor = 2.0;
final double safetyFactor = 1.5;
final HashJoinMemoryCalculatorImpl.BuildSidePartitioningImpl calc =
new HashJoinMemoryCalculatorImpl.BuildSidePartitioningImpl(
BatchSizePredictorImpl.Factory.INSTANCE,
new HashTableSizeCalculatorConservativeImpl(RecordBatch.MAX_BATCH_ROW_COUNT, HashTableSizeCalculatorConservativeImpl.HASHTABLE_DOUBLING_FACTOR),
HashJoinHelperSizeCalculatorImpl.INSTANCE,
fragmentationFactor,
safetyFactor, false);
final CaseInsensitiveMap<Long> keySizes = CaseInsensitiveMap.newHashMap();
calc.initialize(
true,
false,
keySizes,
200,
4,
false,
new MockBatchSizePredictor(20, 20, fragmentationFactor, safetyFactor),
new MockBatchSizePredictor(10, 10, fragmentationFactor, safetyFactor),
10,
5,
maxBatchNumRecords,
maxBatchNumRecords,
16000,
.75);
final HashJoinMemoryCalculator.PartitionStatSet partitionStatSet =
new HashJoinMemoryCalculator.PartitionStatSet(new PartitionStatImpl(), new PartitionStatImpl(),
new PartitionStatImpl(), new PartitionStatImpl());
calc.setPartitionStatSet(partitionStatSet);
long expectedReservedMemory = 60 // Max incoming batch size
+ 2 * 30; // build side batch for each spilled partition
long actualReservedMemory = calc.getBuildReservedMemory();
Assert.assertEquals(expectedReservedMemory, actualReservedMemory);
Assert.assertEquals(2, calc.getNumPartitions());
}
@Test
public void testDontAdjustInitialPartitions() {
final int maxBatchNumRecords = 20;
final double fragmentationFactor = 2.0;
final double safetyFactor = 1.5;
final HashJoinMemoryCalculatorImpl.BuildSidePartitioningImpl calc =
new HashJoinMemoryCalculatorImpl.BuildSidePartitioningImpl(
BatchSizePredictorImpl.Factory.INSTANCE,
new HashTableSizeCalculatorConservativeImpl(RecordBatch.MAX_BATCH_ROW_COUNT, HashTableSizeCalculatorConservativeImpl.HASHTABLE_DOUBLING_FACTOR),
HashJoinHelperSizeCalculatorImpl.INSTANCE,
fragmentationFactor,
safetyFactor, false);
final CaseInsensitiveMap<Long> keySizes = CaseInsensitiveMap.newHashMap();
calc.initialize(
false,
false,
keySizes,
200,
4,
false,
new MockBatchSizePredictor(20, 20, fragmentationFactor, safetyFactor),
new MockBatchSizePredictor(10, 10, fragmentationFactor, safetyFactor),
10,
5,
maxBatchNumRecords,
maxBatchNumRecords,
16000,
.75);
final HashJoinMemoryCalculator.PartitionStatSet partitionStatSet =
new HashJoinMemoryCalculator.PartitionStatSet(new PartitionStatImpl(), new PartitionStatImpl(),
new PartitionStatImpl(), new PartitionStatImpl());
calc.setPartitionStatSet(partitionStatSet);
long expectedReservedMemory = 60 // Max incoming batch size
+ 4 * 30 // build side batch for each spilled partition
+ 10; // Max incoming probe batch size
long actualReservedMemory = calc.getBuildReservedMemory();
Assert.assertEquals(expectedReservedMemory, actualReservedMemory);
Assert.assertEquals(4, calc.getNumPartitions());
}
@Test(expected = IllegalStateException.class)
public void testHasDataProbeEmpty() {
final int maxBatchNumRecords = 20;
final double fragmentationFactor = 2.0;
final double safetyFactor = 1.5;
final HashJoinMemoryCalculatorImpl.BuildSidePartitioningImpl calc =
new HashJoinMemoryCalculatorImpl.BuildSidePartitioningImpl(
BatchSizePredictorImpl.Factory.INSTANCE,
new HashTableSizeCalculatorConservativeImpl(RecordBatch.MAX_BATCH_ROW_COUNT, HashTableSizeCalculatorConservativeImpl.HASHTABLE_DOUBLING_FACTOR),
HashJoinHelperSizeCalculatorImpl.INSTANCE,
fragmentationFactor,
safetyFactor, false);
final CaseInsensitiveMap<Long> keySizes = CaseInsensitiveMap.newHashMap();
calc.initialize(
true,
false,
keySizes,
240,
4,
true,
new MockBatchSizePredictor(20, 20, fragmentationFactor, safetyFactor),
new MockBatchSizePredictor(10, 10, fragmentationFactor, safetyFactor),
10,
5,
maxBatchNumRecords,
maxBatchNumRecords,
16000,
.75);
}
@Test
public void testNoProbeDataForStats() {
final int maxBatchNumRecords = 20;
final double fragmentationFactor = 2.0;
final double safetyFactor = 1.5;
final HashJoinMemoryCalculatorImpl.BuildSidePartitioningImpl calc =
new HashJoinMemoryCalculatorImpl.BuildSidePartitioningImpl(
BatchSizePredictorImpl.Factory.INSTANCE,
new HashTableSizeCalculatorConservativeImpl(RecordBatch.MAX_BATCH_ROW_COUNT, HashTableSizeCalculatorConservativeImpl.HASHTABLE_DOUBLING_FACTOR),
HashJoinHelperSizeCalculatorImpl.INSTANCE,
fragmentationFactor,
safetyFactor, false);
final CaseInsensitiveMap<Long> keySizes = CaseInsensitiveMap.newHashMap();
calc.initialize(
true,
false,
keySizes,
240,
4,
false,
new MockBatchSizePredictor(20, 20, fragmentationFactor, safetyFactor),
new MockBatchSizePredictor(),
10,
5,
maxBatchNumRecords,
maxBatchNumRecords,
16000,
.75);
final HashJoinMemoryCalculator.PartitionStatSet partitionStatSet =
new HashJoinMemoryCalculator.PartitionStatSet(new PartitionStatImpl(), new PartitionStatImpl());
calc.setPartitionStatSet(partitionStatSet);
long expectedReservedMemory = 60 // Max incoming batch size
+ 4 * 30; // build side batch for each spilled partition
long actualReservedMemory = calc.getBuildReservedMemory();
Assert.assertEquals(expectedReservedMemory, actualReservedMemory);
Assert.assertEquals(4, calc.getNumPartitions());
}
@Test
public void testProbeEmpty() {
final int maxBatchNumRecords = 20;
final double fragmentationFactor = 2.0;
final double safetyFactor = 1.5;
final HashJoinMemoryCalculatorImpl.BuildSidePartitioningImpl calc =
new HashJoinMemoryCalculatorImpl.BuildSidePartitioningImpl(
BatchSizePredictorImpl.Factory.INSTANCE,
new HashTableSizeCalculatorConservativeImpl(RecordBatch.MAX_BATCH_ROW_COUNT, HashTableSizeCalculatorConservativeImpl.HASHTABLE_DOUBLING_FACTOR),
HashJoinHelperSizeCalculatorImpl.INSTANCE,
fragmentationFactor,
safetyFactor, false);
final CaseInsensitiveMap<Long> keySizes = CaseInsensitiveMap.newHashMap();
calc.initialize(
true,
false,
keySizes,
200,
4,
true,
new MockBatchSizePredictor(20, 20, fragmentationFactor, safetyFactor),
new MockBatchSizePredictor(),
10,
5,
maxBatchNumRecords,
maxBatchNumRecords,
16000,
.75);
final HashJoinMemoryCalculator.PartitionStatSet partitionStatSet =
new HashJoinMemoryCalculator.PartitionStatSet(new PartitionStatImpl(), new PartitionStatImpl(),
new PartitionStatImpl(), new PartitionStatImpl());
calc.setPartitionStatSet(partitionStatSet);
long expectedReservedMemory = 60 // Max incoming batch size
+ 4 * 30; // build side batch for each spilled partition
long actualReservedMemory = calc.getBuildReservedMemory();
Assert.assertEquals(expectedReservedMemory, actualReservedMemory);
Assert.assertEquals(4, calc.getNumPartitions());
}
@Test
public void testNoRoomInMemoryForBatch1FirstCycle() {
testNoRoomInMemoryForBatch1Helper(true);
}
@Test
public void testNoRoomInMemoryForBatch1NotFirstCycle() {
testNoRoomInMemoryForBatch1Helper(false);
}
private void testNoRoomInMemoryForBatch1Helper(final boolean firstCycle) {
final int maxBatchNumRecords = 20;
final double fragmentationFactor = 2.0;
final double safetyFactor = 1.5;
final long accountedProbeBatchSize = firstCycle? 0: 10;
final HashJoinMemoryCalculatorImpl.BuildSidePartitioningImpl calc =
new HashJoinMemoryCalculatorImpl.BuildSidePartitioningImpl(
BatchSizePredictorImpl.Factory.INSTANCE,
new HashTableSizeCalculatorConservativeImpl(RecordBatch.MAX_BATCH_ROW_COUNT, HashTableSizeCalculatorConservativeImpl.HASHTABLE_DOUBLING_FACTOR),
HashJoinHelperSizeCalculatorImpl.INSTANCE,
fragmentationFactor,
safetyFactor, false);
final CaseInsensitiveMap<Long> keySizes = CaseInsensitiveMap.newHashMap();
calc.initialize(
firstCycle,
false,
keySizes,
120 + accountedProbeBatchSize,
2,
false,
new MockBatchSizePredictor(20, 20, fragmentationFactor, safetyFactor),
new MockBatchSizePredictor(10, 10, fragmentationFactor, safetyFactor),
10,
5,
maxBatchNumRecords,
maxBatchNumRecords,
16000,
.75);
final PartitionStatImpl partition1 = new PartitionStatImpl();
final PartitionStatImpl partition2 = new PartitionStatImpl();
final HashJoinMemoryCalculator.PartitionStatSet partitionStatSet =
new HashJoinMemoryCalculator.PartitionStatSet(partition1, partition2);
calc.setPartitionStatSet(partitionStatSet);
long expectedReservedMemory = 60 // Max incoming batch size
+ 2 * 30 // build side batch for each spilled partition
+ accountedProbeBatchSize; // Max incoming probe batch size
long actualReservedMemory = calc.getBuildReservedMemory();
Assert.assertEquals(expectedReservedMemory, actualReservedMemory);
Assert.assertEquals(2, calc.getNumPartitions());
partition1.add(new HashJoinMemoryCalculator.BatchStat(10, 8));
Assert.assertTrue(calc.shouldSpill());
}
@Test
public void testCompleteLifeCycle() {
final int maxBatchNumRecords = 20;
final double fragmentationFactor = 2.0;
final double safetyFactor = 1.5;
final HashJoinMemoryCalculatorImpl.BuildSidePartitioningImpl calc =
new HashJoinMemoryCalculatorImpl.BuildSidePartitioningImpl(
BatchSizePredictorImpl.Factory.INSTANCE,
new HashTableSizeCalculatorConservativeImpl(RecordBatch.MAX_BATCH_ROW_COUNT, HashTableSizeCalculatorConservativeImpl.HASHTABLE_DOUBLING_FACTOR),
HashJoinHelperSizeCalculatorImpl.INSTANCE,
fragmentationFactor,
safetyFactor, false);
final CaseInsensitiveMap<Long> keySizes = CaseInsensitiveMap.newHashMap();
calc.initialize(
true,
false,
keySizes,
160,
2,
false,
new MockBatchSizePredictor(20, 20, fragmentationFactor, safetyFactor),
new MockBatchSizePredictor(10, 10, fragmentationFactor, safetyFactor),
10,
5,
maxBatchNumRecords,
maxBatchNumRecords,
16000,
.75);
final PartitionStatImpl partition1 = new PartitionStatImpl();
final PartitionStatImpl partition2 = new PartitionStatImpl();
final HashJoinMemoryCalculator.PartitionStatSet partitionStatSet =
new HashJoinMemoryCalculator.PartitionStatSet(partition1, partition2);
calc.setPartitionStatSet(partitionStatSet);
// Add to partition 1, no spill needed
{
partition1.add(new HashJoinMemoryCalculator.BatchStat(10, 7));
Assert.assertFalse(calc.shouldSpill());
}
// Add to partition 2, no spill needed
{
partition2.add(new HashJoinMemoryCalculator.BatchStat(10, 8));
Assert.assertFalse(calc.shouldSpill());
}
// Add to partition 1, and partition 1 spilled
{
partition1.add(new HashJoinMemoryCalculator.BatchStat(10, 8));
Assert.assertTrue(calc.shouldSpill());
partition1.spill();
}
// Add to partition 2, no spill needed
{
partition2.add(new HashJoinMemoryCalculator.BatchStat(10, 7));
Assert.assertFalse(calc.shouldSpill());
}
// Add to partition 2, and partition 2 spilled
{
partition2.add(new HashJoinMemoryCalculator.BatchStat(10, 8));
Assert.assertTrue(calc.shouldSpill());
partition2.spill();
}
Assert.assertNotNull(calc.next());
}
public static class MockBatchSizePredictor implements BatchSizePredictor {
private final boolean hasData;
private final long batchSize;
private final int numRecords;
private final double fragmentationFactor;
private final double safetyFactor;
public MockBatchSizePredictor() {
hasData = false;
batchSize = 0;
numRecords = 0;
fragmentationFactor = 0;
safetyFactor = 0;
}
public MockBatchSizePredictor(final long batchSize,
final int numRecords,
final double fragmentationFactor,
final double safetyFactor) {
hasData = true;
this.batchSize = batchSize;
this.numRecords = numRecords;
this.fragmentationFactor = fragmentationFactor;
this.safetyFactor = safetyFactor;
}
@Override
public long getBatchSize() {
return batchSize;
}
@Override
public int getNumRecords() {
return numRecords;
}
@Override
public boolean hadDataLastTime() {
return hasData;
}
@Override
public void updateStats() {
}
@Override
public long predictBatchSize(int desiredNumRecords, boolean reserveHash) {
Preconditions.checkState(hasData);
return BatchSizePredictorImpl.computeMaxBatchSize(batchSize,
numRecords,
desiredNumRecords,
fragmentationFactor,
safetyFactor,
reserveHash);
}
}
}
| kkhatua/drill | exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestBuildSidePartitioningImpl.java | Java | apache-2.0 | 18,568 |
//-----------------------------------------------------------------------
// <copyright file="AnchorController.cs" company="Google">
//
// Copyright 2018 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// </copyright>
//-----------------------------------------------------------------------
namespace GoogleARCore.Examples.CloudAnchors
{
using GoogleARCore;
using GoogleARCore.CrossPlatform;
using UnityEngine;
using UnityEngine.Networking;
/// <summary>
/// A Controller for the Anchor object that handles hosting and resolving the Cloud Anchor.
/// </summary>
    public class AnchorController : NetworkBehaviour
    {
        /// <summary>
        /// The Cloud Anchor ID that will be used to host and resolve the Cloud Anchor. This variable will be
        /// synchronized over all clients.
        /// </summary>
        /// <remarks>
        /// Because a hook is declared, UNET calls <c>_OnChangeId</c> on clients instead of
        /// assigning the field automatically; the hook stores the value itself.
        /// </remarks>
        [SyncVar(hook = "_OnChangeId")]
        private string m_CloudAnchorId = string.Empty;

        /// <summary>
        /// Indicates whether this script is running in the Host.
        /// </summary>
        private bool m_IsHost = false;

        /// <summary>
        /// Indicates whether an attempt to resolve the Cloud Anchor should be made.
        /// </summary>
        private bool m_ShouldResolve = false;

        /// <summary>
        /// The Cloud Anchors example controller.
        /// </summary>
        private CloudAnchorsExampleController m_CloudAnchorsExampleController;

        /// <summary>
        /// The Unity Start() method. Looks up the scene's example controller by name.
        /// </summary>
        public void Start()
        {
            m_CloudAnchorsExampleController = GameObject.Find("CloudAnchorsExampleController")
                .GetComponent<CloudAnchorsExampleController>();
        }

        /// <summary>
        /// The Unity OnStartClient() method. If the SyncVar already carries an id
        /// (the anchor was hosted before this client joined), start resolving it.
        /// </summary>
        public override void OnStartClient()
        {
            if (m_CloudAnchorId != string.Empty)
            {
                m_ShouldResolve = true;
            }
        }

        /// <summary>
        /// The Unity Update() method. Polls every frame while a resolve is pending;
        /// _ResolveAnchorFromId clears the flag once an attempt starts and re-sets it
        /// on failure so the attempt is retried.
        /// </summary>
        public void Update()
        {
            if (m_ShouldResolve)
            {
                _ResolveAnchorFromId(m_CloudAnchorId);
            }
        }

        /// <summary>
        /// Command run on the server to set the Cloud Anchor Id.
        /// </summary>
        /// <param name="cloudAnchorId">The new Cloud Anchor Id.</param>
        [Command]
        public void CmdSetCloudAnchorId(string cloudAnchorId)
        {
            m_CloudAnchorId = cloudAnchorId;
        }

        /// <summary>
        /// Gets the Cloud Anchor Id.
        /// </summary>
        /// <returns>The Cloud Anchor Id.</returns>
        public string GetCloudAnchorId()
        {
            return m_CloudAnchorId;
        }

        /// <summary>
        /// Hosts the user placed cloud anchor and associates the resulting Id with this object.
        /// </summary>
        /// <param name="lastPlacedAnchor">The last placed anchor.</param>
        public void HostLastPlacedAnchor(Component lastPlacedAnchor)
        {
            m_IsHost = true;

            // The concrete anchor type depends on the build target, hence the
            // conditional compilation below.
#if !UNITY_IOS
            var anchor = (Anchor)lastPlacedAnchor;
#elif ARCORE_IOS_SUPPORT
            var anchor = (UnityEngine.XR.iOS.UnityARUserAnchorComponent)lastPlacedAnchor;
#endif
#if !UNITY_IOS || ARCORE_IOS_SUPPORT
            XPSession.CreateCloudAnchor(anchor).ThenAction(result =>
            {
                if (result.Response != CloudServiceResponse.Success)
                {
                    Debug.Log(string.Format("Failed to host Cloud Anchor: {0}", result.Response));
                    m_CloudAnchorsExampleController.OnAnchorHosted(false, result.Response.ToString());
                    return;
                }

                Debug.Log(string.Format("Cloud Anchor {0} was created and saved.", result.Anchor.CloudId));
                // Pushing the id through the server updates the SyncVar, which in turn
                // triggers _OnChangeId on every client so they begin resolving.
                CmdSetCloudAnchorId(result.Anchor.CloudId);
                m_CloudAnchorsExampleController.OnAnchorHosted(true, result.Response.ToString());
            });
#endif
        }

        /// <summary>
        /// Resolves an anchor id and instantiates an Anchor prefab on it.
        /// </summary>
        /// <param name="cloudAnchorId">Cloud anchor id to be resolved.</param>
        private void _ResolveAnchorFromId(string cloudAnchorId)
        {
            m_CloudAnchorsExampleController.OnAnchorInstantiated(false);

            // If device is not tracking, let's wait to try to resolve the anchor.
            if (Session.Status != SessionStatus.Tracking)
            {
                return;
            }

            // Clear the flag so Update() stops re-entering while this async resolve
            // is in flight; it is re-set below if the resolve fails.
            m_ShouldResolve = false;

            XPSession.ResolveCloudAnchor(cloudAnchorId).ThenAction((System.Action<CloudAnchorResult>)(result =>
            {
                if (result.Response != CloudServiceResponse.Success)
                {
                    Debug.LogError(string.Format("Client could not resolve Cloud Anchor {0}: {1}",
                        cloudAnchorId, result.Response));
                    m_CloudAnchorsExampleController.OnAnchorResolved(false, result.Response.ToString());
                    // Retry on the next Update() tick.
                    m_ShouldResolve = true;
                    return;
                }

                Debug.Log(string.Format("Client successfully resolved Cloud Anchor {0}.",
                    cloudAnchorId));
                m_CloudAnchorsExampleController.OnAnchorResolved(true, result.Response.ToString());
                _OnResolved(result.Anchor.transform);
            }));
        }

        /// <summary>
        /// Callback invoked once the Cloud Anchor is resolved. Re-bases the shared
        /// world origin on the resolved anchor's transform.
        /// </summary>
        /// <param name="anchorTransform">Transform of the resolved Cloud Anchor.</param>
        private void _OnResolved(Transform anchorTransform)
        {
            var cloudAnchorController = GameObject.Find("CloudAnchorsExampleController")
                .GetComponent<CloudAnchorsExampleController>();
            cloudAnchorController.SetWorldOrigin(anchorTransform);
        }

        /// <summary>
        /// Callback invoked once the Cloud Anchor Id changes. The hook replaces UNET's
        /// automatic SyncVar assignment, so the value is stored manually here; the host
        /// skips this because it already holds the id it created.
        /// </summary>
        /// <param name="newId">New identifier.</param>
        private void _OnChangeId(string newId)
        {
            if (!m_IsHost && newId != string.Empty)
            {
                m_CloudAnchorId = newId;
                m_ShouldResolve = true;
            }
        }
    }
}
| googlesamples/arcore-illusive-images | Assets/GoogleARCore/Examples/CloudAnchors/Scripts/AnchorController.cs | C# | apache-2.0 | 7,121 |
package com.hubspot.baragon.agent.resources;
import com.google.inject.AbstractModule;
/**
 * Guice module that registers the Baragon agent's resource classes so the
 * injector can construct and serve them.
 */
public class BargonAgentResourcesModule extends AbstractModule {
  @Override
  public void configure() {
    // Bindings are independent of one another; each line simply makes the
    // resource class known to the injector.
    bind(StatusResource.class);
    bind(RequestResource.class);
    bind(BatchRequestResource.class);
    bind(RenderedConfigsResource.class);
    bind(PurgeCacheResource.class);
    bind(MetricsResource.class);
  }
}
| HubSpot/Baragon | BaragonAgentService/src/main/java/com/hubspot/baragon/agent/resources/BargonAgentResourcesModule.java | Java | apache-2.0 | 413 |
package net.webservicex;
import javax.jws.WebMethod;
import javax.jws.WebParam;
import javax.jws.WebResult;
import javax.jws.WebService;
import javax.jws.soap.SOAPBinding;
import javax.xml.bind.annotation.XmlSeeAlso;
/**
* This class was generated by Apache CXF 3.1.12
* 2017-07-19T16:19:00.063-04:00
* Generated source version: 3.1.12
*
*/
@WebService(targetNamespace = "http://www.webservicex.net/", name = "GeoIPServiceHttpGet")
@XmlSeeAlso({ObjectFactory.class})
@SOAPBinding(parameterStyle = SOAPBinding.ParameterStyle.BARE)
// NOTE: CXF-generated client stub (see the header above); regenerating the
// client from the WSDL will overwrite any manual edits to this interface.
public interface GeoIPServiceHttpGet {

    /**
     * GeoIPService - GetGeoIP enables you to easily look up countries by IP addresses
     */
    @WebMethod(operationName = "GetGeoIP")
    @WebResult(name = "GeoIP", targetNamespace = "http://www.webservicex.net/", partName = "Body")
    public GeoIP getGeoIP(
        @WebParam(partName = "IPAddress", name = "IPAddress", targetNamespace = "")
        java.lang.String ipAddress
    );

    /**
     * GeoIPService - GetGeoIPContext enables you to easily look up countries by Context
     */
    @WebMethod(operationName = "GetGeoIPContext")
    @WebResult(name = "GeoIP", targetNamespace = "http://www.webservicex.net/", partName = "Body")
    public GeoIP getGeoIPContext();
}
| AndrewFedoseev/java_pft | soap-sample/src/main/java/net/webservicex/GeoIPServiceHttpGet.java | Java | apache-2.0 | 1,269 |
# Shared and common functions (declustering redundant code)
import numpy as np, os
import random, cv2
import operator
def get(link, save_as=False):
    """Download the image at `link` into ./tmp and return it as an RGB array.

    Args:
        link: URL string of the image to fetch.
        save_as: optional filename (inside ./tmp) to save the download under;
            when falsy, 'tmp.png' is used.

    Returns:
        The decoded image with channels reversed to RGB (cv2.imread loads BGR).
    """
    # `urllib.urlretrieve` moved to `urllib.request` in Python 3; the original
    # code was Python-2 only. Support both interpreters.
    try:
        from urllib import urlretrieve  # Python 2
    except ImportError:
        from urllib.request import urlretrieve  # Python 3
    base_dir = './tmp'
    assert type(link) == str, type(link)
    if not os.path.exists(base_dir):
        os.makedirs(base_dir)
    if save_as:
        save_path = os.path.join(base_dir, save_as)
    else:
        save_path = os.path.join(base_dir, 'tmp.png')
    urlretrieve(link, save_path)
    # BGR -> RGB channel swap.
    im = cv2.imread(save_path)[:, :, [2, 1, 0]]
    return im
def softmax(X, theta=1.0, axis=None):
    """Compute a numerically stable softmax of `X` along `axis`.

    Args:
        X: array-like input (1-D inputs are returned flattened).
        theta: temperature multiplier applied to X before exponentiation.
        axis: axis to normalize over; when None, the first axis with more
            than one entry is used.

    Returns:
        Array of the same shape as np.atleast_2d(X) (flattened for 1-D X)
        whose entries along `axis` sum to 1.
    """
    y = np.atleast_2d(X)
    if axis is None:
        # First axis with more than one entry. The original `next(...)` with no
        # default raised StopIteration for all-singleton shapes (e.g. a scalar);
        # fall back to the last axis, yielding softmax([v]) == [1.0].
        axis = next((j[0] for j in enumerate(y.shape) if j[1] > 1), -1)
    y = y * float(theta)
    # Subtract the per-slice max for numerical stability (does not change the result).
    y = y - np.expand_dims(np.max(y, axis=axis), axis)
    y = np.exp(y)
    ax_sum = np.expand_dims(np.sum(y, axis=axis), axis)
    p = y / ax_sum
    # np.shape() (rather than X.shape) also accepts plain lists/tuples.
    if len(np.shape(X)) == 1:
        p = p.flatten()
    return p
def sort_dict(d, sort_by='value'):
    """Return d's (key, value) pairs as a list sorted by key or by value."""
    assert sort_by in ('value', 'key'), sort_by
    # Sort on element 0 of each pair for 'key', element 1 for 'value'.
    position = 0 if sort_by == 'key' else 1
    return sorted(d.items(), key=lambda pair: pair[position])
def random_crop(im, crop_size, return_crop_loc=False):
    """Cut a uniformly random (crop_size[0] x crop_size[1]) patch out of im.

    When return_crop_loc is True, also return the (row, col) of the
    patch's top-left corner.
    """
    height, width = np.shape(im)[:2]
    top = random.randint(0, height - crop_size[0])
    left = random.randint(0, width - crop_size[1])
    patch = im[top:top + crop_size[0], left:left + crop_size[1], :]
    # Sanity check: the slice must match the requested size exactly.
    assert tuple(np.shape(patch)[:2]) == tuple(crop_size)
    return (patch, (top, left)) if return_crop_loc else patch
def process_im(im):
    """Normalize an image into [-1.0, 1.0].

    Inputs whose maximum is <= 1 are treated as already scaled to [0, 1]
    (PNG-style); anything else is assumed to be in [0, 255] (JPEG-style).
    """
    im = np.array(im)
    if np.max(im) <= 1:
        # [0, 1] -> [-1, 1]
        return 2.0 * im - 1.0
    # [0, 255] -> [-1, 1]
    return 2.0 * (im / 255.) - 1.0
def deprocess_im(im, dtype=None):
    """Undo process_im: map values in [-1.0, 1.0] back to [0, 255].

    The result is cast with astype(dtype); dtype=None yields numpy's
    default float type.
    """
    arr = np.array(im)
    rescaled = 255.0 * (arr + 1.0) / 2.0
    return rescaled.astype(dtype)
def random_resize(im_a, im_b, same):
    """Randomly rescale a pair of images for self-consistency training.

    If `same` is True: with probability 1/2, both images receive one shared
    (height-ratio, width-ratio, interpolation) transform.
    If `same` is False: im_a is always resized, and with probability 1/2
    im_b receives a transform re-drawn until it differs from im_a's.
    Returns the (possibly resized) pair.
    """
    valid_interps = [cv2.INTER_NEAREST, cv2.INTER_LINEAR, cv2.INTER_CUBIC, cv2.INTER_LANCZOS4, cv2.INTER_AREA]
    def get_param():
        # Scale ratios drawn from the 11-point grid {0.5, 0.6, ..., 1.5}.
        hr, wr = np.random.choice(np.linspace(0.5, 1.5, 11), 2)
        #hr, wr = np.random.uniform(low=0.5, high=1.5, size=2)
        interp = np.random.choice(valid_interps)
        return [hr, wr, interp]
    if same:
        if np.random.randint(2):
            a_par = get_param()
            # NOTE(review): hr is passed as fx (width scale) and wr as fy; the
            # names suggest the opposite mapping — confirm intent.
            im_a = cv2.resize(im_a, None, fx=a_par[0], fy=a_par[1], interpolation=a_par[2])
            im_b = cv2.resize(im_b, None, fx=a_par[0], fy=a_par[1], interpolation=a_par[2])
    else:
        a_par = get_param()
        im_a = cv2.resize(im_a, None, fx=a_par[0], fy=a_par[1], interpolation=a_par[2])
        if np.random.randint(2):
            b_par = get_param()
            # Re-draw until im_b's parameters differ from im_a's transform.
            while np.all(a_par == b_par):
                b_par = get_param()
            im_b = cv2.resize(im_b, None, fx=b_par[0], fy=b_par[1], interpolation=b_par[2])
    return im_a, im_b
def random_jpeg(im_a, im_b, same):
    """Randomly JPEG-recompress a pair of images.

    If `same` is True: with probability 1/2, both images are re-encoded with
    one shared JPEG quality.
    If `same` is False: im_a is always re-encoded, and with probability 1/2
    im_b is re-encoded with a quality re-drawn until it differs from im_a's.
    Returns the (possibly recompressed) pair.
    """
    def get_param():
        # JPEG quality drawn from the 11-point grid {50, 55, ..., 100}.
        #jpeg_quality_a = np.random.randint(50, 100) # doesnt include 100
        return np.random.choice(np.linspace(50, 100, 11))
    if same:
        if np.random.randint(2):
            a_par = get_param()
            # Encode to an in-memory JPEG buffer, then decode it back.
            _, enc_a = cv2.imencode('.jpg', im_a, [int(cv2.IMWRITE_JPEG_QUALITY), a_par])
            im_a = cv2.imdecode(enc_a, 1)
            _, enc_b = cv2.imencode('.jpg', im_b, [int(cv2.IMWRITE_JPEG_QUALITY), a_par])
            im_b = cv2.imdecode(enc_b, 1)
    else:
        a_par = get_param()
        _, enc_a = cv2.imencode('.jpg', im_a, [int(cv2.IMWRITE_JPEG_QUALITY), a_par])
        im_a = cv2.imdecode(enc_a, 1)
        if np.random.randint(2):
            b_par = get_param()
            # Re-draw until im_b's quality differs from im_a's.
            while np.all(a_par == b_par):
                b_par = get_param()
            _, enc_b = cv2.imencode('.jpg', im_b, [int(cv2.IMWRITE_JPEG_QUALITY), b_par])
            im_b = cv2.imdecode(enc_b, 1)
    return im_a, im_b
def gaussian_blur(im, kSz=None, sigma=1.0):
    """Blur im with a square Gaussian kernel.

    When kSz is None, the kernel size is derived from sigma as
    ceil(3 * sigma), bumped to the next odd number if even (OpenCV requires
    odd kernel sizes) and clamped to a minimum of 3.
    """
    if kSz is None:
        size = np.ceil(3.0 * sigma)
        if size % 2 == 0:
            size += 1
        kSz = int(max(size, 3))
    return cv2.GaussianBlur(im, (kSz, kSz), sigma)
def random_blur(im_a, im_b, same):
    """Randomly Gaussian-blur a pair of images (square kernels only).

    If `same` is True: with probability 1/2, both images get one shared
    (kernel size, sigma) blur.
    If `same` is False: im_a is always blurred, and with probability 1/2
    im_b is blurred with parameters re-drawn until they differ from im_a's.
    Returns the (possibly blurred) pair.
    """
    # only square gaussian kernels
    def get_param():
        # Odd kernel sizes in [3, 15].
        kSz = (2 * np.random.randint(1, 8)) + 1 # [3, 15]
        # Sigma drawn from the 9-point grid {1.0, 1.5, ..., 5.0}.
        sigma = np.random.choice(np.linspace(1.0, 5.0, 9))
        #sigma = np.random.uniform(low=1.0, high=5.0, size=None) # 3 * sigma = kSz
        return [kSz, sigma]
    if same:
        if np.random.randint(2):
            a_par = get_param()
            im_a = cv2.GaussianBlur(im_a, (a_par[0], a_par[0]), a_par[1])
            im_b = cv2.GaussianBlur(im_b, (a_par[0], a_par[0]), a_par[1])
    else:
        a_par = get_param()
        im_a = cv2.GaussianBlur(im_a, (a_par[0], a_par[0]), a_par[1])
        if np.random.randint(2):
            b_par = get_param()
            # Re-draw until im_b's blur parameters differ from im_a's.
            while np.all(a_par == b_par):
                b_par = get_param()
            im_b = cv2.GaussianBlur(im_b, (b_par[0], b_par[0]), b_par[1])
    return im_a, im_b
def random_noise(im):
    """Add zero-mean Gaussian noise (std 10) to im, clamp to [0, 255], return uint8."""
    perturbed = im + 10.0 * np.random.randn(*np.shape(im))
    return np.clip(perturbed, 0, 255.0).astype(np.uint8)
| minyoungg/selfconsistency | lib/utils/util.py | Python | apache-2.0 | 5,391 |
package org.yadi.core;
/*
Copyright 2014 Julian Exenberger
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import java.lang.reflect.Constructor;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.stream.Collectors;
import static org.yadi.core.ReflectionUtils.resolveConstructor;
/**
* Created by julian3 on 2014/05/03.
*/
/**
 * A step that instantiates a type from an {@link Arguments} list. As a
 * {@code Consumer<Arguments>}, the implementor's {@code accept} populates the
 * argument list before construction.
 *
 * Created by julian3 on 2014/05/03.
 */
public interface Construction<T> extends Consumer<Arguments> {

    /**
     * Instantiates {@code implementation} by selecting and invoking a matching
     * constructor for the supplied arguments.
     *
     * <p>Argument values that are {@link Reference}s are resolved to concrete
     * objects via {@code objectResolver}; a Reference with no declared type
     * (its car is null) has its parameter type resolved via {@code typeResolver}.
     * Any reflective failure is rethrown as an unchecked {@link ContainerException}.
     */
    default T createWithConstructor(Arguments arguments, Class<T> implementation, Function<Reference, Object> typeResolver, Function<Reference, Object> objectResolver) {
        // Allow the implementor to populate/adjust the arguments first.
        accept(arguments);
        try {
            // Fast path: no arguments -> default constructor.
            if (arguments.size() == 0) {
                return implementation.newInstance();
            }
            // Build the constructor argument values. Each pair's cdr holds the
            // value (or a Reference to be resolved).
            Object[] args = arguments.stream().map((pair) -> {
                if (pair.getCdr() instanceof Reference && objectResolver != null) {
                    Reference ref = (Reference) pair.getCdr();
                    return objectResolver.apply(ref);
                } else {
                    return pair.getCdr();
                }
            }).toArray();
            // Build the constructor parameter types. Each pair's car holds the
            // declared type; when it is null and the value is a Reference, the
            // type is resolved through typeResolver instead.
            Class[] types = arguments.stream()
                    .map((pair) -> {
                        if (pair.getCdr() instanceof Reference && typeResolver != null && pair.getCar() == null) {
                            Reference ref = (Reference) pair.getCdr();
                            return typeResolver.apply(ref);
                        } else {
                            return pair.getCar();
                        }
                    })
                    .collect(Collectors.toList())
                    .toArray(new Class[]{});
            Constructor<T> theConstructor = resolveConstructor(implementation, types);
            return theConstructor.newInstance(args);
        } catch (Exception e) {
            // Wrap checked reflection exceptions in the container's runtime exception.
            throw new ContainerException(e);
        }
    }
}
| jexenberger/yadi | src/main/java/org/yadi/core/Construction.java | Java | apache-2.0 | 2,469 |
/*
* GeoTools - The Open Source Java GIS Toolkit
* http://geotools.org
*
* (C) 2008, Open Source Geospatial Foundation (OSGeo)
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation;
* version 2.1 of the License.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*/
package org.geotools.xacml.geoxacml.cond;
import java.net.URISyntaxException;
import java.util.List;
import org.geotools.xacml.geoxacml.attr.GeometryAttribute;
import org.wso2.balana.attr.AttributeValue;
import org.wso2.balana.cond.Evaluatable;
import org.wso2.balana.cond.EvaluationResult;
import org.wso2.balana.ctx.EvaluationCtx;
import com.vividsolutions.jts.geom.Geometry;
/**
* Calculates the centroid
*
* @author Christian Mueller
*
*/
public class GeometryCentroid extends GeometryConstructFunction {

    public static final String NAME = NAME_PREFIX + "geometry-centroid";

    public GeometryCentroid() {
        super(NAME, 0, new String[] { GeometryAttribute.identifier },
                new boolean[] { false, false }, GeometryAttribute.identifier, false);
    }

    /**
     * Evaluates the function: computes the centroid of the single geometry
     * argument and returns it as a new GeometryAttribute carrying the input's
     * srsName.
     */
    // public EvaluationResult evaluate(List<? extends Expression> inputs, EvaluationCtx context) {
    public EvaluationResult evaluate(List<Evaluatable> inputs, EvaluationCtx context) {
        AttributeValue[] argValues = new AttributeValue[inputs.size()];
        // evalArgs returns a non-null result only on evaluation failure.
        EvaluationResult result = evalArgs(inputs, context, argValues);
        if (result != null)
            return result;

        GeometryAttribute geomAttr = (GeometryAttribute) (argValues[0]);

        Geometry resultGeom = null;
        try {
            // JTS centroid of the input geometry.
            resultGeom = geomAttr.getGeometry().getCentroid();
        } catch (Throwable t) {
            // Convert any geometry-processing failure into an error result.
            return exceptionError(t);
        }

        GeometryAttribute resultAttr = null;
        try {
            resultAttr = new GeometryAttribute(resultGeom, geomAttr.getSrsName(), null, null, null);
        } catch (URISyntaxException e) {
            // should not happend
        }
        return new EvaluationResult(resultAttr);
    }
}
| TU-Berlin-SNET/tresor-pdp | modules/geoxacml/src/main/java/org/geotools/xacml/geoxacml/cond/GeometryCentroid.java | Java | apache-2.0 | 2,392 |
angular.module('dCare.Authentication', ['ionic',
'dCare.Services.UserStore', 'dCare.ApiInvoker'])
.factory("AuthenticationService", function ($q, UserStore, ApiInvokerService, $ionicLoading, $mdDialog) {
var login = function (userID,password) {
//NR: TODO: Call Rest api for login
//NR: TODO: if Login Success => merge the recieved User Data with existing User Data {wireup following code}
var deferredLogin = $q.defer();
//NR: Mock call
var apiPayLoad = { email: userID, password: password };
ApiInvokerService.login(apiPayLoad).then(function (remoteUserData) { /// This Call will be replased by Actual Login Service call-promise
UserStore.getUser().then(function (user) {
var localUserData = (user) ? user : {};
localUserData.firstName = remoteUserData.firstname;
localUserData.lastName = remoteUserData.lastname;
localUserData.email = remoteUserData.email;
localUserData.photo = remoteUserData.photo;
localUserData.authToken = remoteUserData.token;
localUserData.tokenExpiryDate = (parseJWT(remoteUserData.token)).exp;
localUserData.loginDatetime = remoteUserData.loginDatetime;
localUserData.patients = (localUserData.patients) ? localUserData.patients : [];
var patientFound;
angular.forEach(remoteUserData.patients, function (remotePatient, key) {
patientFound = false;
//NR: Attempt to find if patient aready present locally
angular.forEach(localUserData.patients, function (localPatient, key) {
if (localPatient.guid === remotePatient.guid) {
patientFound = true;
localPatient.isDefault = remotePatient.isDefault;
localPatient.fullName = remotePatient.fullName;
localPatient.photo = remotePatient.photo;
}
});
//NR: If Patient does not exist, add Sync Flags, add Patient.
if (!patientFound) {
remotePatient.isEdited = false;
remotePatient.initialSyncStatus = "notSynced";
remotePatient.syncStatus = "notSynced";
remotePatient.syncStartDate = '';
remotePatient.syncEndDate = '';
localUserData.patients.push(remotePatient);
}
});
UserStore.save(localUserData).then(function (localUserData) {
//@NR: Saved Latest User Data
deferredLogin.resolve(localUserData);
//@NR: NOTE: Post login, the initi module will automatically handle initial sync responsiblity.
}).fail(function (err) {
//@NR: Save Latest Data Failed, Try Again to Login.
deferredLogin.reject(err);
});
}).fail(function (err) {
// Login Failed try login again.
deferredLogin.reject(err);
});
}).catch(function (error) {
// Login Service call Failed try login again.
app.log.error("Failed Login Call [Error]- " + JSON.stringify(error));
deferredLogin.reject("Unable to connect server !!");
});
return deferredLogin.promise;
};
// Refreshes the authentication token for the locally stored user and
// persists the updated user data.
// Resolves with the saved user data; rejects with "No User" when no local
// user exists, with the save error when persisting fails, or with a generic
// message when the token-refresh call itself fails.
// NOTE(review): UserStore promises use .fail() while ApiInvokerService
// promises use .catch() — presumably different promise libraries; confirm
// before unifying the error handling.
var refreshToken = function () {
    var deferredLogin = $q.defer();
    UserStore.getUser().then(function (localUserData) {
        ApiInvokerService.refreshToken().then(function (remoteUserData) {
            // Only refresh when a user actually exists locally.
            if (localUserData && localUserData.email) {
                // Expose the fresh token to sync services via the app context.
                app.context.dbAuthCookie = remoteUserData.token;
                var jwtPayload = parseJWT(remoteUserData.token);
                // Only the token and its expiry are updated here.
                localUserData.authToken = remoteUserData.token;
                // parseJWT converts exp to milliseconds already.
                localUserData.tokenExpiryDate = jwtPayload.exp;
                ////localUserData.loginDatetime = remoteUserData.loginDatetime;
                //app.context.dbAuthCookie = jwtPayload.cookie;
                // Persist the refreshed token to the user store.
                UserStore.save(localUserData).then(function (localUserData) {
                    deferredLogin.resolve(localUserData);
                }).fail(function (err) {
                    // Save failed; caller should retry the login.
                    deferredLogin.reject(err);
                });
            } else {
                deferredLogin.reject("No User");
            }
        }).catch(function (err) {
            // Token refresh call failed; caller should log in again.
            deferredLogin.reject(err);
        });
    }).fail(function (error) {
        // Could not read the local user store.
        app.log.error("Failed Token Refresh Call [Error]-" + JSON.stringify(error));
        deferredLogin.reject("Unable to connect server !!");
    });
    return deferredLogin.promise;
};
// Logs the current user out by wiping the session-related fields of the
// stored user record. Resolves on success, rejects with the store error.
var logout = function () {
    var deferred = $q.defer();
    var rejectWith = function (err) {
        deferred.reject(err);
    };
    UserStore.getUser().then(function (user) {
        // Clear session data but keep the rest of the profile intact.
        user.authToken = '';
        user.tokenExpiryDate = '';
        user.patients = [];
        user.loginDatetime = '';
        UserStore.save(user)
            .then(function () {
                deferred.resolve();
            })
            .fail(rejectWith);
    }).fail(rejectWith);
    return deferred.promise;
};
// Checks whether a valid (non-expired) login session exists.
// Resolves when the stored user has an auth token that has not expired,
// rejects otherwise (including store-read failures).
var checkLogin = function () {
    var deferred = $q.defer();
    UserStore.getUser().then(function (user) {
        // Valid when a user exists, carries a token, and the token has
        // not yet expired.
        var isValid =
            !!user &&
            !!user.authToken &&
            castToLongDate(user.tokenExpiryDate) > castToLongDate(new Date());
        if (isValid) {
            deferred.resolve();
        } else {
            deferred.reject();
        }
    }).fail(function () {
        deferred.reject();
    });
    return deferred.promise;
};
// Registers a new user via the signup API.
// Resolves with the newly created user data; rejects with the API error.
// NOTE: the user record is persisted locally during the login flow, not here.
// TODO: add basic user-data validation before calling the API.
var register = function (user) {
    var deferredRegisterUser = $q.defer();
    ApiInvokerService.signup(user).then(function (newUserData) {
        deferredRegisterUser.resolve(newUserData);
    }).catch(function (err) {
        // Propagate the failure reason so callers can surface it
        // (previously the promise was rejected with no value, dropping err).
        deferredRegisterUser.reject(err);
    });
    return deferredRegisterUser.promise;
};
//@private
// Decodes a base64 string via window.atob.
// Returns the decoded string, or "" when the input is not valid base64
// (the decode error is logged).
var decodeBas64 = function (base64string) {
    try {
        return window.atob(base64string);
    } catch (err) {
        console.log(err);
        return "";
    }
};
//@private
// Parses a JWT and returns its payload object.
// exp/iat are converted from seconds (as sent by the server) to
// milliseconds. Returns {exp: ''} when the payload cannot be decoded.
var parseJWT = function (token) {
    var base64Url = token.split('.')[1];
    // base64url -> base64. The replacements must be GLOBAL: String.replace
    // with a string pattern only substitutes the first occurrence, which
    // corrupted payloads containing more than one '-' or '_'.
    var base64 = base64Url.replace(/-/g, '+').replace(/_/g, '/');
    var payload = { exp: '' };
    var base64Decoded = decodeBas64(base64);
    if (base64Decoded) {
        payload = JSON.parse(base64Decoded);
        // Convert time-stamps to milliseconds, because the server sends seconds.
        payload.exp = payload.exp * 1000;
        payload.iat = payload.iat * 1000;
    }
    return payload;
};
return {
login: login,
logout: logout,
checkLogin: checkLogin,
register: register,
refreshToken: refreshToken
};
}); | nozelrosario/Dcare | www/controllers/user/authenticationService.js | JavaScript | apache-2.0 | 8,882 |
/*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.plugins.thrift.structure;
import com.intellij.ide.IdeBundle;
import com.intellij.ide.structureView.StructureViewModel;
import com.intellij.ide.structureView.StructureViewModelBase;
import com.intellij.ide.structureView.StructureViewTreeElement;
import com.intellij.ide.util.treeView.smartTree.*;
import com.intellij.plugins.thrift.lang.psi.ThriftDeclaration;
import com.intellij.plugins.thrift.lang.psi.ThriftSubDeclaration;
import com.intellij.plugins.thrift.lang.psi.ThriftTopLevelDeclaration;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.util.PlatformIcons;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
/**
 * Structure-view model for Thrift files: shows top-level declarations and,
 * optionally, their sub-declarations (fields), sorted alphabetically.
 *
 * @author: Fedor.Korotkov
 */
public class ThriftStructureViewModel extends StructureViewModelBase implements StructureViewModel.ElementInfoProvider {
  public ThriftStructureViewModel(@NotNull PsiFile psiFile) {
    super(psiFile, new ThriftStructureViewElement(psiFile));
    // Allow alphabetical sorting and jump-to-declaration for Thrift elements.
    withSorters(Sorter.ALPHA_SORTER);
    withSuitableClasses(ThriftDeclaration.class);
  }

  @Override
  public boolean isAlwaysShowsPlus(StructureViewTreeElement element) {
    // Never force an expand handle on nodes.
    return false;
  }

  @NotNull
  @Override
  public Filter[] getFilters() {
    return new Filter[]{ourFieldsFilter};
  }

  @Override
  public boolean isAlwaysLeaf(StructureViewTreeElement element) {
    // Sub-declarations (fields) have no children of their own.
    final Object value = element.getValue();
    return value instanceof ThriftSubDeclaration;
  }

  @Override
  public boolean shouldEnterElement(Object element) {
    // Only descend into top-level declarations.
    return element instanceof ThriftTopLevelDeclaration;
  }

  /**
   * Toolbar filter toggling the visibility of sub-declarations (fields).
   * isReverted() == true means the filter HIDES elements when enabled.
   */
  private static final Filter ourFieldsFilter = new Filter() {
    @NonNls public static final String ID = "SHOW_FIELDS";

    public boolean isVisible(TreeElement treeNode) {
      if (!(treeNode instanceof ThriftStructureViewElement)) return true;
      final PsiElement element = ((ThriftStructureViewElement)treeNode).getRealElement();
      // Hide sub-declarations when the filter is active.
      return !(element instanceof ThriftSubDeclaration);
    }

    public boolean isReverted() {
      return true;
    }

    @NotNull
    public ActionPresentation getPresentation() {
      return new ActionPresentationData(
        IdeBundle.message("action.structureview.show.fields"),
        null,
        PlatformIcons.FIELD_ICON
      );
    }

    @NotNull
    public String getName() {
      return ID;
    }
  };
}
| fkorotkov/intellij-thrift | thrift/src/main/java/com/intellij/plugins/thrift/structure/ThriftStructureViewModel.java | Java | apache-2.0 | 2,968 |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.gluedatabrew.model.transform;
import java.math.*;
import javax.annotation.Generated;
import com.amazonaws.services.gluedatabrew.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
 * ExcelOptions JSON Unmarshaller.
 *
 * NOTE: generated code (aws-java-sdk-code-generator) — do not hand-edit logic.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ExcelOptionsJsonUnmarshaller implements Unmarshaller<ExcelOptions, JsonUnmarshallerContext> {

  /**
   * Reads an ExcelOptions object from the JSON stream held by the context.
   * Returns null when the current token is a JSON null.
   */
  public ExcelOptions unmarshall(JsonUnmarshallerContext context) throws Exception {
    ExcelOptions excelOptions = new ExcelOptions();

    // Track where this object started so nested/sibling objects are not consumed.
    int originalDepth = context.getCurrentDepth();
    String currentParentElement = context.getCurrentParentElement();
    int targetDepth = originalDepth + 1;

    JsonToken token = context.getCurrentToken();
    if (token == null)
      token = context.nextToken();
    if (token == VALUE_NULL) {
      return null;
    }

    // Walk tokens until this object's closing brace is reached.
    while (true) {
      if (token == null)
        break;

      if (token == FIELD_NAME || token == START_OBJECT) {
        if (context.testExpression("SheetNames", targetDepth)) {
          context.nextToken();
          excelOptions.setSheetNames(new ListUnmarshaller<String>(context.getUnmarshaller(String.class))

          .unmarshall(context));
        }
        if (context.testExpression("SheetIndexes", targetDepth)) {
          context.nextToken();
          excelOptions.setSheetIndexes(new ListUnmarshaller<Integer>(context.getUnmarshaller(Integer.class))

          .unmarshall(context));
        }
        if (context.testExpression("HeaderRow", targetDepth)) {
          context.nextToken();
          excelOptions.setHeaderRow(context.getUnmarshaller(Boolean.class).unmarshall(context));
        }
      } else if (token == END_ARRAY || token == END_OBJECT) {
        // Stop once we are back at (or above) the depth where this object began.
        if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
          if (context.getCurrentDepth() <= originalDepth)
            break;
        }
      }
      token = context.nextToken();
    }

    return excelOptions;
  }

  private static ExcelOptionsJsonUnmarshaller instance;

  /** Lazily-created shared instance (not thread-safe, matching generator output). */
  public static ExcelOptionsJsonUnmarshaller getInstance() {
    if (instance == null)
      instance = new ExcelOptionsJsonUnmarshaller();
    return instance;
  }
}
| aws/aws-sdk-java | aws-java-sdk-gluedatabrew/src/main/java/com/amazonaws/services/gluedatabrew/model/transform/ExcelOptionsJsonUnmarshaller.java | Java | apache-2.0 | 3,301 |
'use strict';
/**
* @ngdoc function
* @name adsGtApp.controller:AboutCtrl
* @description
* # AboutCtrl
* Controller of the adsGtApp
*/
angular.module('adsGtApp')
  // Controller backing the drug-search page: a typeahead of drug names,
  // a side-effect bar chart (serious vs. not-serious incidents from the
  // openFDA adverse-event API) and the FDA label information for the drug.
  .controller('DrugCtrl', ['$scope', '$http','$modal','FDA_API', function ($scope, $http, $modal, FDA_API) {
    // console.log(FDA_API.drugEvent);

    // Loads the list of known drug names (lower-cased) into
    // $scope.drugList for the typeahead.
    function getDrugList() {
      $http.get(FDA_API.drugEvent+'?count=patient.drug.medicinalproduct.exact&limit=1000')
        .success(function(response) {
          var results=response.results;
          // console.log(results);
          $scope.drugList=results.map(function(item){return item.term.toLowerCase();});
        });
    }
    getDrugList();

    // $http.get(FDA_API.drugEvent+"?count=patient.drug.medicinalproduct.exact&limit=1000")
    // .success(function(response) {
    // var results=response.results;
    // console.log(results);
    // $scope.drugList=results.map(function(item){return item.term.toLowerCase();});
    // console.log($scope.drugList);
    // });

    // Search-button handler: reuse the typeahead selection handler.
    $scope.onClick=function() {
      if ($scope.selected)
        $scope.onSelect($scope.selected, $scope.selected, $scope.selected)
    }

    $scope.paddingForLabel=0;

    // Typeahead selection handler: queries serious (serious:1) and
    // not-serious (serious:2) adverse events plus the drug label, then
    // feeds the chart. NOTE(review): the two event queries and the label
    // query run concurrently; the chart is only populated once BOTH event
    // queries succeed (nested .success callbacks).
    $scope.onSelect=function($item, $model, $label) {
      // console.log($item, $model, $label);
      // Reducer computing the longest reaction label (for left padding).
      var maxTextLength=function(a, b) { return Math.max(a, b.label.length);};
      var chartValues1;
      var chartValues2;
      var chartHeight1;
      var chartHeight2;
      // NOTE(review): String.replace with a string pattern only replaces
      // the first space — multi-word drug names may build a bad query.
      var drug=$item.replace(' ', '+');
      var fdaUrl1=FDA_API.drugEvent+'?search=patient.drug.medicinalproduct:"'+drug+'"+AND+serious:1&count=patient.reaction.reactionmeddrapt.exact';
      var fdaUrl2=FDA_API.drugEvent+'?search=patient.drug.medicinalproduct:"'+drug+'"+AND+serious:2&count=patient.reaction.reactionmeddrapt.exact';
      // console.log(fdaUrl1);
      $http.get(fdaUrl1).success(function(response) {
        var results=response.results;
        // console.log(results);
        var resultCount=results.length;
        // 50px of chart height per reaction row.
        chartHeight1=resultCount*50;
        chartValues1=results.map(function(item) {return {"label":item.term.toLowerCase(), "value":Math.floor(item.count)};});
        var maxEffectLength=chartValues1.reduce(maxTextLength, 0);
        $http.get(fdaUrl2).success(function(response) {
          var results=response.results;
          // console.log(results);
          var resultCount=results.length;
          chartHeight2=resultCount*50;
          chartValues2=results.map(function(item) {return {"label":item.term.toLowerCase(), "value":Math.floor(item.count)};});
          maxEffectLength= chartValues2.reduce(maxTextLength,maxEffectLength);
          // Both series are available now: populate the chart.
          $scope.chartData[0].values=chartValues1;
          // $scope.chartData[0].key="Side Effect Count1";
          $scope.chartData[1].values=chartValues2;
          // $scope.chartData[1].key="Side Effect Count2";
          $scope.chartOptions.chart.height=chartHeight1+chartHeight2;
          $scope.chartOptions.title.text="Reported Side Effects Related to "+$item;
          if (!$scope.chartConfig.visible)
            $scope.chartConfig.visible=true;
        });
        // Estimate pixel padding from the longest label, capped at 120px.
        maxEffectLength=Math.ceil(maxEffectLength*4.7);
        if (maxEffectLength>120)
          maxEffectLength=120;
        $scope.paddingForLabel=maxEffectLength;
      });
      // Fetch FDA labeling information for the selected drug.
      var labelUrl=FDA_API.drugLabel+'?search=substance_name:"'+drug+'"';
      $http.get(labelUrl).success(function(response) {
        var results=response.results[0];
        console.log(results);
        $scope.openFdaLabel=results.openfda;
        console.log("openfda");
        console.log(results.openfda);
        // The openfda sub-object is displayed separately; remove it from
        // the generic label listing.
        delete results.openfda;
        // delete results.set_id;
        // delete results.id;
        // delete results.effective_time;
        $scope.drugLabel=results;
        $scope.labelNotFound=undefined;
      }).error(function() {
        $scope.labelNotFound="No labeling information is found for "+$item;
      });
    };

    // Renders a label value for display: arrays are joined with ", ",
    // non-objects pass through unchanged.
    $scope.formatLabel=function(val) {
      var result;
      if (typeof val !=="object")
        return val;
      for (var i=0; val.length && i<val.length; i++) {
        result =result?result+", "+val[i]:val[i];
      }
      return result?result:val;
    };

    // nvd3 horizontal multi-bar chart configuration.
    $scope.chartOptions = {
      chart: {
        type: 'multiBarHorizontalChart',
        height: 450,
        x: function(d){return d.label;},
        y: function(d){return d.value;},
        showControls: false,
        showValues: true,
        transitionDuration: 500,
        valueFormat: function (n){return d3.format(',')(n);},
        showYAxis:false,
        tooltipContent: function(key, x, y, e, graph){return '<center><span class="tooltipUpper">'+key+'</span><br>'+x+': '+y+'</center>'},
        xAxis: {
          showMaxMin: false,
          margin: {
            top:0,
            left:0,
            bottom:0,
            right:100
          }
        },
        yAxis: {
          axisLabel: 'Incident Count',
          tickFormat: function(d){
            return d3.format(',')(d);
          },
          showMaxMin: false
        },
      },
      title: {
        enable: true,
        text:'',
        class: 'h3'
      }
    };

    // Chart hidden until the first successful query.
    $scope.chartConfig={
      visible: false
    }

    // Two series: serious (red) and not-serious (blue) incident counts.
    $scope.chartData = [
      {
        "key": 'Serious Incidents',
        "color": "#d62728",
        "values": [
        ]
      },
      {
        "key": 'Not Serious Incidents',
        "color": "#1f77b4",
        "values": [
        ]
      }
    ];

    // Opens the "about" modal dialog.
    $scope.open = function (size) {
      var modalInstance = $modal.open({
        animation: false,
        templateUrl: 'aboutModalContent.html',
        controller: 'ModalInstanceCtrl',
        size: size,
        // resolve: {
        // items: function () {
        // return $scope.items;
        // }
        // }
      });
      modalInstance.result.then(function (returned) {
        console.log("modal returned="+returned);
        console.info("modal closed");
        // $('.modal').remove();
        // $('body').removeClass('modal-open');
        // $('.modal-backdrop').remove();
      }, function () {
        console.log('Modal dismissed at: ' + new Date());
      });
    };
  }]);

// Minimal controller for the about-modal; just closes the dialog.
angular.module('adsGtApp').controller('ModalInstanceCtrl', function ($scope, $modalInstance) {
  $scope.close = function () {
    $modalInstance.close("I am closed");
  };
});
| globaltunnels/bowlofhygieia | app/scripts/controllers/drug.js | JavaScript | apache-2.0 | 7,212 |
// Copyright 2016 Kai Chen <281165273@qq.com> (@grapebaba)
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package spec
import (
"fmt"
"strings"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/client-go/pkg/api/v1"
)
const (
	// PeersTPRKind is the third-party-resource kind used for peer clusters.
	PeersTPRKind = "cluster"
	// PeerClusterTPRDescription is the human-readable TPR description.
	PeerClusterTPRDescription = "Managed hyperledger fabric peer cluster"
)
// PeersTPRName returns the fully-qualified third-party-resource name,
// composed as "<kind>.<group>".
func PeersTPRName() string {
	return fmt.Sprintf("%s.%s", PeersTPRKind, TPRGroup)
}
// PeerCluster is the third-party resource object describing a managed
// Hyperledger Fabric peer cluster.
type PeerCluster struct {
	metav1.TypeMeta `json:",inline"`
	Metadata v1.ObjectMeta `json:"metadata,omitempty"`
	Spec PeerClusterSpec `json:"spec"`
	Status ClusterStatus `json:"status"`
}
// AsOwner returns an OwnerReference pointing at this PeerCluster, with
// Controller set to true, for use on dependent objects.
func (c *PeerCluster) AsOwner() metav1.OwnerReference {
	trueVar := true
	// TODO: In 1.6 this is gonna be "k8s.io/kubernetes/pkg/apis/meta/v1"
	// Both api.OwnerReference and metatypes.OwnerReference are combined into that.
	return metav1.OwnerReference{
		APIVersion: c.APIVersion,
		Kind:       c.Kind,
		Name:       c.Metadata.Name,
		UID:        c.Metadata.UID,
		Controller: &trueVar,
	}
}
// MSPSpec holds the certificate/key material of a membership service
// provider directory, keyed by file name.
type MSPSpec struct {
	AdminCerts map[string][]byte `json:"admin_certs"`
	CACerts map[string][]byte `json:"ca_certs"`
	KeyStore map[string][]byte `json:"key_store"`
	SignCerts map[string][]byte `json:"sign_certs"`
	IntermediateCerts map[string][]byte `json:"intermediate_certs,omitempty"`
}

// IdentitySpec ties an MSP to the organization MSP id it belongs to.
type IdentitySpec struct {
	OrgMSPId string `json:"org_msp_id"`
	MSP *MSPSpec `json:"msp"`
}

// TLSSpec carries the optional TLS material for the peer itself and for
// its VM (chaincode container) endpoint.
type TLSSpec struct {
	PeerCert []byte `json:"peer_cert,omitempty"`
	PeerKey []byte `json:"peer_key,omitempty"`
	PeerRootCert []byte `json:"peer_root_cert,omitempty"`
	VMCert []byte `json:"vm_cert,omitempty"`
	VMKey []byte `json:"vm_key,omitempty"`
	VMRootCert []byte `json:"vm_root_cert,omitempty"`
}

// PeerSpec describes a single peer: its identity, optional TLS material,
// channel membership, chain and extra config.
type PeerSpec struct {
	Identity *IdentitySpec `json:"identity"`
	TLS *TLSSpec `json:"tls,omitempty"`
	// Channels defines the channels of this peer own to.
	Channels []string `json:"channels,omitempty"`
	Chain string `json:"chain"`
	Config map[string]string `json:"config,omitempty"`
}

// PeerClusterSpec extends the common ClusterSpec with per-peer settings.
type PeerClusterSpec struct {
	ClusterSpec `json:",inline"`
	// Peers defines the secure info and config for the peer cluster
	Peers []*PeerSpec `json:"peers"`
}
// Cleanup normalizes a user-supplied spec: it applies the default version
// when none is given and strips a leading "v" from the version string
// (e.g. "v2.0.0" -> "2.0.0").
// TODO: move this to admission controller
func (c *PeerClusterSpec) Cleanup() {
	if len(c.Version) == 0 {
		c.Version = defaultVersion
	}

	// TrimPrefix removes at most one leading "v". The previous TrimLeft
	// treated "v" as a cutset and would strip EVERY leading 'v' character.
	c.Version = strings.TrimPrefix(c.Version, "v")
}
// Validate checks the spec for consistency. There are currently no
// validation rules, so it always returns nil.
func (c *PeerClusterSpec) Validate() error {
	return nil
}
| GrapeBaBa/fabric-operator | spec/peer_cluster.go | GO | apache-2.0 | 3,019 |
/*
* Copyright (c) 2005-2009 Grameen Foundation USA
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*
* See also http://www.apache.org/licenses/LICENSE-2.0.html for an
* explanation of the license and how it is applied.
*/
package org.mifos.test.acceptance.admin;
import org.mifos.framework.util.DbUnitUtilities;
import org.mifos.test.acceptance.framework.AppLauncher;
import org.mifos.test.acceptance.framework.HomePage;
import org.mifos.test.acceptance.framework.MifosPage;
import org.mifos.test.acceptance.framework.UiTestCaseBase;
import org.mifos.test.acceptance.framework.admin.AdminPage;
import org.mifos.test.acceptance.framework.admin.ViewFundsPage;
import org.mifos.test.acceptance.remote.InitializeApplicationRemoteTestingService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.datasource.DriverManagerDataSource;
import org.springframework.test.context.ContextConfiguration;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
@ContextConfiguration(locations = { "classpath:ui-test-context.xml" })
@Test(sequential = true, groups = {"acceptance","ui"})
/**
 * Selenium acceptance tests for the admin "View Funds" page: page
 * navigation plus verification of the fund table contents against a
 * known DbUnit data set.
 */
public class ViewFundsTest extends UiTestCaseBase {

  private AppLauncher appLauncher;

  @Autowired
  private DriverManagerDataSource dataSource;
  @Autowired
  private DbUnitUtilities dbUnitUtilities;
  @Autowired
  private InitializeApplicationRemoteTestingService initRemote;

  // DbUnit fixture loaded for the table-contents test.
  private static String dataFileName = "acceptance_small_003_dbunit.xml.zip";

  @SuppressWarnings("PMD.SignatureDeclareThrowsException")
  // one of the dependent methods throws Exception
  @BeforeMethod
  public void setUp() throws Exception {
    super.setUp();
    appLauncher = new AppLauncher(selenium);
  }

  @AfterMethod
  public void tearDown() {
    // Always log out so the next test starts from a clean session.
    (new MifosPage(selenium)).logout();
  }

  // Smoke test: the View Funds page is reachable from the admin page.
  public void verifyViewFundsTest() {
    AdminPage adminPage = loginAndGoToAdminPage();
    ViewFundsPage viewFundsPage = adminPage.navigateToViewFundsPage();
    viewFundsPage.verifyPage();
  }

  @SuppressWarnings("PMD.SignatureDeclareThrowsException")
  // Verifies fund names and codes against the loaded DbUnit data set.
  public void verifyViewFundsTableContentsTest() throws Exception {
    initRemote.dataLoadAndCacheRefresh(dbUnitUtilities,
    dataFileName,
    dataSource, selenium);
    AdminPage adminPage = loginAndGoToAdminPage();
    ViewFundsPage viewFundsPage = adminPage.navigateToViewFundsPage();
    String[] expectedFundNames = new String[]{
      "Non Donor",
      "Funding Org A",
      "Funding Org B",
      "Funding Org C",
      "Funding Org D"
    };
    String[] expectedFundCodes = new String[]{
      "00",
      "00",
      "00",
      "00",
      "00",
    };
    viewFundsPage.verifyFundName(expectedFundNames);
    viewFundsPage.verifyFundCode(expectedFundCodes);
  }

  // Logs in with default credentials and navigates to the admin page.
  private AdminPage loginAndGoToAdminPage() {
    HomePage homePage = appLauncher.launchMifos().loginSuccessfullyUsingDefaultCredentials();
    homePage.verifyPage();
    AdminPage adminPage = homePage.navigateToAdminPage();
    adminPage.verifyPage();
    return adminPage;
  }
}
| mifos/1.4.x | acceptanceTests/src/test/java/org/mifos/test/acceptance/admin/ViewFundsTest.java | Java | apache-2.0 | 3,999 |
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("09.DeleteOddLines")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("09.DeleteOddLines")]
[assembly: AssemblyCopyright("Copyright © 2014")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("442ab2d1-1b41-4969-886d-071bcf4401fe")]
// Version information for an assembly consists of the following four values:
//
// Major Version
// Minor Version
// Build Number
// Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
| PolyaNM/TelerikAcademy | C#/CSharp2-Homework12-TextFiles/09.DeleteOddLines/Properties/AssemblyInfo.cs | C# | apache-2.0 | 1,410 |
/*
* Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH
* under one or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information regarding copyright
* ownership. Camunda licenses this file to you under the Apache License,
* Version 2.0; you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.rest.impl;
import com.fasterxml.jackson.jaxrs.json.JacksonJsonProvider;
import org.camunda.bpm.engine.rest.exception.ExceptionHandler;
import org.camunda.bpm.engine.rest.exception.JsonMappingExceptionHandler;
import org.camunda.bpm.engine.rest.exception.JsonParseExceptionHandler;
import org.camunda.bpm.engine.rest.exception.ProcessEngineExceptionHandler;
import org.camunda.bpm.engine.rest.exception.RestExceptionHandler;
import org.camunda.bpm.engine.rest.hal.JacksonHalJsonProvider;
import org.camunda.bpm.engine.rest.mapper.JacksonConfigurator;
import org.camunda.bpm.engine.rest.mapper.MultipartPayloadProvider;
import java.util.HashSet;
import java.util.Set;
/**
 * Registry of the JAX-RS resource and configuration/provider classes that
 * make up the default setup of the camunda REST API.
 *
 * @author Tassilo Weidner
 */
public class CamundaRestResources {

  private static final Set<Class<?>> RESOURCE_CLASSES = new HashSet<Class<?>>();

  private static final Set<Class<?>> CONFIGURATION_CLASSES = new HashSet<Class<?>>();

  static {
    register(RESOURCE_CLASSES,
        JaxRsTwoNamedProcessEngineRestServiceImpl.class,
        JaxRsTwoDefaultProcessEngineRestServiceImpl.class);

    register(CONFIGURATION_CLASSES,
        JacksonConfigurator.class,
        JacksonJsonProvider.class,
        JsonMappingExceptionHandler.class,
        JsonParseExceptionHandler.class,
        ProcessEngineExceptionHandler.class,
        RestExceptionHandler.class,
        MultipartPayloadProvider.class,
        JacksonHalJsonProvider.class,
        ExceptionHandler.class);
  }

  /** Adds each of the given classes to the target set. */
  private static void register(Set<Class<?>> target, Class<?>... classes) {
    for (Class<?> clazz : classes) {
      target.add(clazz);
    }
  }

  /**
   * Returns a set containing all resource classes provided by camunda BPM.
   * @return a set of resource classes.
   */
  public static Set<Class<?>> getResourceClasses() {
    return RESOURCE_CLASSES;
  }

  /**
   * Returns a set containing all provider / mapper / config classes used in the
   * default setup of the camunda REST api.
   * @return a set of provider / mapper / config classes.
   */
  public static Set<Class<?>> getConfigurationClasses() {
    return CONFIGURATION_CLASSES;
  }
}
| falko/camunda-bpm-platform | engine-rest/engine-rest-jaxrs2/src/main/java/org/camunda/bpm/engine/rest/impl/CamundaRestResources.java | Java | apache-2.0 | 2,980 |
package de.mygrades.view.activity;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.view.MenuItem;
import de.mygrades.R;
/**
 * Hosts the {@link SettingsFragment} with an up-enabled toolbar.
 *
 * Created by tilman on 12.12.15.
 */
public class SettingsActivity extends AppCompatActivity {
  private SettingsFragment settingsFragment;

  @Override
  protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_settings);

    // Create the fragment only on first launch; on configuration changes
    // re-attach the instance the fragment manager already retained.
    if (savedInstanceState == null) {
      settingsFragment = new SettingsFragment();
      getSupportFragmentManager().beginTransaction().add(R.id.content, settingsFragment, "settings").commit();
    } else {
      settingsFragment = (SettingsFragment) getSupportFragmentManager().findFragmentByTag("settings");
    }

    // set toolbar
    Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
    setSupportActionBar(toolbar);
    getSupportActionBar().setTitle(R.string.toolbar_settings);
    getSupportActionBar().setDisplayHomeAsUpEnabled(true);
  }

  @Override
  public boolean onOptionsItemSelected(MenuItem item) {
    switch (item.getItemId()) {
      // The toolbar "up" arrow behaves like the back button.
      case android.R.id.home:
        onBackPressed();
        return true;
    }
    return false;
  }
}
| MyGrades/mygrades-app | app/src/main/java/de/mygrades/view/activity/SettingsActivity.java | Java | apache-2.0 | 1,383 |
package com.markwatson.deeplearning;
import org.deeplearning4j.datasets.fetchers.CSVDataFetcher;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
/**
* Created by markw on 10/5/15.
*/
public class WisconsinDataFetcher extends CSVDataFetcher {
public WisconsinDataFetcher() throws FileNotFoundException {
super(new FileInputStream("data/cleaned_wisconsin_cancer_data.csv"), 9);
}
@Override
public void fetch(int i) {
super.fetch(i);
}
}
| mark-watson/power-java | deeplearning/src/main/java/com/markwatson/deeplearning/WisconsinDataFetcher.java | Java | apache-2.0 | 482 |
/*!
* Start Bootstrap - Freelancer Bootstrap Theme (http://startbootstrap.com)
* Code licensed under the Apache License v2.0.
* For details, see http://www.apache.org/licenses/LICENSE-2.0.
*/
| sederaa/gozouk | app/scripts/freelancer.js | JavaScript | apache-2.0 | 196 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import _ from 'lodash';
import {nonEmpty, nonNil} from 'app/utils/lodashMixins';
import { BehaviorSubject } from 'rxjs/BehaviorSubject';
import 'rxjs/add/operator/first';
import Worker from './decompress.worker';
import SimpleWorkerPool from '../../utils/SimpleWorkerPool';
import maskNull from 'app/core/utils/maskNull';
import {ClusterSecretsManager} from './types/ClusterSecretsManager';
import ClusterLoginService from './components/cluster-login/service';
// Possible backend/cluster connection states (see ConnectionState.update).
const State = {
    DISCONNECTED: 'DISCONNECTED',
    AGENT_DISCONNECTED: 'AGENT_DISCONNECTED',
    CLUSTER_DISCONNECTED: 'CLUSTER_DISCONNECTED',
    CONNECTED: 'CONNECTED'
};

const IGNITE_2_0 = '2.0.0';

// Version constraints (ranges / minimum versions) used for feature checks.
const LAZY_QUERY_SINCE = [['2.1.4-p1', '2.2.0'], '2.2.1'];

const COLLOCATED_QUERY_SINCE = [['2.3.5', '2.4.0'], ['2.4.6', '2.5.0'], '2.5.2'];

// Error codes from o.a.i.internal.processors.restGridRestResponse.java
const SuccessStatus = {
    /** Command succeeded. */
    STATUS_SUCCESS: 0,
    /** Command failed. */
    STATUS_FAILED: 1,
    /** Authentication failure. */
    AUTH_FAILED: 2,
    /** Security check failed. */
    SECURITY_CHECK_FAILED: 3
};
// Tracks which agents/clusters are known and which cluster is active,
// deriving the overall connection State from that information.
class ConnectionState {
    constructor(cluster) {
        this.agents = [];
        this.clusters = [];
        this.cluster = cluster;
        this.state = State.DISCONNECTED;
    }

    // Makes the given cluster active and flags whether it is among the
    // currently known (connected) clusters.
    updateCluster(cluster) {
        this.cluster = cluster;

        const known = _.find(this.clusters, {id: cluster.id});
        this.cluster.connected = !!known;

        return cluster;
    }

    // Refreshes state from an agent-stat event: the agent count and the
    // list of clusters the agents see.
    update(demo, count, clusters) {
        _.forEach(clusters, (c) => {
            c.name = c.id;
        });

        this.clusters = clusters;

        if (_.isEmpty(this.clusters)) {
            this.cluster = null;
        }

        if (_.isNil(this.cluster)) {
            this.cluster = _.head(clusters);
        }

        if (nonNil(this.cluster)) {
            this.cluster.connected = !!_.find(clusters, {id: this.cluster.id});
        }

        if (count === 0) {
            this.state = State.AGENT_DISCONNECTED;
        } else {
            const connected = demo || _.get(this.cluster, 'connected');

            this.state = connected ? State.CONNECTED : State.CLUSTER_DISCONNECTED;
        }
    }

    // Switches to the first known cluster when the active one is not connected.
    useConnectedCluster() {
        if (nonEmpty(this.clusters) && !this.cluster.connected) {
            const first = _.head(this.clusters);

            first.connected = true;
            this.cluster = first;
            this.state = State.CONNECTED;
        }
    }

    // Resets to the fully disconnected state, marking the active cluster
    // as explicitly disconnected.
    disconnect() {
        this.agents = [];
        this.clusters = [];

        if (this.cluster) {
            this.cluster.disconnect = true;
        }

        this.state = State.DISCONNECTED;
    }
}
/**
 * Angular service that owns the socket connection to the backend/web agents,
 * tracks which cluster is currently active and provides helpers for running
 * schema-import, REST and visor commands on that cluster.
 */
export default class AgentManager {
    static $inject = ['$rootScope', '$q', '$transitions', 'igniteSocketFactory', 'AgentModal', 'UserNotifications', 'IgniteVersion', ClusterLoginService.name];

    /** @type {ng.IScope} */
    $root;

    /** @type {ng.IQService} */
    $q;

    /** @type {ClusterLoginService} */
    ClusterLoginSrv;

    /** @type {String} Version of the active cluster, used for feature detection. */
    clusterVersion = '2.4.0';

    /** Connection state stream, seeded with the cluster restored from localStorage. */
    connectionSbj = new BehaviorSubject(new ConnectionState(AgentManager.restoreActiveCluster()));

    /** @type {ClusterSecretsManager} */
    clustersSecrets = new ClusterSecretsManager();

    /** Worker pool used to decompress zipped node responses off the UI thread. */
    pool = new SimpleWorkerPool('decompressor', Worker, 4);

    /** @type {Set<ng.IDeferred>} Pending deferreds created by awaitConnectionState(). */
    promises = new Set();

    /** Socket to the backend; created lazily by connect(). */
    socket = null;

    /**
     * Reads and consumes the cluster persisted by saveToStorage().
     *
     * @returns {Object|null} The stored cluster, or null when missing/invalid.
     */
    static restoreActiveCluster() {
        try {
            return JSON.parse(localStorage.cluster);
        }
        catch (ignore) {
            return null;
        }
        finally {
            // Always consume the value so a stale cluster is never restored twice.
            localStorage.removeItem('cluster');
        }
    }

    constructor($root, $q, $transitions, socketFactory, AgentModal, UserNotifications, Version, ClusterLoginSrv) {
        Object.assign(this, {$root, $q, $transitions, socketFactory, AgentModal, UserNotifications, Version, ClusterLoginSrv});

        let prevCluster;

        // Stream that emits only when the active cluster instance changes.
        this.currentCluster$ = this.connectionSbj
            .distinctUntilChanged(({ cluster }) => prevCluster === cluster)
            .do(({ cluster }) => prevCluster = cluster);

        if (!this.isDemoMode()) {
            // Track the active cluster's version for feature availability checks.
            this.connectionSbj.subscribe({
                next: ({cluster}) => {
                    const version = _.get(cluster, 'clusterVersion');

                    if (_.isEmpty(version))
                        return;

                    this.clusterVersion = version;
                }
            });
        }
    }

    /** @returns {Boolean} Whether the console runs in demo mode. */
    isDemoMode() {
        return this.$root.IgniteDemoMode;
    }

    /**
     * @param sinceVersion Version range(s) to check the cluster version against.
     * @returns {Boolean} Whether the active cluster satisfies the range(s).
     */
    available(...sinceVersion) {
        return this.Version.since(this.clusterVersion, ...sinceVersion);
    }

    /** Opens the backend socket (no-op when already open) and wires lifecycle events. */
    connect() {
        const self = this;

        if (nonNil(self.socket))
            return;

        self.socket = self.socketFactory();

        const onDisconnect = () => {
            const conn = self.connectionSbj.getValue();

            conn.disconnect();

            self.connectionSbj.next(conn);
        };

        self.socket.on('connect_error', onDisconnect);
        self.socket.on('disconnect', onDisconnect);

        // Periodic agent statistics drive the connection state machine.
        self.socket.on('agents:stat', ({clusters, count}) => {
            const conn = self.connectionSbj.getValue();

            conn.update(self.isDemoMode(), count, clusters);

            self.connectionSbj.next(conn);
        });

        self.socket.on('cluster:changed', (cluster) => this.updateCluster(cluster));

        self.socket.on('user:notifications', (notification) => this.UserNotifications.notification = notification);
    }

    /**
     * Persists the given cluster so it can be restored after a page reload.
     *
     * @param cluster Cluster to store; defaults to the currently active one.
     */
    saveToStorage(cluster = this.connectionSbj.getValue().cluster) {
        try {
            localStorage.cluster = JSON.stringify(cluster);
        } catch (ignore) {
            // No-op.
        }
    }

    /**
     * Merges fresh attributes of a changed cluster into the known cluster
     * list and re-emits the connection state.
     *
     * @param newCluster Updated cluster payload from the backend.
     */
    updateCluster(newCluster) {
        const state = this.connectionSbj.getValue();

        const oldCluster = _.find(state.clusters, (cluster) => cluster.id === newCluster.id);

        if (!_.isNil(oldCluster)) {
            oldCluster.nids = newCluster.nids;
            oldCluster.addresses = newCluster.addresses;
            oldCluster.clusterVersion = newCluster.clusterVersion;
            oldCluster.active = newCluster.active;

            this.connectionSbj.next(state);
        }
    }

    /** Makes the given cluster the active one and persists that choice. */
    switchCluster(cluster) {
        const state = this.connectionSbj.getValue();

        state.updateCluster(cluster);

        this.connectionSbj.next(state);

        this.saveToStorage(cluster);
    }

    /**
     * Resolves once the connection reaches one of the given states.
     * The returned promise is rejected when stopWatch() is called.
     *
     * @param states Acceptable connection states.
     * @returns {ng.IPromise}
     */
    awaitConnectionState(...states) {
        const defer = this.$q.defer();

        this.promises.add(defer);

        const subscription = this.connectionSbj.subscribe({
            next: ({state}) => {
                if (_.includes(states, state))
                    defer.resolve();
            }
        });

        return defer.promise
            .finally(() => {
                subscription.unsubscribe();

                this.promises.delete(defer);
            });
    }

    /** @returns {ng.IPromise} Resolved when a cluster is fully connected. */
    awaitCluster() {
        return this.awaitConnectionState(State.CONNECTED);
    }

    /** @returns {ng.IPromise} Resolved when at least an agent is connected. */
    awaitAgent() {
        return this.awaitConnectionState(State.CONNECTED, State.CLUSTER_DISCONNECTED);
    }

    /**
     * Watches the connection and shows/hides the "agent disconnected" modal
     * accordingly while waiting for an agent.
     *
     * @param {String} backText
     * @param {String} [backState]
     * @returns {ng.IPromise}
     */
    startAgentWatch(backText, backState) {
        const self = this;

        self.backText = backText;
        self.backState = backState;

        const conn = self.connectionSbj.getValue();

        conn.useConnectedCluster();

        self.connectionSbj.next(conn);

        // Only one modal watch may be active at a time.
        this.modalSubscription && this.modalSubscription.unsubscribe();

        self.modalSubscription = this.connectionSbj.subscribe({
            next: ({state}) => {
                switch (state) {
                    case State.CONNECTED:
                    case State.CLUSTER_DISCONNECTED:
                        this.AgentModal.hide();

                        break;

                    case State.AGENT_DISCONNECTED:
                        this.AgentModal.agentDisconnected(self.backText, self.backState);

                        break;

                    default:
                        // Connection to backend is not established yet.
                }
            }
        });

        return self.awaitAgent();
    }

    /**
     * Watches the connection and shows/hides the agent/cluster disconnect
     * modals accordingly while waiting for a cluster.
     *
     * @param {String} backText
     * @param {String} [backState]
     * @returns {ng.IPromise}
     */
    startClusterWatch(backText, backState) {
        const self = this;

        self.backText = backText;
        self.backState = backState;

        const conn = self.connectionSbj.getValue();

        conn.useConnectedCluster();

        self.connectionSbj.next(conn);

        // Only one modal watch may be active at a time.
        this.modalSubscription && this.modalSubscription.unsubscribe();

        self.modalSubscription = this.connectionSbj.subscribe({
            next: ({state}) => {
                switch (state) {
                    case State.CONNECTED:
                        this.AgentModal.hide();

                        break;

                    case State.AGENT_DISCONNECTED:
                        this.AgentModal.agentDisconnected(self.backText, self.backState);

                        break;

                    case State.CLUSTER_DISCONNECTED:
                        self.AgentModal.clusterDisconnected(self.backText, self.backState);

                        break;

                    default:
                        // Connection to backend is not established yet.
                }
            }
        });

        // Stop watching when the user navigates away.
        self.$transitions.onExit({}, () => self.stopWatch());

        return self.awaitCluster();
    }

    /** Cancels the modal watch and rejects every pending await promise. */
    stopWatch() {
        this.modalSubscription && this.modalSubscription.unsubscribe();

        this.promises.forEach((promise) => promise.reject('Agent watch stopped.'));
    }

    /**
     * Sends an event to the agent over the socket and resolves with its reply.
     * Rejects when the socket is absent or drops while waiting.
     *
     * @param {String} event
     * @param {Object} [payload]
     * @returns {ng.IPromise}
     * @private
     */
    _sendToAgent(event, payload = {}) {
        if (!this.socket)
            return this.$q.reject('Failed to connect to server');

        const latch = this.$q.defer();

        const onDisconnect = () => {
            this.socket.removeListener('disconnect', onDisconnect);

            latch.reject('Connection to server was closed');
        };

        this.socket.on('disconnect', onDisconnect);

        this.socket.emit(event, payload, (err, res) => {
            this.socket.removeListener('disconnect', onDisconnect);

            if (err)
                return latch.reject(err);

            latch.resolve(res);
        });

        return latch.promise;
    }

    /** @returns {ng.IPromise} Promise of the JDBC drivers known to the agent. */
    drivers() {
        return this._sendToAgent('schemaImport:drivers');
    }

    /**
     * @param {Object} jdbcDriverJar
     * @param {Object} jdbcDriverClass
     * @param {Object} jdbcUrl
     * @param {Object} user
     * @param {Object} password
     * @returns {Promise}
     */
    schemas({jdbcDriverJar, jdbcDriverClass, jdbcUrl, user, password}) {
        const info = {user, password};

        return this._sendToAgent('schemaImport:schemas', {jdbcDriverJar, jdbcDriverClass, jdbcUrl, info});
    }

    /**
     * @param {Object} jdbcDriverJar
     * @param {Object} jdbcDriverClass
     * @param {Object} jdbcUrl
     * @param {Object} user
     * @param {Object} password
     * @param {Object} schemas
     * @param {Object} tablesOnly
     * @returns {ng.IPromise} Promise on list of tables (see org.apache.ignite.schema.parser.DbTable java class)
     */
    tables({jdbcDriverJar, jdbcDriverClass, jdbcUrl, user, password, schemas, tablesOnly}) {
        const info = {user, password};

        return this._sendToAgent('schemaImport:metadata', {jdbcDriverJar, jdbcDriverClass, jdbcUrl, info, schemas, tablesOnly});
    }

    /**
     * Executes an event on the currently active cluster, transparently
     * handling secured clusters (credential prompt, session token refresh)
     * and zipped responses.
     *
     * @param {String} event
     * @param {Object} params
     * @returns {Promise}
     * @private
     */
    _executeOnCurrentCluster(event, params) {
        return this.connectionSbj.first().toPromise()
            .then(({cluster}) => {
                if (_.isNil(cluster))
                    throw new Error('Failed to execute request on cluster.');

                if (cluster.secured) {
                    // Ask for credentials only if none are cached yet.
                    return Promise.resolve(this.clustersSecrets.get(cluster.id))
                        .then((secrets) => {
                            if (secrets.hasCredentials())
                                return secrets;

                            return this.ClusterLoginSrv.askCredentials(secrets)
                                .then((secrets) => {
                                    this.clustersSecrets.put(cluster.id, secrets);

                                    return secrets;
                                });
                        })
                        .then((secrets) => ({cluster, credentials: secrets.getCredentials()}));
                }

                return {cluster, credentials: {}};
            })
            .then(({cluster, credentials}) => {
                return this._sendToAgent(event, {clusterId: cluster.id, params, credentials})
                    .then((res) => {
                        const {status = SuccessStatus.STATUS_SUCCESS} = res;

                        switch (status) {
                            case SuccessStatus.STATUS_SUCCESS:
                                if (cluster.secured)
                                    this.clustersSecrets.get(cluster.id).sessionToken = res.sessionToken;

                                // Zipped payloads are inflated in a worker.
                                if (res.zipped)
                                    return this.pool.postMessage(res.data);

                                return res;

                            case SuccessStatus.STATUS_FAILED:
                                // Expired session: drop the token and retry once with fresh credentials.
                                if (res.error.startsWith('Failed to handle request - unknown session token (maybe expired session)')) {
                                    this.clustersSecrets.get(cluster.id).resetSessionToken();

                                    return this._executeOnCurrentCluster(event, params);
                                }

                                throw new Error(res.error);

                            case SuccessStatus.AUTH_FAILED:
                                this.clustersSecrets.get(cluster.id).resetCredentials();

                                throw new Error('Failed to authenticate in cluster with provided credentials');

                            case SuccessStatus.SECURITY_CHECK_FAILED:
                                throw new Error('Access denied. You are not authorized to access this functionality. Contact your cluster administrator.');

                            default:
                                throw new Error('Illegal status in node response');
                        }
                    });
            });
    }

    /**
     * @param {Boolean} [attr] Whether to include node attributes.
     * @param {Boolean} [mtr] Whether to include node metrics.
     * @returns {Promise}
     */
    topology(attr = false, mtr = false) {
        return this._executeOnCurrentCluster('node:rest', {cmd: 'top', attr, mtr});
    }

    /**
     * Fetches cache type metadata and maps it into a tree of
     * type -> fields / indexes nodes for UI consumption.
     *
     * @returns {Promise}
     */
    metadata() {
        return this._executeOnCurrentCluster('node:rest', {cmd: 'metadata'})
            .then((caches) => {
                let types = [];

                // Strip common java package prefixes for display.
                const _compact = (className) => {
                    return className.replace('java.lang.', '').replace('java.util.', '').replace('java.sql.', '');
                };

                const _typeMapper = (meta, typeName) => {
                    const maskedName = _.isEmpty(meta.cacheName) ? '<default>' : meta.cacheName;

                    let fields = meta.fields[typeName];

                    let columns = [];

                    for (const fieldName in fields) {
                        if (fields.hasOwnProperty(fieldName)) {
                            const fieldClass = _compact(fields[fieldName]);

                            columns.push({
                                type: 'field',
                                name: fieldName,
                                clazz: fieldClass,
                                system: fieldName === '_KEY' || fieldName === '_VAL',
                                cacheName: meta.cacheName,
                                typeName,
                                maskedName
                            });
                        }
                    }

                    const indexes = [];

                    for (const index of meta.indexes[typeName]) {
                        fields = [];

                        for (const field of index.fields) {
                            fields.push({
                                type: 'index-field',
                                name: field,
                                order: index.descendings.indexOf(field) < 0,
                                unique: index.unique,
                                cacheName: meta.cacheName,
                                typeName,
                                maskedName
                            });
                        }

                        if (fields.length > 0) {
                            indexes.push({
                                type: 'index',
                                name: index.name,
                                children: fields,
                                cacheName: meta.cacheName,
                                typeName,
                                maskedName
                            });
                        }
                    }

                    columns = _.sortBy(columns, 'name');

                    if (nonEmpty(indexes)) {
                        columns = columns.concat({
                            type: 'indexes',
                            name: 'Indexes',
                            cacheName: meta.cacheName,
                            typeName,
                            maskedName,
                            children: indexes
                        });
                    }

                    return {
                        type: 'type',
                        cacheName: meta.cacheName || '',
                        typeName,
                        maskedName,
                        children: columns
                    };
                };

                for (const meta of caches) {
                    const cacheTypes = meta.types.map(_typeMapper.bind(null, meta));

                    if (!_.isEmpty(cacheTypes))
                        types = types.concat(cacheTypes);
                }

                return types;
            });
    }

    /**
     * Executes a visor task on the given node(s).
     *
     * @param {String} taskId
     * @param {Array.<String>|String} nids
     * @param {Array.<Object>} args
     */
    visorTask(taskId, nids, ...args) {
        args = _.map(args, (arg) => maskNull(arg));

        nids = _.isArray(nids) ? nids.join(';') : maskNull(nids);

        return this._executeOnCurrentCluster('node:visor', {taskId, nids, args});
    }

    /**
     * @param {String} nid Node id.
     * @param {String} cacheName Cache name.
     * @param {String} [query] Query if null then scan query.
     * @param {Boolean} nonCollocatedJoins Flag whether to execute non collocated joins.
     * @param {Boolean} enforceJoinOrder Flag whether enforce join order is enabled.
     * @param {Boolean} replicatedOnly Flag whether query contains only replicated tables.
     * @param {Boolean} local Flag whether to execute query locally.
     * @param {Number} pageSz
     * @param {Boolean} [lazy] query flag.
     * @param {Boolean} [collocated] Collocated query.
     * @returns {Promise}
     */
    querySql(nid, cacheName, query, nonCollocatedJoins, enforceJoinOrder, replicatedOnly, local, pageSz, lazy = false, collocated = false) {
        if (this.available(IGNITE_2_0)) {
            let args = [cacheName, query, nonCollocatedJoins, enforceJoinOrder, replicatedOnly, local, pageSz];

            // Pass only the flags the target version understands.
            if (this.available(...COLLOCATED_QUERY_SINCE))
                args = [...args, lazy, collocated];
            else if (this.available(...LAZY_QUERY_SINCE))
                args = [...args, lazy];

            return this.visorTask('querySqlX2', nid, ...args).then(({error, result}) => {
                if (_.isEmpty(error))
                    return result;

                return Promise.reject(error);
            });
        }

        // Pre-2.0 fallback: pick the legacy task matching the requested flags.
        cacheName = _.isEmpty(cacheName) ? null : cacheName;

        let queryPromise;

        if (enforceJoinOrder)
            queryPromise = this.visorTask('querySqlV3', nid, cacheName, query, nonCollocatedJoins, enforceJoinOrder, local, pageSz);
        else if (nonCollocatedJoins)
            queryPromise = this.visorTask('querySqlV2', nid, cacheName, query, nonCollocatedJoins, local, pageSz);
        else
            queryPromise = this.visorTask('querySql', nid, cacheName, query, local, pageSz);

        return queryPromise
            .then(({key, value}) => {
                if (_.isEmpty(key))
                    return value;

                return Promise.reject(key);
            });
    }

    /**
     * @param {String} nid Node id.
     * @param {Number} queryId
     * @param {Number} pageSize
     * @returns {Promise}
     */
    queryNextPage(nid, queryId, pageSize) {
        if (this.available(IGNITE_2_0))
            return this.visorTask('queryFetchX2', nid, queryId, pageSize);

        return this.visorTask('queryFetch', nid, queryId, pageSize);
    }

    /**
     * Runs a SQL query and keeps fetching pages until all rows are collected.
     *
     * @param {String} nid Node id.
     * @param {String} cacheName Cache name.
     * @param {String} [query] Query if null then scan query.
     * @param {Boolean} nonCollocatedJoins Flag whether to execute non collocated joins.
     * @param {Boolean} enforceJoinOrder Flag whether enforce join order is enabled.
     * @param {Boolean} replicatedOnly Flag whether query contains only replicated tables.
     * @param {Boolean} local Flag whether to execute query locally.
     * @param {Boolean} lazy query flag.
     * @param {Boolean} collocated Collocated query.
     * @returns {Promise}
     */
    querySqlGetAll(nid, cacheName, query, nonCollocatedJoins, enforceJoinOrder, replicatedOnly, local, lazy, collocated) {
        // Page size for query.
        const pageSz = 1024;

        const fetchResult = (acc) => {
            if (!acc.hasMore)
                return acc;

            return this.queryNextPage(acc.responseNodeId, acc.queryId, pageSz)
                .then((res) => {
                    acc.rows = acc.rows.concat(res.rows);

                    acc.hasMore = res.hasMore;

                    return fetchResult(acc);
                });
        };

        return this.querySql(nid, cacheName, query, nonCollocatedJoins, enforceJoinOrder, replicatedOnly, local, pageSz, lazy, collocated)
            .then(fetchResult);
    }

    /**
     * Closes an open query cursor on the given node.
     *
     * @param {String} nid Node id.
     * @param {Number} [queryId]
     * @returns {Promise}
     */
    queryClose(nid, queryId) {
        if (this.available(IGNITE_2_0)) {
            return this.visorTask('queryCloseX2', nid, 'java.util.Map', 'java.util.UUID', 'java.util.Collection',
                nid + '=' + queryId);
        }

        return this.visorTask('queryClose', nid, nid, queryId);
    }

    /**
     * @param {String} nid Node id.
     * @param {String} cacheName Cache name.
     * @param {String} filter Filter text.
     * @param {Boolean} regEx Flag whether filter by regexp.
     * @param {Boolean} caseSensitive Case sensitive filtration.
     * @param {Boolean} near Scan near cache.
     * @param {Boolean} local Flag whether to execute query locally.
     * @param {Number} pageSize Page size.
     * @returns {Promise}
     */
    queryScan(nid, cacheName, filter, regEx, caseSensitive, near, local, pageSize) {
        if (this.available(IGNITE_2_0)) {
            return this.visorTask('queryScanX2', nid, cacheName, filter, regEx, caseSensitive, near, local, pageSize)
                .then(({error, result}) => {
                    if (_.isEmpty(error))
                        return result;

                    return Promise.reject(error);
                });
        }

        // Pre-2.0: scan queries are encoded as specially-prefixed SQL queries.
        /** Prefix for node local key for SCAN near queries. */
        const SCAN_CACHE_WITH_FILTER = 'VISOR_SCAN_CACHE_WITH_FILTER';

        /** Prefix for node local key for SCAN near queries. */
        const SCAN_CACHE_WITH_FILTER_CASE_SENSITIVE = 'VISOR_SCAN_CACHE_WITH_FILTER_CASE_SENSITIVE';

        const prefix = caseSensitive ? SCAN_CACHE_WITH_FILTER_CASE_SENSITIVE : SCAN_CACHE_WITH_FILTER;
        const query = `${prefix}${filter}`;

        return this.querySql(nid, cacheName, query, false, false, false, local, pageSize);
    }

    /**
     * Runs a scan query and keeps fetching pages until all rows are collected.
     *
     * @param {String} nid Node id.
     * @param {String} cacheName Cache name.
     * @param {String} filter Filter text.
     * @param {Boolean} regEx Flag whether filter by regexp.
     * @param {Boolean} caseSensitive Case sensitive filtration.
     * @param {Boolean} near Scan near cache.
     * @param {Boolean} local Flag whether to execute query locally.
     * @returns {Promise}
     */
    queryScanGetAll(nid, cacheName, filter, regEx, caseSensitive, near, local) {
        // Page size for query.
        const pageSz = 1024;

        const fetchResult = (acc) => {
            if (!acc.hasMore)
                return acc;

            return this.queryNextPage(acc.responseNodeId, acc.queryId, pageSz)
                .then((res) => {
                    acc.rows = acc.rows.concat(res.rows);

                    acc.hasMore = res.hasMore;

                    return fetchResult(acc);
                });
        };

        return this.queryScan(nid, cacheName, filter, regEx, caseSensitive, near, local, pageSz)
            .then(fetchResult);
    }

    /**
     * Change cluster active state.
     *
     * @returns {Promise}
     */
    toggleClusterState() {
        const state = this.connectionSbj.getValue();
        const active = !state.cluster.active;

        return this.visorTask('toggleClusterState', null, active)
            .then(() => state.updateCluster(Object.assign(state.cluster, { active })));
    }

    /** @returns {Boolean} Whether credentials for the cluster are cached. */
    hasCredentials(clusterId) {
        return this.clustersSecrets.get(clusterId).hasCredentials();
    }
}
| irudyak/ignite | modules/web-console/frontend/app/modules/agent/AgentManager.service.js | JavaScript | apache-2.0 | 26,984 |
/*******************************************************************************
*
* Copyright FUJITSU LIMITED 2017
*
* Author: Enes Sejfi
*
* Creation Date: 11.06.2012
*
*******************************************************************************/
package org.oscm.domobjects;
import java.util.LinkedList;
import java.util.List;
import javax.persistence.CascadeType;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.OneToMany;
import javax.persistence.OneToOne;
import javax.persistence.OrderBy;
import org.oscm.types.enumtypes.FillinCriterion;
/**
 * The landing page represents the start page with services shown by default.
 * This kind of landing page is used for large public marketplaces that have
 * typical app-store features (reviews, categories, tag cloud, etc.).
 */
@Entity
public class PublicLandingpage extends
        DomainObjectWithVersioning<PublicLandingpageData> {

    private static final long serialVersionUID = 1098523770937046486L;

    /** Fill-in criterion applied when none was configured explicitly. */
    public static final FillinCriterion DEFAULT_FILLINCRITERION = FillinCriterion
            .getDefault();

    /** Default number of services shown on the landing page. */
    public static final int DEFAULT_NUMBERSERVICES = 6;

    public PublicLandingpage() {
        super();
        dataContainer = new PublicLandingpageData();
    }

    // Owning marketplace; mandatory (optional = false), loaded lazily.
    @OneToOne(mappedBy = "publicLandingpage", fetch = FetchType.LAZY, optional = false)
    private Marketplace marketplace;

    // Featured products, ordered by their configured position; persisted and
    // removed together with the landing page.
    @OneToMany(mappedBy = "landingpage", fetch = FetchType.LAZY, cascade = {
            CascadeType.PERSIST, CascadeType.REMOVE })
    @OrderBy(value = "dataContainer.position ASC")
    private List<LandingpageProduct> landingpageProducts = new LinkedList<LandingpageProduct>();

    public Marketplace getMarketplace() {
        return marketplace;
    }

    public void setMarketplace(Marketplace marketplace) {
        this.marketplace = marketplace;
    }

    public List<LandingpageProduct> getLandingpageProducts() {
        return landingpageProducts;
    }

    public void setLandingpageProducts(
            List<LandingpageProduct> landingpageProducts) {
        this.landingpageProducts = landingpageProducts;
    }

    public int getNumberServices() {
        return dataContainer.getNumberServices();
    }

    public void setNumberServices(int numberServices) {
        this.dataContainer.setNumberServices(numberServices);
    }

    public FillinCriterion getFillinCriterion() {
        return dataContainer.getFillinCriterion();
    }

    public void setFillinCriterion(FillinCriterion fillinCriterion) {
        this.dataContainer.setFillinCriterion(fillinCriterion);
    }

    /** Creates a landing page initialized with the default settings. */
    public static PublicLandingpage newDefault() {
        PublicLandingpage defaultLandingPage = new PublicLandingpage();
        defaultLandingPage.setDefaults();
        return defaultLandingPage;
    }

    /** Resets fill-in criterion, service count and featured products to defaults. */
    public void setDefaults() {
        this.setFillinCriterion(DEFAULT_FILLINCRITERION);
        this.setNumberServices(DEFAULT_NUMBERSERVICES);
        this.getLandingpageProducts().clear();
    }
}
| opetrovski/development | oscm-domainobjects/javasrc/org/oscm/domobjects/PublicLandingpage.java | Java | apache-2.0 | 3,411 |
package interfaces;
/**
 * An employee with a name and a salary.
 * The natural ordering compares employees by salary, ascending.
 */
public class Employee implements Comparable<Employee> {

    private String name;
    private double salary;

    /**
     * Creates an employee.
     *
     * @param n the employee's name
     * @param s the initial salary
     */
    public Employee(String n, double s) {
        this.name = n;
        this.salary = s;
    }

    /** @return the employee's name */
    public String getName() {
        return this.name;
    }

    /** @return the current salary */
    public double getSalary() {
        return this.salary;
    }

    /**
     * Increases the salary by the given percentage of its current value.
     *
     * @param byPercent raise in percent (e.g. 10 for +10%)
     */
    public void raiseSalary(double byPercent) {
        this.salary += this.salary * byPercent / 100;
    }

    /** Orders employees by salary (ascending). */
    public int compareTo(Employee other) {
        return Double.compare(this.salary, other.salary);
    }
}
| lovelan/androidstudy | interfaces/src/interfaces/Employee.java | Java | apache-2.0 | 477 |
/*
Copyright 2012, Strategic Gains, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.strategicgains.eventing.hazelcast;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import com.hazelcast.core.ITopic;
import com.strategicgains.eventing.EventHandler;
import com.strategicgains.eventing.EventTransport;
/**
 * {@link EventTransport} implementation that publishes events over a
 * Hazelcast {@link ITopic} and adapts subscribed {@link EventHandler}s
 * to topic message listeners.
 *
 * @author toddf
 * @since Oct 18, 2012
 */
public class HazelcastEventTransport
implements EventTransport
{
	private ITopic<Object> topic;

	// Maps each subscribed handler to the listener id Hazelcast assigned to
	// it, so the listener can be removed again on unsubscribe.
	private Map<EventHandler, String> subscriptions = new ConcurrentHashMap<EventHandler, String>();

	protected HazelcastEventTransport()
	{
		super();
	}

	public HazelcastEventTransport(ITopic<Object> topic)
	{
		this();
		setTopic(topic);
	}

	protected void setTopic(ITopic<Object> aTopic)
	{
		this.topic = aTopic;
	}

	/** Publishes the event to all subscribers of the underlying topic. */
	@Override
	public void publish(Object event)
	{
		topic.publish(event);
	}

	/** Destroys the underlying topic; the transport is unusable afterwards. */
	@Override
	public void shutdown()
	{
		topic.destroy();
	}

	/**
	 * Registers the handler as a message listener on the topic and remembers
	 * its listener id for later removal.
	 *
	 * @return always true
	 */
	@Override
	public boolean subscribe(EventHandler handler)
	{
		String listenerId = topic.addMessageListener(new EventHandlerAdapter(handler));
		subscriptions.put(handler, listenerId);
		return true;
	}

	/**
	 * Removes the handler's message listener from the topic.
	 *
	 * Fix: the map entry is removed as well (previously only read via get()),
	 * so unsubscribed handlers no longer leak in the subscriptions map and a
	 * repeated unsubscribe cleanly returns false instead of reusing a stale
	 * listener id.
	 *
	 * @return true if a listener was actually removed from the topic
	 */
	@Override
	public boolean unsubscribe(EventHandler handler)
	{
		String listenerId = subscriptions.remove(handler);

		if (listenerId != null)
		{
			return topic.removeMessageListener(listenerId);
		}

		return false;
	}
}
| tfredrich/Domain-Eventing | hazelcast/src/java/com/strategicgains/eventing/hazelcast/HazelcastEventTransport.java | Java | apache-2.0 | 1,870 |
/**
 * Reads the current screen brightness from the 5+ runtime.
 * @returns {{errMsg: string, value: number}} uni-API style result object
 */
export function getScreenBrightness () {
  const value = plus.screen.getBrightness(false)

  return {
    errMsg: 'getScreenBrightness:ok',
    value
  }
}
/**
 * Applies the given screen brightness via the 5+ runtime.
 * @param {Object} [options]
 * @param {number} [options.value] brightness value to apply
 * @returns {{errMsg: string}} uni-API style result object
 */
export function setScreenBrightness ({ value } = {}) {
  plus.screen.setBrightness(value, false)

  return { errMsg: 'setScreenBrightness:ok' }
}
/**
 * Toggles the device wakelock via the 5+ runtime so the screen stays on
 * (or is allowed to turn off again).
 * @param {Object} [options]
 * @param {*} [options.keepScreenOn] truthy to keep the screen on
 * @returns {{errMsg: string}} uni-API style result object
 */
export function setKeepScreenOn ({ keepScreenOn } = {}) {
  plus.device.setWakelock(Boolean(keepScreenOn))

  return { errMsg: 'setKeepScreenOn:ok' }
}
| dcloudio/uni-app | src/platforms/app-plus/service/api/device/brightness.js | JavaScript | apache-2.0 | 471 |
//
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
var assert = require('assert');
// Test includes
var testutil = require('../../util/util');
var queuetestutil = require('../../framework/queue-test-utils');
// Lib includes
var azure = testutil.libRequire('azure');
var azureutil = testutil.libRequire('common/lib/util/util');
var Constants = azure.Constants;
var ServiceClientConstants = azure.ServiceClientConstants;
var HttpConstants = Constants.HttpConstants;
var queueService;
var queueNames = [];
var queueNamesPrefix = 'queue';
var testPrefix = 'queueservice-tests';
var tableService;
var suiteUtil;
suite('queueservice-tests', function () {
suiteSetup(function (done) {
queueService = azure.createQueueService()
.withFilter(new azure.ExponentialRetryPolicyFilter());
suiteUtil = queuetestutil.createQueueTestUtils(queueService, testPrefix);
suiteUtil.setupSuite(done);
});
suiteTeardown(function (done) {
suiteUtil.teardownSuite(done);
});
setup(function (done) {
suiteUtil.setupTest(done);
});
teardown(function (done) {
suiteUtil.teardownTest(done);
});
test('GetServiceProperties', function (done) {
queueService.getServiceProperties(function (error, serviceProperties) {
assert.equal(error, null);
assert.notEqual(serviceProperties, null);
if (serviceProperties) {
assert.notEqual(serviceProperties.Logging, null);
if (serviceProperties.Logging) {
assert.notEqual(serviceProperties.Logging.RetentionPolicy);
assert.notEqual(serviceProperties.Logging.Version);
}
if (serviceProperties.Metrics) {
assert.notEqual(serviceProperties.Metrics, null);
assert.notEqual(serviceProperties.Metrics.RetentionPolicy);
assert.notEqual(serviceProperties.Metrics.Version);
}
}
done();
});
});
test('SetServiceProperties', function (done) {
queueService.getServiceProperties(function (error, serviceProperties) {
assert.equal(error, null);
serviceProperties.Logging.Read = true;
queueService.setServiceProperties(serviceProperties, function (error2) {
assert.equal(error2, null);
queueService.getServiceProperties(function (error3, serviceProperties2) {
assert.equal(error3, null);
assert.equal(serviceProperties2.Logging.Read, true);
done();
});
});
});
});
test('CreateQueue', function (done) {
var queueName = testutil.generateId(queueNamesPrefix, queueNames, suiteUtil.isMocked);
var metadata = { 'class': 'test' };
// Create
queueService.createQueue(queueName, { metadata: metadata }, function (createError, queue, createResponse) {
assert.equal(createError, null);
assert.notEqual(queue, null);
assert.ok(createResponse.isSuccessful);
assert.equal(createResponse.statusCode, HttpConstants.HttpResponseCodes.Created);
assert.ok(queue);
if (createResponse.queue) {
assert.ok(queue.name);
assert.equal(queue.name, queueName);
assert.ok(queue.metadata);
assert.equal(queue.metadata['class'], metadata['class']);
}
// Get
queueService.getQueueMetadata(queueName, function (getError, getQueue, getResponse) {
assert.equal(getError, null);
assert.ok(getResponse.isSuccessful);
assert.equal(getResponse.statusCode, HttpConstants.HttpResponseCodes.Ok);
assert.ok(getQueue);
if (getQueue) {
assert.ok(getQueue.name);
assert.equal(getQueue.name, queueName);
assert.ok(getQueue.metadata);
assert.equal(getQueue.metadata['class'], metadata['class']);
}
// Delete
queueService.deleteQueue(queueName, function (deleteError, deleted, deleteResponse) {
assert.equal(deleteError, null);
assert.equal(deleted, true);
assert.ok(deleteResponse.isSuccessful);
assert.equal(deleteResponse.statusCode, HttpConstants.HttpResponseCodes.NoContent);
done();
});
});
});
});
test('CreateQueueIfNotExists', function (done) {
var queueName = testutil.generateId(queueNamesPrefix, queueNames, suiteUtil.isMocked);
var metadata = { 'class': 'test' };
// Create
queueService.createQueue(queueName, { metadata: metadata }, function (createError, queue, createResponse) {
assert.equal(createError, null);
assert.notEqual(queue, null);
assert.ok(createResponse.isSuccessful);
assert.equal(createResponse.statusCode, HttpConstants.HttpResponseCodes.Created);
assert.ok(queue);
if (createResponse.queue) {
assert.ok(queue.name);
assert.equal(queue.name, queueName);
assert.ok(queue.metadata);
assert.equal(queue.metadata['class'], metadata['class']);
}
// Try creating again
queueService.createQueueIfNotExists(queueName, { metadata: metadata }, function (createError2, queueCreated2) {
assert.equal(createError2, null);
assert.equal(queueCreated2, false);
done();
});
});
});
test('ListQueues', function (done) {
var queueName1 = testutil.generateId(queueNamesPrefix, queueNames, suiteUtil.isMocked);
var queueName2 = testutil.generateId(queueNamesPrefix, queueNames, suiteUtil.isMocked);
var metadata = { 'class': 'test' };
queueService.listQueues({ 'include': 'metadata' }, function (listErrorEmpty, queuesEmpty) {
assert.equal(listErrorEmpty, null);
assert.notEqual(queuesEmpty, null);
if (queuesEmpty) {
assert.equal(queuesEmpty.length, 0);
}
queueService.createQueue(queueName1, function (createError1, queue1, createResponse1) {
assert.equal(createError1, null);
assert.notEqual(queue1, null);
assert.ok(createResponse1.isSuccessful);
assert.equal(createResponse1.statusCode, HttpConstants.HttpResponseCodes.Created);
queueService.createQueue(queueName2, { metadata: metadata }, function (createError2, queue2, createResponse2) {
assert.equal(createError2, null);
assert.notEqual(queue2, null);
assert.ok(createResponse2.isSuccessful);
assert.equal(createResponse2.statusCode, HttpConstants.HttpResponseCodes.Created);
queueService.listQueues({ 'include': 'metadata' }, function (listError, queues, nextMarker, listResponse) {
assert.equal(listError, null);
assert.notEqual(queues, null);
assert.ok(listResponse.isSuccessful);
assert.equal(listResponse.statusCode, HttpConstants.HttpResponseCodes.Ok);
assert.ok(queues);
var entries = 0;
for (var queue in queues) {
var currentQueue = queues[queue];
if (currentQueue.name === queueName1) {
entries += 1;
}
else if (currentQueue.name === queueName2) {
entries += 2;
assert.equal(currentQueue.metadata['class'], metadata['class']);
}
}
assert.equal(entries, 3);
done();
});
});
});
});
});
test('CreateMessage', function (done) {
var queueName = testutil.generateId(queueNamesPrefix, queueNames, suiteUtil.isMocked);
var messageText1 = 'hi there';
var messageText2 = 'bye there';
// Create Queue
queueService.createQueue(queueName, function (createError1, queue1, createResponse1) {
assert.equal(createError1, null);
assert.notEqual(queue1, null);
assert.ok(createResponse1.isSuccessful);
assert.equal(createResponse1.statusCode, HttpConstants.HttpResponseCodes.Created);
// Create message
queueService.createMessage(queueName, messageText1, function (createMessageError, message, createMessageResponse) {
assert.equal(createMessageError, null);
assert.ok(createMessageResponse.isSuccessful);
assert.equal(createMessageResponse.statusCode, HttpConstants.HttpResponseCodes.Created);
// Create another message
queueService.createMessage(queueName, messageText2, function (createMessageError2, message2, createMessageResponse2) {
assert.equal(createMessageError, null);
assert.ok(createMessageResponse2.isSuccessful);
assert.equal(createMessageResponse2.statusCode, HttpConstants.HttpResponseCodes.Created);
// Peek message
queueService.peekMessages(queueName, function (peekError, queueMessages, peekResponse) {
assert.equal(peekError, null);
assert.notEqual(queueMessages, null);
var queueMessage = queueMessages[0];
if (queueMessage) {
assert.ok(queueMessage['messageid']);
assert.ok(queueMessage['insertiontime']);
assert.ok(queueMessage['expirationtime']);
assert.equal(queueMessage.messagetext, messageText1);
}
assert.ok(peekResponse.isSuccessful);
assert.equal(peekResponse.statusCode, HttpConstants.HttpResponseCodes.Ok);
// Get messages
queueService.getMessages(queueName, function (getError, getQueueMessages, getResponse) {
assert.equal(getError, null);
assert.notEqual(getQueueMessages, null);
assert.equal(getQueueMessages.length, 1);
assert.ok(getResponse.isSuccessful);
assert.equal(getResponse.statusCode, HttpConstants.HttpResponseCodes.Ok);
var getQueueMessage = getQueueMessages[0];
assert.equal(getQueueMessage.messagetext, messageText1);
// Delete message
queueService.deleteMessage(queueName, getQueueMessage.messageid, getQueueMessage.popreceipt, function (deleteError, deleted, deleteResponse) {
assert.equal(deleteError, null);
assert.equal(deleted, true);
assert.ok(deleteResponse.isSuccessful);
assert.equal(deleteResponse.statusCode, HttpConstants.HttpResponseCodes.NoContent);
// Get messages again
queueService.getMessages(queueName, function (getError2, getQueueMessages2, getResponse2) {
assert.equal(getError2, null);
assert.notEqual(getQueueMessages2, null);
assert.ok(getResponse2.isSuccessful);
assert.equal(getResponse2.statusCode, HttpConstants.HttpResponseCodes.Ok);
var getQueueMessage2 = getQueueMessages2[0];
assert.equal(getQueueMessage2.messagetext, messageText2);
// Clear messages
queueService.clearMessages(queueName, function (clearError, clearResponse) {
assert.equal(clearError, null);
assert.ok(clearResponse.isSuccessful);
assert.equal(clearResponse.statusCode, HttpConstants.HttpResponseCodes.NoContent);
// Get message again should yield empty
queueService.getMessages(queueName, function (getError3, getQueueMessage3, getResponse3) {
assert.equal(getError3, null);
assert.ok(getResponse3.isSuccessful);
assert.equal(getResponse3.statusCode, HttpConstants.HttpResponseCodes.Ok);
assert.equal(getQueueMessage3.length, 0);
done();
});
});
});
});
});
});
});
});
});
});
test('CreateEmptyMessage', function (done) {
  var queueName = testutil.generateId(queueNamesPrefix, queueNames, suiteUtil.isMocked);
  // The queue has to exist before any message can be added to it.
  queueService.createQueue(queueName, function (queueError) {
    assert.equal(queueError, null);
    // An empty message body is legal and should still come back 201 Created.
    queueService.createMessage(queueName, '', function (msgError, message, msgResponse) {
      assert.equal(msgError, null);
      assert.equal(msgResponse.statusCode, HttpConstants.HttpResponseCodes.Created);
      done();
    });
  });
});
test('SetQueueMetadataName', function (done) {
  var queueName = testutil.generateId(queueNamesPrefix, queueNames, suiteUtil.isMocked);
  // BUG FIX: JavaScript unicode escapes use lowercase '\u'; the original
  // '\Uc8fc\Uba39\Uc774\Uc6b4\Ub2e4' parsed as the literal ASCII text
  // "Uc8fcUba39Uc774Uc6b4Ub2e4", so the test never actually sent a
  // unicode metadata name. The lowercase escapes below restore the intent.
  var metadata = { '\uc8fc\uba39\uc774\uc6b4\ub2e4': 'test' };
  queueService.createQueue(queueName, function (createError) {
    assert.equal(createError, null);
    // unicode headers are valid
    queueService.setQueueMetadata(queueName, metadata, function (setError) {
      assert.equal(setError, null);
      done();
    });
  });
});
test('SetQueueMetadata', function (done) {
  var queueName = testutil.generateId(queueNamesPrefix, queueNames, suiteUtil.isMocked);
  var metadata = { 'class': 'test' };
  // Round-trip a metadata value through set + get and verify it survives.
  queueService.createQueue(queueName, function (errCreate) {
    assert.equal(errCreate, null);
    queueService.setQueueMetadata(queueName, metadata, function (errSet) {
      assert.equal(errSet, null);
      queueService.getQueueMetadata(queueName, function (errGet, queueInfo) {
        assert.equal(errGet, null);
        assert.notEqual(queueInfo, null);
        if (queueInfo) {
          assert.notEqual(queueInfo.metadata, null);
          assert.equal(queueInfo.metadata.class, 'test');
          done();
        }
      });
    });
  });
});
test('GetMessages', function (done) {
  var queueName = testutil.generateId(queueNamesPrefix, queueNames, suiteUtil.isMocked);
  queueService.createQueue(queueName, function (errCreate) {
    assert.equal(errCreate, null);
    // A brand new queue should report no messages at all.
    queueService.getMessages(queueName, function (errEmpty, initialMessages) {
      assert.equal(errEmpty, null);
      assert.notEqual(initialMessages, null);
      assert.equal(initialMessages.length, 0);
      queueService.createMessage(queueName, 'msg1', function (errMsg1) {
        assert.equal(errMsg1, null);
        queueService.createMessage(queueName, 'msg2', function (errMsg2) {
          assert.equal(errMsg2, null);
          queueService.getMessages(queueName, { peekonly: true }, function (errPeek, peeked) {
            assert.equal(errPeek, null);
            assert.notEqual(peeked, null);
            if (peeked) {
              // By default only one is returned
              assert.equal(peeked.length, 1);
              assert.equal(peeked[0].messagetext, 'msg1');
            }
            // Asking for two messages explicitly should return both.
            queueService.getMessages(queueName, { numofmessages: 2 }, function (errBatch, batch) {
              assert.equal(errBatch, null);
              assert.notEqual(batch, null);
              if (batch) {
                assert.equal(batch.length, 2);
              }
              done();
            });
          });
        });
      });
    });
  });
});
test('UpdateMessage', function (done) {
  var queueName = testutil.generateId(queueNamesPrefix, queueNames, suiteUtil.isMocked);
  queueService.createQueue(queueName, function (error) {
    assert.equal(error, null);
    queueService.createMessage(queueName, 'hi there', function (error2) {
      assert.equal(error2, null);
      queueService.getMessages(queueName, function (error3, messages) {
        // BUG FIX: the original asserted error2 here (already checked above),
        // so a failing getMessages call would have gone unnoticed.
        assert.equal(error3, null);
        assert.notEqual(messages, null);
        var message = messages[0];
        // A dequeued message can be updated using its id + pop receipt.
        queueService.updateMessage(queueName, message.messageid, message.popreceipt, 10, { messagetext: 'bye there' }, function (error4) {
          assert.equal(error4, null);
          done();
        });
      });
    });
  });
});
test('UpdateMessageEncodingPopReceipt', function (done) {
  var queueName = testutil.generateId(queueNamesPrefix, queueNames, suiteUtil.isMocked);
  // Updating a message in a queue that was never created should surface a
  // "queue not found" service error instead of blowing up on authentication
  // (the pop receipt contains '+' and '=' characters that need encoding).
  var popReceipt = 'AgAAAAEAAACucgAAvMW8+dqjzAE=';
  queueService.updateMessage(queueName, 'mymsg', popReceipt, 10, { messagetext: 'bye there' }, function (err) {
    assert.notEqual(err, null);
    assert.equal(err.code, Constants.QueueErrorCodeStrings.QUEUE_NOT_FOUND);
    done();
  });
});
test('storageConnectionStrings', function (done) {
  // A full account connection string should populate account, key and protocol.
  var accountKey = 'AhlzsbLRkjfwObuqff3xrhB2yWJNh1EMptmcmxFJ6fvPTVX3PZXwrG2YtYWf5DPMVgNsteKStM5iBLlknYFVoA==';
  var connectionString = 'DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=' + accountKey;
  var service = azure.createQueueService(connectionString);
  assert.equal(service.storageAccount, 'myaccount');
  assert.equal(service.storageAccessKey, accountKey);
  assert.equal(service.protocol, 'https:');
  done();
});
test('storageConnectionStringsDevStore', function (done) {
  // 'UseDevelopmentStorage=true' should resolve to the local storage
  // emulator account, key, host and queue port over plain http.
  var devService = azure.createQueueService('UseDevelopmentStorage=true');
  assert.equal(devService.storageAccount, ServiceClientConstants.DEVSTORE_STORAGE_ACCOUNT);
  assert.equal(devService.storageAccessKey, ServiceClientConstants.DEVSTORE_STORAGE_ACCESS_KEY);
  assert.equal(devService.protocol, 'http:');
  assert.equal(devService.host, '127.0.0.1');
  assert.equal(devService.port, '10001');
  done();
});
});
| jmspring/azure-sdk-for-node | test/services/queue/queueservice-tests.js | JavaScript | apache-2.0 | 18,029 |
'''
@author: Dallas Fraser
@date: 2015-08-25
@organization: MLSB API
@summary: The basic league API
'''
from flask_restful import Resource, reqparse
from flask import Response, request
from json import dumps
from api import DB
from api.model import League
from api.authentication import requires_admin
from api.errors import LeagueDoesNotExist
from api.variables import PAGE_SIZE
from api.routes import Routes
from api.helper import pagination_response
from api.cached_items import handle_table_change
from api.tables import Tables
# Parser for optional League fields (used by PUT, where the name may be
# omitted to leave the league unchanged).
parser = reqparse.RequestParser()
parser.add_argument('league_name', type=str)
# Parser for League creation (POST), where a league name is mandatory.
post_parser = reqparse.RequestParser()
post_parser.add_argument('league_name', type=str, required=True)
class LeagueAPI(Resource):
    """REST resource for a single League (get / delete / update)."""

    def get(self, league_id):
        """
        GET request for League Object matching given league_id
        Route: Route['league']/<league_id: int>
        Returns:
            if found
                status: 200
                mimetype: application/json
                data: {league_id:int, league_name:string}
            otherwise raises LeagueDoesNotExist (404)
        """
        # expose a single League
        entry = League.query.get(league_id)
        if entry is None:
            # the error handler turns this into a 404 JSON response
            raise LeagueDoesNotExist(payload={'details': league_id})
        response = Response(dumps(entry.json()), status=200,
                            mimetype="application/json")
        return response

    @requires_admin
    def delete(self, league_id):
        """
        DELETE request for League
        Route: Route['league']/<league_id: int>
        Returns:
            if found
                status: 200
                mimetype: application/json
                data: None
            otherwise raises LeagueDoesNotExist (404)
        """
        # delete a single league (admin only)
        league = League.query.get(league_id)
        if league is None:
            raise LeagueDoesNotExist(payload={'details': league_id})
        DB.session.delete(league)
        DB.session.commit()
        response = Response(dumps(None), status=200,
                            mimetype="application/json")
        # invalidate any cached views that depend on the league table
        handle_table_change(Tables.LEAGUE, item=league.json())
        return response

    @requires_admin
    def put(self, league_id):
        """
        PUT request for league
        Route: Route['league']/<league_id: int>
        Parameters :
            league_name: The league's name (string); omitted = unchanged
        Returns:
            if found and successful
                status: 200
                mimetype: application/json
                data: None
            if found but not successful
                status: IFSC (project-specific invalid-field status code)
                mimetype: application/json
                data: None
            otherwise raises LeagueDoesNotExist (404)
        """
        # update a single league (admin only)
        args = parser.parse_args()
        league = League.query.get(league_id)
        league_name = None
        if league is None:
            raise LeagueDoesNotExist(payload={'details': league_id})
        if args['league_name']:
            league_name = args['league_name']
        # league.update is expected to validate the new name
        league.update(league_name)
        DB.session.commit()
        response = Response(dumps(None), status=200,
                            mimetype="application/json")
        handle_table_change(Tables.LEAGUE, item=league.json())
        return response

    def option(self):
        # NOTE(review): flask_restful dispatches HTTP OPTIONS to a method
        # named 'options'; this 'option' method is likely never invoked, and
        # its Allow header omits GET/DELETE — TODO confirm intent.
        return {'Allow': 'PUT'}, 200, \
            {'Access-Control-Allow-Origin': '*',
             'Access-Control-Allow-Methods': 'PUT,GET'}
class LeagueListAPI(Resource):
    """REST resource for the League collection (paginated list / create)."""

    def get(self):
        """
        GET request for League List
        Route: Route['league']
        Parameters :
            page: the page of results to return (query string, default 1)
        Returns:
            status: 200
            mimetype: application/json
            data: a pagination envelope containing league objects
                  [{league_id:int, league_name:string}, ...]
        """
        # return a pagination of leagues
        page = request.args.get('page', 1, type=int)
        pagination = League.query.paginate(page, PAGE_SIZE, False)
        result = pagination_response(pagination, Routes['league'])
        resp = Response(dumps(result), status=200,
                        mimetype="application/json")
        return resp

    @requires_admin
    def post(self):
        """
        POST request for League List
        Route: Route['league']
        Parameters :
            league_name: The league's name (string, required)
        Returns:
            if successful
                status: 201
                mimetype: application/json
                data: the created league id (int)
            if missing required parameter
                status: 400
                mimetype: application/json
            if invalid parameter
                status: IFSC (project-specific invalid-field status code)
                mimetype: application/json
        """
        # create a new league (admin only)
        args = post_parser.parse_args()
        league_name = None
        if args['league_name']:
            league_name = args['league_name']
        # the League constructor is expected to validate the name
        league = League(league_name)
        DB.session.add(league)
        DB.session.commit()
        result = league.id
        handle_table_change(Tables.LEAGUE, item=league.json())
        return Response(dumps(result), status=201,
                        mimetype="application/json")

    def option(self):
        # NOTE(review): flask_restful dispatches HTTP OPTIONS to 'options',
        # so this method is likely never invoked — TODO confirm intent.
        return {'Allow': 'PUT'}, 200, \
            {'Access-Control-Allow-Origin': '*',
             'Access-Control-Allow-Methods': 'PUT,GET'}
| fras2560/mlsb-platform | api/basic/league.py | Python | apache-2.0 | 6,180 |
package com.gzsll.hupu.ui.thread.list;
import android.support.annotation.NonNull;
import android.text.TextUtils;
import android.view.View;
import com.gzsll.hupu.api.forum.ForumApi;
import com.gzsll.hupu.api.game.GameApi;
import com.gzsll.hupu.bean.AttendStatusData;
import com.gzsll.hupu.bean.Search;
import com.gzsll.hupu.bean.SearchData;
import com.gzsll.hupu.bean.SearchResult;
import com.gzsll.hupu.bean.ThreadListData;
import com.gzsll.hupu.bean.ThreadListResult;
import com.gzsll.hupu.components.storage.UserStorage;
import com.gzsll.hupu.data.ThreadRepository;
import com.gzsll.hupu.db.Forum;
import com.gzsll.hupu.db.ForumDao;
import com.gzsll.hupu.db.Thread;
import com.gzsll.hupu.injector.PerActivity;
import com.gzsll.hupu.util.ToastUtil;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import javax.inject.Inject;
import rx.Observable;
import rx.Subscriber;
import rx.Subscription;
import rx.android.schedulers.AndroidSchedulers;
import rx.functions.Action1;
import rx.functions.Func1;
import rx.schedulers.Schedulers;
import rx.subjects.PublishSubject;
/**
* Created by sll on 2016/3/9.
*/
@PerActivity
public class ThreadListPresenter implements ThreadListContract.Presenter {
  // Presenter for a forum's thread-list screen. Drives paged thread loading,
  // in-forum keyword search, forum info / attention (follow) state, and pushes
  // all results into the attached ThreadListContract.View.

  private String fid; // forum id this presenter is bound to
  private ThreadRepository mThreadRepository;
  private GameApi mGameApi;
  private UserStorage mUserStorage;
  private ForumApi mForumApi;
  private ForumDao mForumDao;
  private boolean isFirst = true; // suppresses the error view on the very first empty emission
  private PublishSubject<List<Thread>> mThreadSubject; // feeds fresh list results into the repository observable
  private ThreadListContract.View mThreadListView;
  private List<Thread> threads = new ArrayList<>(); // threads currently shown
  private String lastTid = ""; // tid of last rendered thread, used as paging cursor
  private String lastTamp = ""; // server-provided stamp for list paging
  private String type; // list type/ordering requested by the view
  private int pageIndex; // current search page (search mode only)
  private int loadType = TYPE_LIST; // whether we are in list mode or search mode
  private String key; // current search keyword (search mode only)
  private boolean hasNextPage = true;
  private boolean isAttention = false; // whether the user follows this forum
  private static final int TYPE_LIST = 1;
  private static final int TYPE_SEARCH = 2;
  private Subscription mSubscription;

  @Inject
  public ThreadListPresenter(String fid, ThreadRepository mThreadRepository, GameApi mGameApi,
      UserStorage mUserStorage, ForumApi mForumApi, ForumDao mForumDao) {
    this.fid = fid;
    this.mThreadRepository = mThreadRepository;
    this.mGameApi = mGameApi;
    this.mUserStorage = mUserStorage;
    this.mForumApi = mForumApi;
    this.mForumDao = mForumDao;
    mThreadSubject = PublishSubject.create();
  }

  @Override
  public void onThreadReceive(String type) {
    // Entry point for list mode: subscribe to the repository stream and
    // kick off the first page plus the attention-status lookup.
    mThreadListView.showLoading();
    mThreadListView.onFloatingVisibility(View.VISIBLE);
    this.type = type;
    loadType = TYPE_LIST;
    mThreadRepository.getThreadListObservable(Integer.valueOf(fid), mThreadSubject)
        .subscribe(new Action1<List<Thread>>() {
          @Override
          public void call(List<Thread> threads) {
            ThreadListPresenter.this.threads = threads;
            if (threads.isEmpty()) {
              // ignore the first empty emission (likely an empty cache);
              // subsequent empties are treated as load failures
              if (!isFirst) {
                mThreadListView.onError("数据加载失败");
              }
              isFirst = false;
            } else {
              mThreadListView.showContent();
              // remember the last tid so onLoadMore can page from it
              lastTid = threads.get(threads.size() - 1).getTid();
              mThreadListView.renderThreads(threads);
            }
          }
        });
    loadThreadList("");
    getAttendStatus();
  }

  @Override
  public void onStartSearch(String key, int page) {
    // Entry point for search mode; switches loadType via loadSearchList().
    if (TextUtils.isEmpty(key)) {
      mThreadListView.showToast("搜索词不能为空");
      return;
    }
    mThreadListView.showLoading();
    mThreadListView.onFloatingVisibility(View.GONE);
    pageIndex = page;
    this.key = key;
    loadSearchList();
  }

  private void loadThreadList(final String last) {
    // Fetch one page of the thread list; results are delivered through
    // mThreadSubject to the subscriber set up in onThreadReceive().
    mSubscription = mThreadRepository.getThreadsList(fid, last, lastTamp, type, mThreadSubject)
        .subscribe(new Action1<ThreadListData>() {
          @Override
          public void call(ThreadListData threadListData) {
            if (threadListData != null && threadListData.result != null) {
              ThreadListResult data = threadListData.result;
              lastTamp = data.stamp;
              hasNextPage = data.nextPage;
              if (TextUtils.isEmpty(last)) {
                // empty cursor means this was a refresh: jump back to top
                mThreadListView.onScrollToTop();
              }
            }
            mThreadListView.onRefreshCompleted();
            mThreadListView.onLoadCompleted(hasNextPage);
          }
        }, new Action1<Throwable>() {
          @Override
          public void call(Throwable throwable) {
            // full-screen error only when nothing is displayed yet
            if (threads.isEmpty()) {
              mThreadListView.onError("数据加载失败,请重试");
            } else {
              mThreadListView.onRefreshCompleted();
              mThreadListView.onLoadCompleted(hasNextPage);
              ToastUtil.showToast("数据加载失败,请重试");
            }
          }
        });
  }

  private void loadSearchList() {
    // Fetch one page of search results and map them into Thread objects so
    // the view can render them with the same adapter as the plain list.
    loadType = TYPE_SEARCH;
    mGameApi.search(key, fid, pageIndex).map(new Func1<SearchData, List<Thread>>() {
      @Override
      public List<Thread> call(SearchData searchData) {
        if (searchData != null) {
          if (pageIndex == 1) {
            // first page replaces any previous results
            threads.clear();
          }
          SearchResult result = searchData.result;
          hasNextPage = result.hasNextPage == 1;
          for (Search search : result.data) {
            Thread thread = new Thread();
            thread.setFid(search.fid);
            thread.setTid(search.id);
            thread.setLightReply(Integer.valueOf(search.lights));
            thread.setReplies(search.replies);
            thread.setUserName(search.username);
            thread.setTitle(search.title);
            // addtime is an epoch-millis string; format for display
            long time = Long.valueOf(search.addtime);
            Date date = new Date(time);
            SimpleDateFormat format = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
            thread.setTime(format.format(date));
            threads.add(thread);
          }
          return threads;
        }
        return null;
      }
    }).observeOn(AndroidSchedulers.mainThread()).subscribe(new Action1<List<Thread>>() {
      @Override
      public void call(List<Thread> threads) {
        if (threads == null) {
          loadThreadError();
        } else {
          if (threads.isEmpty()) {
            mThreadListView.onEmpty();
          } else {
            mThreadListView.showContent();
            mThreadListView.renderThreads(threads);
            mThreadListView.onRefreshCompleted();
            mThreadListView.onLoadCompleted(hasNextPage);
            if (pageIndex == 1) {
              mThreadListView.onScrollToTop();
            }
          }
        }
      }
    }, new Action1<Throwable>() {
      @Override
      public void call(Throwable throwable) {
        loadThreadError();
      }
    });
  }

  private void loadThreadError() {
    // Shared failure path for search loading: full-screen error when the
    // list is empty, otherwise a toast over the existing content.
    if (threads.isEmpty()) {
      mThreadListView.onError("数据加载失败");
    } else {
      mThreadListView.showContent();
      mThreadListView.onRefreshCompleted();
      // NOTE(review): passes true instead of hasNextPage, unlike the error
      // path in loadThreadList — TODO confirm this inconsistency is intended
      mThreadListView.onLoadCompleted(true);
      mThreadListView.showToast("数据加载失败");
    }
  }

  private void getAttendStatus() {
    // Ask the server whether the current user follows this forum; on
    // failure fall back to the locally cached forum info.
    mForumApi.getAttentionStatus(fid)
        .observeOn(AndroidSchedulers.mainThread())
        .subscribe(new Action1<AttendStatusData>() {
          @Override
          public void call(AttendStatusData attendStatusData) {
            if (attendStatusData != null && attendStatusData.status == 200) {
              mThreadListView.renderThreadInfo(attendStatusData.forumInfo);
              isAttention = attendStatusData.attendStatus == 1;
              mThreadListView.attendStatus(isAttention);
            }
          }
        }, new Action1<Throwable>() {
          @Override
          public void call(Throwable throwable) {
            getForumInfo();
          }
        });
  }

  private void getForumInfo() {
    // Offline fallback: load the forum record from the local database.
    Observable.create(new Observable.OnSubscribe<Forum>() {
      @Override
      public void call(Subscriber<? super Forum> subscriber) {
        List<Forum> forumList =
            mForumDao.queryBuilder().where(ForumDao.Properties.Fid.eq(fid)).list();
        if (!forumList.isEmpty()) {
          subscriber.onNext(forumList.get(0));
        }
      }
    })
        .subscribeOn(Schedulers.io())
        .observeOn(AndroidSchedulers.mainThread())
        .subscribe(new Action1<Forum>() {
          @Override
          public void call(Forum forum) {
            // view may have detached while the query ran
            if (forum != null && mThreadListView != null) {
              mThreadListView.renderThreadInfo(forum);
            }
          }
        });
  }

  @Override
  public void onAttentionClick() {
    // Toggle follow state; requires a logged-in user.
    if (isLogin()) {
      if (isAttention) {
        delAttention();
      } else {
        addAttention();
      }
    }
  }

  @Override
  public void onPostClick() {
    // Open the new-thread screen; requires a logged-in user.
    if (isLogin()) {
      mThreadListView.showPostThreadUi(fid);
    }
  }

  private boolean isLogin() {
    // Redirect to the login screen when the user is not authenticated.
    if (!mUserStorage.isLogin()) {
      mThreadListView.showLoginUi();
      return false;
    }
    return true;
  }

  private void addAttention() {
    mForumApi.addAttention(fid)
        .observeOn(AndroidSchedulers.mainThread())
        .subscribe(new Action1<AttendStatusData>() {
          @Override
          public void call(AttendStatusData result) {
            if (result.status == 200 && result.result == 1) {
              mThreadListView.showToast("添加关注成功");
              // status == 200 is already known here, so this sets true
              isAttention = result.status == 200;
              mThreadListView.attendStatus(isAttention);
            }
          }
        }, new Action1<Throwable>() {
          @Override
          public void call(Throwable throwable) {
            mThreadListView.showToast("添加关注失败,请检查网络后重试");
          }
        });
  }

  private void delAttention() {
    mForumApi.delAttention(fid)
        .observeOn(AndroidSchedulers.mainThread())
        .subscribe(new Action1<AttendStatusData>() {
          @Override
          public void call(AttendStatusData result) {
            if (result.status == 200 && result.result == 1) {
              mThreadListView.showToast("取消关注成功");
              // status == 200 is already known here, so this sets false
              isAttention = result.status != 200;
              mThreadListView.attendStatus(isAttention);
            }
          }
        }, new Action1<Throwable>() {
          @Override
          public void call(Throwable throwable) {
            mThreadListView.showToast("取消关注失败,请检查网络后重试");
          }
        });
  }

  @Override
  public void onRefresh() {
    // Pull-to-refresh: reload page one of whichever mode is active.
    mThreadListView.onScrollToTop();
    if (loadType == TYPE_LIST) {
      loadThreadList("");
    } else {
      pageIndex = 1;
      loadSearchList();
    }
  }

  @Override
  public void onReload() {
    // Retry after an error, continuing from the current cursor/page.
    mThreadListView.showContent();
    mThreadListView.showLoading();
    if (loadType == TYPE_LIST) {
      loadThreadList(lastTid);
    } else {
      loadSearchList();
    }
  }

  @Override
  public void onLoadMore() {
    // Infinite-scroll: fetch the next page unless the server said we're done.
    if (!hasNextPage) {
      mThreadListView.showToast("没有更多了~");
      mThreadListView.onLoadCompleted(false);
      return;
    }
    if (loadType == TYPE_LIST) {
      loadThreadList(lastTid);
    } else {
      pageIndex++;
      loadSearchList();
    }
  }

  @Override
  public void attachView(@NonNull ThreadListContract.View view) {
    mThreadListView = view;
    mThreadListView.showProgress();
  }

  @Override
  public void detachView() {
    // Cancel the in-flight list request and drop the view reference to
    // avoid leaking the Activity/Fragment.
    if (mSubscription != null && !mSubscription.isUnsubscribed()) {
      mSubscription.unsubscribe();
    }
    mThreadListView = null;
  }
}
| gzsll/TLint | app/src/main/java/com/gzsll/hupu/ui/thread/list/ThreadListPresenter.java | Java | apache-2.0 | 13,803 |
# -*- coding: utf-8 -*-
#
# Copyright 2015 MarkLogic Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0#
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# File History
# ------------
#
# Norman Walsh 05/08/2015 Initial development
"""
Classes for dealing with path namespaces
"""
class PathNamespace:
    """A database path namespace: a (prefix, namespace URI) pair used when
    resolving prefixed steps in path expressions.
    """

    def __init__(self, prefix, namespace_uri):
        """Create a path namespace.

        :param prefix: The prefix to use in paths (e.g. 'foo')
        :param namespace_uri: The namespace URI (e.g. 'http://bar.com')
        """
        self._config = {'prefix': prefix, 'namespace-uri': namespace_uri}

    def prefix(self):
        """Return the namespace prefix."""
        return self._config['prefix']

    def set_prefix(self, prefix):
        """Set the namespace prefix; returns self for chaining."""
        self._config['prefix'] = prefix
        return self

    def namespace_uri(self):
        """Return the namespace URI."""
        return self._config['namespace-uri']

    def set_namespace_uri(self, namespace_uri):
        """Set the namespace URI; returns self for chaining."""
        self._config['namespace-uri'] = namespace_uri
        return self
| supriyantomaftuh/python_api | python_api/marklogic/models/database/path.py | Python | apache-2.0 | 1,718 |
package com.planet_ink.coffee_mud.Abilities.Thief;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2004-2016 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
public class Thief_CarefulStep extends ThiefSkill
{
    // Thief skill: walk one room in a chosen direction while temporarily
    // boosting dexterity on a successful proficiency check, so the step is
    // taken "carefully". Usable only outside of combat.

    @Override public String ID() { return "Thief_CarefulStep"; }
    private final static String localizedName = CMLib.lang().L("Careful Step");
    @Override public String name() { return localizedName; }
    @Override protected int canAffectCode(){return 0;}
    @Override protected int canTargetCode(){return 0;}
    // casting time scales off the default ability time constants
    @Override public double castingTime(final MOB mob, final List<String> cmds){return CMProps.getSkillActionCost(ID(),CMath.div(CMProps.getIntVar(CMProps.Int.DEFABLETIME),50.0));}
    @Override public double combatCastingTime(final MOB mob, final List<String> cmds){return CMProps.getSkillCombatActionCost(ID(),CMath.div(CMProps.getIntVar(CMProps.Int.DEFCOMABLETIME),50.0));}
    @Override public int abstractQuality(){return Ability.QUALITY_INDIFFERENT;}
    private static final String[] triggerStrings =I(new String[] {"CARESTEP","CAREFULSTEP"});
    @Override public String[] triggerStrings(){return triggerStrings;}
    @Override public int usageType(){return USAGE_MOVEMENT;}
    @Override public int classificationCode() { return Ability.ACODE_SKILL|Ability.DOMAIN_ACROBATIC; }

    /**
     * Pre-invocation validation: parses the direction argument, rejects
     * invocation while fighting or toward a nonexistent room/exit, and
     * announces that the mob has started walking carefully.
     */
    @Override
    public boolean preInvoke(MOB mob, List<String> commands, Physical givenTarget, boolean auto, int asLevel, int secondsElapsed, double actionsRemaining)
    {
        if(secondsElapsed==0)
        {
            // the direction is expected as the last command word
            String dir=CMParms.combine(commands,0);
            if(commands.size()>0)
                dir=commands.get(commands.size()-1);
            final int dirCode=CMLib.directions().getGoodDirectionCode(dir);
            if(dirCode<0)
            {
                mob.tell(L("Step where?"));
                return false;
            }
            if(mob.isInCombat())
            {
                mob.tell(L("Not while you are fighting!"));
                return false;
            }
            // both a destination room and an exit must exist in that direction
            if((mob.location().getRoomInDir(dirCode)==null)||(mob.location().getExitInDir(dirCode)==null))
            {
                mob.tell(L("Step where?"));
                return false;
            }
            final CMMsg msg=CMClass.getMsg(mob,null,this,auto?CMMsg.MSG_OK_VISUAL:CMMsg.MSG_DELICATE_HANDS_ACT,L("<S-NAME> start(s) walking carefully @x1.",CMLib.directions().getDirectionName(dirCode)));
            if(mob.location().okMessage(mob,msg))
                mob.location().send(mob,msg);
            else
                return false;
        }
        return true;
    }

    /**
     * Performs the careful step: re-validates via preInvoke, computes the
     * proficiency adjustment from the level gap with the highest-level mob
     * present, and on success walks the mob with a temporary +100 dexterity.
     */
    @Override
    public boolean invoke(MOB mob, List<String> commands, Physical givenTarget, boolean auto, int asLevel)
    {
        String dir=CMParms.combine(commands,0);
        if(commands.size()>0)
            dir=commands.get(commands.size()-1);
        final int dirCode=CMLib.directions().getGoodDirectionCode(dir);
        if(!preInvoke(mob,commands,givenTarget,auto,asLevel,0,0.0))
            return false;
        final MOB highestMOB=getHighestLevelMOB(mob,null);
        // expertise levels count double toward the effective skill level
        int levelDiff=mob.phyStats().level()+(2*getXLEVELLevel(mob))-getMOBLevel(highestMOB);
        if(!super.invoke(mob,commands,givenTarget,auto,asLevel))
            return false;
        boolean success=false;
        final CMMsg msg=CMClass.getMsg(mob,null,this,auto?CMMsg.MSG_OK_VISUAL:CMMsg.MSG_DELICATE_HANDS_ACT,L("<S-NAME> walk(s) carefully @x1.",CMLib.directions().getDirectionName(dirCode)));
        if(mob.location().okMessage(mob,msg))
        {
            mob.location().send(mob,msg);
            // asymmetric scaling: being under-leveled hurts less than being
            // over-leveled helps
            if(levelDiff<0)
                levelDiff=levelDiff*8;
            else
                levelDiff=levelDiff*10;
            success=proficiencyCheck(mob,levelDiff,auto);
            final int oldDex=mob.baseCharStats().getStat(CharStats.STAT_DEXTERITY);
            if(success)
                mob.baseCharStats().setStat(CharStats.STAT_DEXTERITY,oldDex+100);
            mob.recoverCharStats();
            CMLib.tracking().walk(mob,dirCode,false,false);
            // restore the original dexterity after the walk
            if(oldDex!=mob.baseCharStats().getStat(CharStats.STAT_DEXTERITY))
                mob.baseCharStats().setStat(CharStats.STAT_DEXTERITY,oldDex);
            mob.recoverCharStats();
        }
        return success;
    }
}
| oriontribunal/CoffeeMud | com/planet_ink/coffee_mud/Abilities/Thief/Thief_CarefulStep.java | Java | apache-2.0 | 5,172 |
# -*- coding: utf-8 -*-
from brutal.core.plugin import cmd
from datetime import datetime
import hladnymatfyzak
# Expected textual date layout, e.g. '24.12.2015'.
DATE_FORMAT = '%d.%m.%Y'


def validate_date_and_args(args):
    """Resolve the date a command should operate on.

    Returns a datetime built from args[0] when it matches DATE_FORMAT,
    None when args[0] is present but malformed, and today's date when no
    arguments were given.
    """
    if not args:
        # No explicit date supplied -> default to today.
        return datetime.today()
    try:
        parsed = datetime.strptime(args[0], DATE_FORMAT)
    except ValueError:
        return None
    # Keep only the calendar date portion.
    return datetime(day=parsed.day, month=parsed.month, year=parsed.year)
def output_meals(meals):
    """Render meals as one numbered string, three entries per line."""
    parts = []
    for idx, meal in enumerate(meals, start=1):
        parts.append('{0}. {1}€ '.format(idx, meal))
        if idx % 3 == 0:
            # Break the line after every third meal.
            parts.append('\n')
    return ''.join(parts)
@cmd
def horna(event):
    """Meals available in horna

    Examples:
        !horna 24.12.2015
    """
    when = validate_date_and_args(event.args)
    if when is None:
        return 'Invalid date'
    menu = hladnymatfyzak.horna(day=when.day, month=when.month,
                                year=when.year)
    return output_meals(menu)
@cmd
def dolna(event):
    """Meals available in dolna

    Examples:
        !dolna 24.12.2015
    """
    when = validate_date_and_args(event.args)
    if when is None:
        return 'Invalid date'
    menu = hladnymatfyzak.dolna(day=when.day, month=when.month,
                                year=when.year)
    return output_meals(menu)
@cmd
def faynfood(event):
    """Meals available in Faynfood

    Examples:
        !faynfood 24.12.2015
    """
    when = validate_date_and_args(event.args)
    if when is None:
        return 'Invalid date'
    menu = hladnymatfyzak.ffood('faynfood', day=when.day,
                                month=when.month, year=when.year)
    return output_meals(menu)
@cmd
def freefood(event):
    """Meals available in Freefood

    Examples:
        !freefood 24.12.2015
    """
    when = validate_date_and_args(event.args)
    if when is None:
        return 'Invalid date'
    menu = hladnymatfyzak.ffood('freefood', day=when.day,
                                month=when.month, year=when.year)
    return output_meals(menu)
| mrshu/brutal-plugins | hladny_matfyzak.py | Python | apache-2.0 | 2,198 |
# Copyright 2014 DreamHost, LLC
#
# Author: DreamHost, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Commands related to routers.
"""
import argparse
import subprocess
import sys
from akanda.rug import commands
from akanda.rug.cli import message
from akanda.rug.api import nova, quantum
from novaclient import exceptions
from oslo.config import cfg
from neutronclient.v2_0 import client
class _TenantRouterCmd(message.MessageSending):
    """Base class for CLI commands that target a single router.

    Subclasses set _COMMAND; this class handles parsing the router id and
    building the message (including the tenant lookup) sent to the rug.
    """

    def get_parser(self, prog_name):
        # Bypass the direct base class to let us put the tenant id
        # argument first
        p = super(_TenantRouterCmd, self).get_parser(prog_name)
        p.add_argument(
            'router_id',
        )
        return p

    def make_message(self, parsed_args):
        """Build the AMQP message dict for this command.

        Accepts a literal router id, or the special values 'error' / '*'
        which are forwarded as-is with a matching tenant id.
        """
        router_id = parsed_args.router_id.lower()
        if router_id == 'error':
            tenant_id = 'error'
        elif router_id == '*':
            tenant_id = '*'
        else:
            # Look up the tenant for a given router so we can send the
            # command using both and the rug can route it to the correct
            # worker. We do the lookup here instead of in the rug to avoid
            # having to teach the rug notification and dispatching code
            # about how to find the owner of a router, and to shift the
            # burden of the neutron API call to the client so the server
            # doesn't block. It also gives us a chance to report an error
            # when we can't find the router.
            n_c = client.Client(
                username=self.app.rug_ini.admin_user,
                password=self.app.rug_ini.admin_password,
                tenant_name=self.app.rug_ini.admin_tenant_name,
                auth_url=self.app.rug_ini.auth_url,
                auth_strategy=self.app.rug_ini.auth_strategy,
                region_name=self.app.rug_ini.auth_region,
            )
            response = n_c.list_routers(retrieve_all=True, id=router_id)
            try:
                router_details = response['routers'][0]
            except (KeyError, IndexError):
                raise ValueError('No router with id %r found: %s' %
                                 (router_id, response))
            assert router_details['id'] == router_id
            tenant_id = router_details['tenant_id']
        self.log.info(
            'sending %s instruction for tenant %r, router %r',
            self._COMMAND,
            tenant_id,
            router_id,
        )
        return {
            'command': self._COMMAND,
            'router_id': router_id,
            'tenant_id': tenant_id,
        }
class RouterUpdate(_TenantRouterCmd):
    """force-update a router"""

    # Handled by the rug worker that owns the router's tenant.
    _COMMAND = commands.ROUTER_UPDATE
class RouterRebuild(_TenantRouterCmd):
    """force-rebuild a router"""

    _COMMAND = commands.ROUTER_REBUILD

    def get_parser(self, prog_name):
        # Optional custom Glance image to boot the rebuilt router from.
        p = super(RouterRebuild, self).get_parser(prog_name)
        p.add_argument(
            '--router_image_uuid',
        )
        return p

    def take_action(self, parsed_args):
        # Validate the image id against Glance up front so a typo fails
        # fast in the CLI rather than later inside the rug worker.
        uuid = parsed_args.router_image_uuid
        if uuid:
            nova_client = nova.Nova(cfg.CONF).client
            try:
                nova_client.images.get(uuid)
            except exceptions.NotFound:
                self.log.exception(
                    'could not retrieve custom image %s from Glance:' % uuid
                )
                raise
        return super(RouterRebuild, self).take_action(parsed_args)

    def make_message(self, parsed_args):
        # Extend the base message with the (possibly None) image override.
        message = super(RouterRebuild, self).make_message(parsed_args)
        message['router_image_uuid'] = parsed_args.router_image_uuid
        return message
class RouterDebug(_TenantRouterCmd):
    """debug a single router"""

    # Puts the router into debug mode so the rug stops managing it.
    _COMMAND = commands.ROUTER_DEBUG
class RouterManage(_TenantRouterCmd):
    """manage a single router"""

    # Returns a debugged router to normal rug management.
    _COMMAND = commands.ROUTER_MANAGE
class RouterSSH(_TenantRouterCmd):
    """ssh into a router over the management network"""

    def get_parser(self, prog_name):
        # Everything after the router id is passed straight to ssh.
        p = super(RouterSSH, self).get_parser(prog_name)
        p.add_argument('remainder', nargs=argparse.REMAINDER)
        return p

    def take_action(self, parsed_args):
        """Find the router's management port and exec ssh to its v6 address.

        Exits with ssh's return code if the ssh command fails.
        """
        n_c = client.Client(
            username=self.app.rug_ini.admin_user,
            password=self.app.rug_ini.admin_password,
            tenant_name=self.app.rug_ini.admin_tenant_name,
            auth_url=self.app.rug_ini.auth_url,
            auth_strategy=self.app.rug_ini.auth_strategy,
            region_name=self.app.rug_ini.auth_region,
        )
        router_id = parsed_args.router_id.lower()
        ports = n_c.show_router(router_id).get('router', {}).get('ports', {})
        for port in ports:
            # only the management-owner port is ssh-able from here
            if port['fixed_ips'] and \
                    port['device_owner'] == quantum.DEVICE_OWNER_ROUTER_MGT:
                v6_addr = port['fixed_ips'].pop()['ip_address']
                try:
                    cmd = ["ssh", "root@%s" % v6_addr] + parsed_args.remainder
                    subprocess.check_call(cmd)
                except subprocess.CalledProcessError as e:
                    sys.exit(e.returncode)
| markmcclain/astara | akanda/rug/cli/router.py | Python | apache-2.0 | 5,687 |
//===----------------------------------------------------------------------===//
//
// Peloton
//
// primitive_value_proxy.cpp
//
// Identification: src/codegen/primitive_value_proxy.cpp
//
// Copyright (c) 2015-17, Carnegie Mellon University Database Group
//
//===----------------------------------------------------------------------===//
#include "codegen/primitive_value_proxy.h"
namespace peloton {
namespace codegen {
int8_t PrimitiveValueProxy::GetTinyInt(
    char **values, uint32_t offset) {
  // Reinterpret the serialized slot at 'offset' as a tiny-int and load it.
  const int8_t *slot = reinterpret_cast<const int8_t *>(values[offset]);
  return *slot;
}
int16_t PrimitiveValueProxy::GetSmallInt(
    char **values, uint32_t offset) {
  // Reinterpret the serialized slot at 'offset' as a small-int and load it.
  const int16_t *slot = reinterpret_cast<const int16_t *>(values[offset]);
  return *slot;
}
int32_t PrimitiveValueProxy::GetInteger(
    char **values, uint32_t offset) {
  // Reinterpret the serialized slot at 'offset' as a 32-bit int and load it.
  const int32_t *slot = reinterpret_cast<const int32_t *>(values[offset]);
  return *slot;
}
int64_t PrimitiveValueProxy::GetBigInt(
    char **values, uint32_t offset) {
  // Reinterpret the serialized slot at 'offset' as a big-int and load it.
  const int64_t *slot = reinterpret_cast<const int64_t *>(values[offset]);
  return *slot;
}
double PrimitiveValueProxy::GetDouble(
    char **values, uint32_t offset) {
  // Reinterpret the serialized slot at 'offset' as a double and load it.
  const double *slot = reinterpret_cast<const double *>(values[offset]);
  return *slot;
}
int32_t PrimitiveValueProxy::GetDate(
    char **values, uint32_t offset) {
  // Dates are serialized as 32-bit integers; load the slot at 'offset'.
  const int32_t *slot = reinterpret_cast<const int32_t *>(values[offset]);
  return *slot;
}
uint64_t PrimitiveValueProxy::GetTimestamp(
    char **values, uint32_t offset) {
  // Timestamps are serialized as unsigned 64-bit integers.
  const uint64_t *slot = reinterpret_cast<const uint64_t *>(values[offset]);
  return *slot;
}
char *PrimitiveValueProxy::GetVarcharVal(
    char **values, uint32_t offset) {
  // Varchar slots already store the raw string pointer itself.
  char *str = values[offset];
  return str;
}
size_t PrimitiveValueProxy::GetVarcharLen(
    int32_t *values, uint32_t offset) {
  // The length is serialized as a signed 32-bit integer; widen it to
  // size_t explicitly instead of relying on an implicit sign-changing
  // conversion (silences -Wsign-conversion and documents the intent).
  return static_cast<size_t>(values[offset]);
}
//===----------------------------------------------------------------------===//
// GET TINYINT
//===----------------------------------------------------------------------===//

// Mangled symbol name for PrimitiveValueProxy::GetTinyInt(), used to bind
// the JIT declaration to the C++ implementation.
const std::string &PrimitiveValueProxy::_GetTinyInt::GetFunctionName() {
  static const std::string kGetTinyIntFnName =
      "_ZN7peloton7codegen19PrimitiveValueProxy10GetTinyIntEPPcj";
  return kGetTinyIntFnName;
}

// Return the cached LLVM declaration for GetTinyInt, registering it with
// the codegen context on first use.
llvm::Function *PrimitiveValueProxy::_GetTinyInt::GetFunction(
    CodeGen &codegen) {
  const std::string &name = GetFunctionName();
  if (llvm::Function *cached = codegen.LookupFunction(name)) {
    return cached;
  }
  // NOTE(review): the mangled name ends in 'j' (unsigned 32-bit), but the
  // offset parameter is declared as Int64Type here — confirm the JIT call
  // ABI matches.
  llvm::Type *ret_type = codegen.Int8Type();
  std::vector<llvm::Type *> arg_types = {
      codegen.CharPtrType()->getPointerTo(), codegen.Int64Type()};
  auto *fn_type = llvm::FunctionType::get(ret_type, arg_types, false);
  return codegen.RegisterFunction(name, fn_type);
}
//===----------------------------------------------------------------------===//
// GET SMALLINT
//===----------------------------------------------------------------------===//

// Mangled symbol name for PrimitiveValueProxy::GetSmallInt(), used to bind
// the JIT declaration to the C++ implementation.
const std::string &PrimitiveValueProxy::_GetSmallInt::GetFunctionName() {
  static const std::string kGetSmallIntFnName =
      "_ZN7peloton7codegen19PrimitiveValueProxy11GetSmallIntEPPcj";
  return kGetSmallIntFnName;
}

// Declares (or fetches the cached) LLVM function for GetSmallInt.
llvm::Function *PrimitiveValueProxy::_GetSmallInt::GetFunction(
    CodeGen &codegen) {
  const std::string &fn_name = GetFunctionName();
  // Has the function already been registered?
  llvm::Function *llvm_fn = codegen.LookupFunction(fn_name);
  if (llvm_fn != nullptr) {
    return llvm_fn;
  }
  // NOTE(review): the mangled name ends in 'j' (unsigned 32-bit) but the
  // offset parameter is declared as Int64Type — confirm the JIT call ABI.
  auto *fn_type = llvm::FunctionType::get(
      codegen.Int16Type(),
      {codegen.CharPtrType()->getPointerTo(), codegen.Int64Type()},
      false);
  return codegen.RegisterFunction(fn_name, fn_type);
}
//===----------------------------------------------------------------------===//
// GET INTEGER
//===----------------------------------------------------------------------===//

// Mangled symbol name for PrimitiveValueProxy::GetInteger().
const std::string &PrimitiveValueProxy::_GetInteger::GetFunctionName() {
  static const std::string kGetIntegerFnName =
      "_ZN7peloton7codegen19PrimitiveValueProxy10GetIntegerEPPcj";
  return kGetIntegerFnName;
}

// Declares (or fetches the cached) LLVM function for GetInteger.
llvm::Function *PrimitiveValueProxy::_GetInteger::GetFunction(
    CodeGen &codegen) {
  const std::string &fn_name = GetFunctionName();
  // Has the function already been registered?
  llvm::Function *llvm_fn = codegen.LookupFunction(fn_name);
  if (llvm_fn != nullptr) {
    return llvm_fn;
  }
  // NOTE(review): mangled offset type is 'j' (unsigned 32-bit) but the
  // declared parameter is Int64Type — confirm the JIT call ABI.
  auto *fn_type = llvm::FunctionType::get(
      codegen.Int32Type(),
      {codegen.CharPtrType()->getPointerTo(), codegen.Int64Type()},
      false);
  return codegen.RegisterFunction(fn_name, fn_type);
}
//===----------------------------------------------------------------------===//
// GET BIGINT
//===----------------------------------------------------------------------===//

// Mangled symbol name for PrimitiveValueProxy::GetBigInt().
const std::string &PrimitiveValueProxy::_GetBigInt::GetFunctionName() {
  static const std::string kGetBigIntFnName =
      "_ZN7peloton7codegen19PrimitiveValueProxy9GetBigIntEPPcj";
  return kGetBigIntFnName;
}

// Declares (or fetches the cached) LLVM function for GetBigInt.
llvm::Function *PrimitiveValueProxy::_GetBigInt::GetFunction(
    CodeGen &codegen) {
  const std::string &fn_name = GetFunctionName();
  // Has the function already been registered?
  llvm::Function *llvm_fn = codegen.LookupFunction(fn_name);
  if (llvm_fn != nullptr) {
    return llvm_fn;
  }
  // NOTE(review): mangled offset type is 'j' (unsigned 32-bit) but the
  // declared parameter is Int64Type — confirm the JIT call ABI.
  auto *fn_type = llvm::FunctionType::get(
      codegen.Int64Type(),
      {codegen.CharPtrType()->getPointerTo(), codegen.Int64Type()},
      false);
  return codegen.RegisterFunction(fn_name, fn_type);
}
//===----------------------------------------------------------------------===//
// GET DOUBLE
//===----------------------------------------------------------------------===//

// Mangled symbol name for PrimitiveValueProxy::GetDouble().
const std::string &PrimitiveValueProxy::_GetDouble::GetFunctionName() {
  static const std::string kGetDoubleFnName =
      "_ZN7peloton7codegen19PrimitiveValueProxy9GetDoubleEPPcj";
  return kGetDoubleFnName;
}

// Declares (or fetches the cached) LLVM function for GetDouble.
llvm::Function *PrimitiveValueProxy::_GetDouble::GetFunction(
    CodeGen &codegen) {
  const std::string &fn_name = GetFunctionName();
  // Has the function already been registered?
  llvm::Function *llvm_fn = codegen.LookupFunction(fn_name);
  if (llvm_fn != nullptr) {
    return llvm_fn;
  }
  // NOTE(review): mangled offset type is 'j' (unsigned 32-bit) but the
  // declared parameter is Int64Type — confirm the JIT call ABI.
  auto *fn_type = llvm::FunctionType::get(
      codegen.DoubleType(),
      {codegen.CharPtrType()->getPointerTo(), codegen.Int64Type()},
      false);
  return codegen.RegisterFunction(fn_name, fn_type);
}
//===----------------------------------------------------------------------===//
// GET TIMESTAMP
//===----------------------------------------------------------------------===//

// Mangled symbol name for PrimitiveValueProxy::GetTimestamp().
const std::string &PrimitiveValueProxy::_GetTimestamp::GetFunctionName() {
  static const std::string kGetTimestampFnName =
  // NOTE(review): on Apple builds this resolves to the mangled name of
  // ValuesRuntime::outputVarchar, whose signature does not match this
  // getter — looks like a copy/paste placeholder; confirm.
#ifdef __APPLE__
      "_ZN7peloton7codegen13ValuesRuntime13outputVarcharEPcjS2_j";
#else
      "_ZN7peloton7codegen19PrimitiveValueProxy12GetTimestampEPPcj";
#endif
  return kGetTimestampFnName;
}

// Declares (or fetches the cached) LLVM function for GetTimestamp.
llvm::Function *PrimitiveValueProxy::_GetTimestamp::GetFunction(
    CodeGen &codegen) {
  const std::string &fn_name = GetFunctionName();
  // Has the function already been registered?
  llvm::Function *llvm_fn = codegen.LookupFunction(fn_name);
  if (llvm_fn != nullptr) {
    return llvm_fn;
  }
  auto *fn_type = llvm::FunctionType::get(
      codegen.Int64Type(),
      {codegen.CharPtrType()->getPointerTo(), codegen.Int64Type()},
      false);
  return codegen.RegisterFunction(fn_name, fn_type);
}
//===----------------------------------------------------------------------===//
// GET DATE
//===----------------------------------------------------------------------===//

// Mangled symbol name for PrimitiveValueProxy::GetDate().
const std::string &PrimitiveValueProxy::_GetDate::GetFunctionName() {
  static const std::string kGetDateFnName =
  // NOTE(review): the Apple fallback symbol is ValuesRuntime::outputVarchar,
  // which does not match this getter's signature — confirm.
#ifdef __APPLE__
      "_ZN7peloton7codegen13ValuesRuntime13outputVarcharEPcjS2_j";
#else
      "_ZN7peloton7codegen19PrimitiveValueProxy7GetDateEPPcj";
#endif
  return kGetDateFnName;
}

// Declares (or fetches the cached) LLVM function for GetDate.
llvm::Function *PrimitiveValueProxy::_GetDate::GetFunction(
    CodeGen &codegen) {
  const std::string &fn_name = GetFunctionName();
  // Has the function already been registered?
  llvm::Function *llvm_fn = codegen.LookupFunction(fn_name);
  if (llvm_fn != nullptr) {
    return llvm_fn;
  }
  auto *fn_type = llvm::FunctionType::get(
      codegen.Int32Type(),
      {codegen.CharPtrType()->getPointerTo(), codegen.Int64Type()},
      false);
  return codegen.RegisterFunction(fn_name, fn_type);
}
//===----------------------------------------------------------------------===//
// GET VARCHAR VAL
//===----------------------------------------------------------------------===//

// Mangled symbol name for PrimitiveValueProxy::GetVarcharVal().
const std::string &PrimitiveValueProxy::_GetVarcharVal::GetFunctionName() {
  static const std::string kGetVarcharValFnName =
  // NOTE(review): the Apple fallback symbol is ValuesRuntime::outputVarchar,
  // which does not match this getter's signature — confirm.
#ifdef __APPLE__
      "_ZN7peloton7codegen13ValuesRuntime13outputVarcharEPcjS2_j";
#else
      "_ZN7peloton7codegen19PrimitiveValueProxy13GetVarcharValEPPcj";
#endif
  return kGetVarcharValFnName;
}

// Declares (or fetches the cached) LLVM function for GetVarcharVal.
llvm::Function *PrimitiveValueProxy::_GetVarcharVal::GetFunction(
    CodeGen &codegen) {
  const std::string &fn_name = GetFunctionName();
  // Has the function already been registered?
  llvm::Function *llvm_fn = codegen.LookupFunction(fn_name);
  if (llvm_fn != nullptr) {
    return llvm_fn;
  }
  auto *fn_type = llvm::FunctionType::get(
      codegen.CharPtrType(),
      {codegen.CharPtrType()->getPointerTo(), codegen.Int64Type()},
      false);
  return codegen.RegisterFunction(fn_name, fn_type);
}
//===----------------------------------------------------------------------===//
// GET VARCHAR LEN
//===----------------------------------------------------------------------===//

// Mangled symbol name for PrimitiveValueProxy::GetVarcharLen().
const std::string &PrimitiveValueProxy::_GetVarcharLen::GetFunctionName() {
  static const std::string kGetVarcharLenFnName =
  // NOTE(review): the Apple fallback symbol is ValuesRuntime::outputVarchar,
  // which does not match this getter's signature — confirm.
#ifdef __APPLE__
      "_ZN7peloton7codegen13ValuesRuntime13outputVarcharEPcjS2_j";
#else
      "_ZN7peloton7codegen19PrimitiveValueProxy13GetVarcharLenEPij";
#endif
  return kGetVarcharLenFnName;
}

// Declares (or fetches the cached) LLVM function for GetVarcharLen.
llvm::Function *PrimitiveValueProxy::_GetVarcharLen::GetFunction(
    CodeGen &codegen) {
  const std::string &fn_name = GetFunctionName();
  // Has the function already been registered?
  llvm::Function *llvm_fn = codegen.LookupFunction(fn_name);
  if (llvm_fn != nullptr) {
    return llvm_fn;
  }
  // NOTE(review): declared return is Int32Type while the C++ function
  // returns size_t — confirm the JIT call ABI.
  auto *fn_type = llvm::FunctionType::get(
      codegen.Int32Type(),
      {codegen.Int32Type()->getPointerTo(), codegen.Int64Type()},
      false);
  return codegen.RegisterFunction(fn_name, fn_type);
}
} // namespace codegen
} // namespace peloton
// Namespace object for the source (repository) module's client-side code;
// page controllers and models are attached to it below.
App.source = {
    controllers : {},
    models : {}
};
// Page controllers for the repository section: each method wires up the
// client-side behaviour of the matching server-rendered page.
App.source.controllers.repository = {

    /**
     * Add repository
     */
    add : function() {
        $(document).ready(function() {
            App.source.AddEditForm.init();
        });
    },

    /**
     * Edit repository
     */
    edit : function() {
        $(document).ready(function() {
            App.source.AddEditForm.init();
        });
    },

    /**
     * History page behaviour: per-commit and global show/hide of changed
     * paths, plus the delete-repository confirmation.
     */
    history : function() {
        $(document).ready(function() {
            $('tr.commit div.commit_files').hide(); // hide all paths on page load
            $('#toggle_all_paths span').text(App.lang('Show all paths')); // set initial text

            // show/hide the file list of a single commit
            $('tr.commit').each(function() {
                var wrapper = $(this);
                wrapper.find('a.toggle_files').click(function() {
                    wrapper.find('div.commit_files').toggle();
                    return false;
                });
            }); // show/hide one

            $('#repository_delete_page_action').click(function() {
                return confirm(App.lang('Are you sure that you wish to delete this repository from activeCollab?'));
            });

            // show/hide all file lists at once; the button's hide/show CSS
            // class tracks the current state.
            var toggle_new_class = null; // NOTE(review): unused — confirm and drop
            var link_text = null;
            $('#toggle_all_paths').click(function() {
                var toggle_button = $(this);
                if (toggle_button.is('.hide')) {
                    $('tr.commit div.commit_files').hide();
                    link_text = App.lang('Show all paths');
                    toggle_button.removeClass('hide');
                    toggle_button.addClass('show');
                }
                else {
                    $('tr.commit div.commit_files').show();
                    link_text = App.lang('Hide all paths');
                    toggle_button.removeClass('show');
                    toggle_button.addClass('hide');
                }
                $('span', toggle_button).text(link_text);
                return false;
            }); //show/hide all

            // Update repository
            /*
            $('#repository_ajax_update, a.repository_ajax_update').click(function() {
                var delimiter = App.data.path_info_through_query_string ? '&' : '?';
                App.ModalDialog.show('repository_update', App.lang('Repository update'), $('<p><img src="' + App.data.assets_url + '/images/indicator.gif" alt="" /> ' + App.lang('Checking for updates...') + '</p>').load($(this).attr('href')+ delimiter + 'skip_layout=1&async=1'), {
                    buttons : false,
                    width: 400
                });
                return false;
            });
            */
        });
    }, // history

    /**
     * Repository update page: imports new commits one revision at a time
     * and then asks the server to notify subscribers.
     */
    update : function() {
        // NOTE(review): progress_div, progress_content, total_commits and
        // commit are assigned without 'var' and leak to the global scope —
        // presumably unintentional; confirm before tightening.
        progress_div = $('#repository_update_progress');
        var delimiter = App.data.path_info_through_query_string ? '&' : '?';

        // Ask the server to send subscription notifications for the
        // freshly imported commits.
        var notify_subscribers = function(total_commits) {
            $('#progress_content').append('<p class="subscribers"><img src="' + App.data.assets_url + '/images/indicator.gif" alt="" /> ' + App.lang('Sending subscriptions...') + ' </p>');
            $.ajax({
                // BUG FIX: the query fragment had been corrupted to
                // 'async=1¬ify=' ('&not' decoded as the ¬ entity); the
                // parameter is 'notify'.
                url: App.data.repository_update_url + delimiter + 'async=1&notify=' + total_commits,
                type: 'GET',
                success : function(response) {
                    $('#progress_content p.subscribers img').attr({
                        'src' : App.data.assets_url + '/images/ok_indicator.gif'
                    });
                    $('#progress_content p.subscribers').append(App.lang('Done!'));
                }
            });
        }

        // Recursively import one revision after another until the head
        // revision is reached.
        var get_logs = function(commit, total_commits) {
            progress_content = $('#progress_content');
            progress_content.html('<p><img src="' + App.data.assets_url + '/images/indicator.gif" alt="" /> Importing commit #' + commit + '</p>');
            $.ajax( {
                url: App.data.repository_update_url + delimiter + 'r=' + commit + '&async=1&skip_missing_revisions=1',
                type: 'GET',
                success : function(response) {
                    if (response == 'success') {
                        if (commit !== App.data.repository_head_revision) {
                            commit++;
                            get_logs(commit, total_commits);
                        }
                        else {
                            progress_content.html('<p><img src="' + App.data.assets_url + '/images/ok_indicator.gif" alt="" /> '+ App.lang('Repository successfully updated') + '</p>');
                            notify_subscribers(total_commits);
                        }
                    }
                    else {
                        progress_content.html(response); // if not success, reponse is a svn error message
                    }
                }
            });
        }

        if (App.data.repository_uptodate == 1) {
            progress_div.html('<p><img src="' + App.data.assets_url + '/images/ok_indicator.gif" alt="" /> '+ App.lang('Repository is already up-to-date') + '</p>');
        }
        else {
            total_commits = App.data.repository_head_revision - App.data.repository_last_revision;
            commit = App.data.repository_last_revision+1;
            if (total_commits > 0) {
                progress_div.prepend('<p>There are new commits, please wait until the repository gets updated to revision #'+App.data.repository_head_revision+'</p>');
                get_logs(commit, total_commits);
            }
            else {
                progress_div.prepend('<p>' + App.lang('Error getting new commits') + ':</p>');
            }
        }
    }, // update

    /**
     * Browse page: revision switching plus a modal with item details.
     */
    browse : function() {
        $(document).ready(function () {
            App.widgets.SourceFilePages.init();

            $('a.source_item_info').each(function() {
                var link_obj = $(this);
                link_obj.click(function() {
                    App.ModalDialog.show('item_info', App.lang('Item info'), $('<p><img src="' + App.data.assets_url + '/images/indicator.gif" alt="" /> ' + App.lang('Fetching data...') + '</p>').load(link_obj.attr('href')), {
                        buttons : false,
                        width: 700
                    });
                    return false;
                });
            }); // show/hide one
        })
    }, // browse

    /**
     * Repository-user mapping page: add/remove mappings between repository
     * committers and activeCollab users via ajax.
     */
    repository_users : function() {
        $(document).ready(function () {
            var select_box = $('#repository_user');

            // Remove an existing mapping and put the committer name back
            // into the select box.
            // NOTE(review): the confirmation text reads "delete remove this
            // mapping" — it is also the App.lang() translation key, so fix
            // it together with the language files.
            $('table.mapped_users').find('a.remove_source_user').click(function() {
                if (confirm(App.lang('Are you sure that you wish to delete remove this mapping?'))) {
                    var link = $(this);
                    var img = link.find('img');
                    var old_src = img.attr('src');
                    img.attr('src', App.data.indicator_url);
                    $.ajax({
                        url : App.extendUrl(link.attr('href'), {'async' : 1}),
                        type : 'POST',
                        data : {'submitted' : 'submitted', 'repository_user' : link.attr('name')},
                        success : function(response) {
                            if (response == 'true') {
                                link.parent().parent().remove();
                                select_box.append('<option value="'+link.attr('name')+'">'+link.attr('name')+'</option>');
                                $('#all_mapped').hide();
                                $('#no_users').hide();
                                $('#new_record').show();
                                if ($('#records tbody').children().length == 0) {
                                    $('#records').hide();
                                }
                            } else {
                                img.attr('src', old_src);
                            }
                        }
                    });
                }
                return false;
            });

            // Submit a new mapping asynchronously and prepend the returned
            // row to the table.
            var form = $('form.map_user_form');
            form.attr('action', App.extendUrl(form.attr('action'), { async : 1 }));
            form.submit(function() {
                if ($('#user_id').find('option:selected').val() == '') {
                    alert(App.lang('Please select activeCollab user'));
                    return false;
                }
                var form = $(this);
                $('#new_record td.actions').prepend('<img src="' + App.data.indicator_url + '" alt="" />').find('button').hide();
                $(this).ajaxSubmit({
                    success: function(responseText) {
                        $('#records tbody').prepend(responseText);
                        $('#records').show();
                        $('#new_record td.actions').find('img').remove();
                        $('#new_record td.actions').find('button').show();
                        var new_row = $('#records tbody tr:first');
                        new_row.find('td').highlightFade();
                        select_box.find('option:selected').remove();
                        if (select_box.children().length == 0) {
                            $('#new_record').hide();
                            $('#all_mapped').show();
                        }
                    },
                    error : function() {
                        $('#new_record td.actions').find('img').remove();
                        $('#new_record td.actions').find('button').show();
                    }
                });
                return false;
            });
        });
    }
};
/**
 * Javascript for source administration
 */
App.source.controllers.source_admin = {

    // Settings page: test button that checks the configured svn
    // executable path on the server via an async request.
    index : function () {
        $(document).ready(function () {
            // NOTE(review): $(this) is the ready-handler context, so the
            // lookup effectively scans the whole document — confirm.
            var test_results_div = $(this).find('.test_results');
            var test_div = test_results_div.parent();
            test_results_div.prepend('<img class="source_results_img" src="" alt=""/>');
            $('.source_results_img').hide();

            $('#check_svn_path button:eq(0)').click(function () {
                $('.source_results_img').show();
                var svn_path = $('#svn_path').val();
                var indicator_img = $('.source_results_img');
                var result_span = test_div.find('.test_results span:eq(0)');
                indicator_img.attr('src', App.data.indicator_url);
                result_span.html('');
                $.ajax({
                    type: "GET",
                    data: "svn_path=" + svn_path,
                    url: App.data.test_svn_url,
                    success: function(msg){
                        // Server returns the literal string 'true' on
                        // success, or an error message otherwise.
                        if (msg=='true') {
                            indicator_img.attr('src', App.data.ok_indicator_url);
                            result_span.html(App.lang('Subversion executable found'));
                        } else {
                            indicator_img.attr('src', App.data.error_indicator_url);
                            result_span.html(App.lang('Error accessing SVN executable') + ': ' + msg);
                        } // if
                    }
                });
            });
        });
    }
};
/**
 * Init JS functions for source file pages: "compare to revision" and
 * "change revision" prompts.
 */
App.widgets.SourceFilePages = function () {
    return {
        init : function () {
            var delimiter = '&';

            $('#object_quick_option_compare a').click(function () {
                var compared_revision = parseInt(prompt(App.lang('Enter revision number'), ""));
                if (isNaN(compared_revision)) {
                    alert(App.lang('Please insert a revision number'));
                } else {
                    // BUG FIX: 'peg=' was concatenated directly after the
                    // revision number ("...compare_to=5peg=123"); join the
                    // two parameters with the delimiter, as the
                    // change-revision handler below already does.
                    window.location = App.data.compare_url + delimiter + 'compare_to=' + compared_revision + delimiter + 'peg=' + App.data.active_revision;
                } // if
                return false;
            });

            $('#change_revision').click(function () {
                var new_revision = parseInt(prompt(App.lang('Enter new revision number'), ""));
                if (isNaN(new_revision)) {
                    alert(App.lang('Please insert a revision number'));
                } else {
                    window.location = App.data.browse_url + delimiter + 'r=' + new_revision + delimiter + 'peg=' + App.data.active_revision;
                } // if
                return false;
            });
        }
    }
} ();
/**
 * Test repository connection from the add/edit repository form.
 * (Also strips dataset-extraction residue that was fused onto the
 * closing line and broke parsing.)
 */
App.source.AddEditForm = function() {
    return {
        init : function () {
            var result_container = $('#test_connection .test_connection_results');
            var result_image = $('img:eq(0)', result_container);
            var result_output = $('span:eq(0)', result_container);

            $('#test_connection button').click(function () {
                result_output.html(App.lang('Checking...'));
                result_image.attr('src', App.data.indicator_url);

                if ($('#repositoryUrl').attr('value') == undefined) {
                    result_image.attr('src', App.data.error_indicator_url);
                    result_output.html(App.lang('You need to enter repository URL first'));
                }
                else {
                    var delimiter = App.data.path_info_through_query_string ? '&' : '?';
                    // NOTE(review): the repository password travels in the
                    // GET query string, so it can end up in server logs and
                    // browser history — consider POSTing it instead.
                    $.ajax( {
                        url: App.data.repository_test_connection_url + delimiter + 'url=' + $('#repositoryUrl').attr('value') + '&user=' + $('#repositoryUsername').attr('value') + '&pass=' + $('#repositoryPassword').attr('value') + '&engine=' + $('#repositoryType option:selected').attr('value'),
                        type: 'GET',
                        success : function(response) {
                            if (response == 'ok') {
                                result_image.attr('src', App.data.ok_indicator_url);
                                result_output.html(App.lang('Connection parameters are valid'));
                            }
                            else {
                                result_image.attr('src', App.data.error_indicator_url);
                                result_output.html(App.lang('Could not connect to repository:') + ' ' + response); // if not success, reponse is a svn error message
                            }
                        }
                    });
                }
            });
        }
    };
} ();
#include <comm/socket.h>
/*-----------------------------------------------------------------*/
#ifdef __cplusplus
/* callback to read from a ISockioInterface object */
/* Fills pBuf with up to ulBufLen bytes, looping over short reads.
 * Returns the number of bytes read, or IOCFN_BAD_LEN on bad arguments
 * or when a read error/EOF occurs before anything was read.  A partial
 * read is returned as-is, so an EOF surfaces only on the *next* call. */
UINT32 isioIOReadCfn (void *pFin, UINT8 pBuf[], const UINT32 ulBufLen)
{
	static const SINT32 DEFAULT_READ_TIMEOUT=30;
	ISockioInterface *pSock=(ISockioInterface *) pFin;
	UINT32 rLen=0;
	UINT8 *bp=pBuf;

	if ((NULL == pFin) || (NULL == pBuf))
		return IOCFN_BAD_LEN;

	while (rLen < ulBufLen)
	{
		int sCount=pSock->Read((char *) bp, (ulBufLen - rLen), DEFAULT_READ_TIMEOUT);

		// if read something from socket, then return it (delay EOF till next call)
		if (sCount < 0)
		{
			if (0 == rLen)
				return IOCFN_BAD_LEN;
			break;
		}

		rLen += sCount;
		bp += sCount;
	}

	return rLen;
}
#endif /* of __cplusplus */
/*-----------------------------------------------------------------*/
/* Writes ulBufLen bytes from pBuf to the socket; returns ulBufLen on a
 * complete write, IOCFN_BAD_LEN on bad arguments or a short write. */
UINT32 sockIOWriteCfn (void *pFout, const char pBuf[], const UINT32 ulBufLen)
{
	SOCKET sock=(SOCKET) pFout;

	if ((BAD_SOCKET == sock) || (NULL == pBuf))
		return IOCFN_BAD_LEN;

	const int written=sockWrite(sock, pBuf, ulBufLen);
	return (((UINT32) written) != ulBufLen) ? IOCFN_BAD_LEN : ulBufLen;
}
/*---------------------------------------------------------------------------*/
#ifdef __cplusplus
/* callback to write to a ISockioInterface object */
/* Writes ulBufLen bytes through the interface; returns ulBufLen on a
 * complete write, IOCFN_BAD_LEN on bad arguments or a short write. */
UINT32 isioIOWriteCfn (void *pFout, const char pBuf[], const UINT32 ulBufLen)
{
	ISockioInterface *pSock=(ISockioInterface *) pFout;

	if ((NULL == pFout) || (NULL == pBuf))
		return IOCFN_BAD_LEN;

	const int written=pSock->Write(pBuf, ulBufLen);
	return (((UINT32) written) != ulBufLen) ? IOCFN_BAD_LEN : ulBufLen;
}
#endif /* of __cplusplus */
/*-----------------------------------------------------------------*/
| lgoldstein/communitychest | chest/net/common/src/main/cpp/comm/siocfns.cpp | C++ | apache-2.0 | 1,777 |
<?php include("head.php"); ?>
<div class="title">
    <h2>Anti-pattern: </h2><h3>PRNG for CSRF</h3>
</div>

<h4>Form with CSRF</h4>
<form action="">
    <label>Donation amount</label>
    <input type="text" value="10.00">
    <?php
        // DEMO: this file deliberately shows the anti-pattern named in the
        // page title.  md5($uid . rand() . microtime()) is a weak CSRF
        // token: rand() is not a CSPRNG and microtime() is guessable, so
        // the token is predictable.  Do not copy this into real code.
        $uid = "12345678";
        $csrfToken = md5($uid . rand() . microtime());
        setCookie("csrfToken", $csrfToken);
        echo "<input type=\"hidden\" value=\"$csrfToken\">";
    ?>
    <input type="submit" value="Submit">
</form>
<?php include("foot.php"); ?>
| disaacson/cargo-cult-security | csrf.php | PHP | apache-2.0 | 488 |
// Load demo10's exported function and invoke it once with 'hh'.
var show = require('./demo10');

show('hh');
| fanbrightup/firsthalf2017 | feb/2-18/test.js | JavaScript | apache-2.0 | 43 |
#include "stdafx.h"
// General
#include "SkyManager.h"
namespace
{
    // Sky dome geometry: dome radius and number of horizontal segments.
    const float C_SkyRadius = 400.0f;
    const uint32 C_SkySegmentsCount = 32;

    // Elevation angles (degrees) of the dome rings, zenith to nadir, and
    // the sky color slot sampled at each ring (fog color below horizon).
    //.............................top.............................med.............................medh............................horiz...........................bottom
    const float C_SkyAngles[] = { 90.0f, 30.0f, 15.0f, 5.0f, 0.0f, -30.0f, -90.0f };
    const ESkyColors C_Skycolors[] = { ESkyColors::SKY_COLOR_SKY_0, ESkyColors::SKY_COLOR_SKY_1, ESkyColors::SKY_COLOR_SKY_2, ESkyColors::SKY_COLOR_SKY_3, ESkyColors::SKY_COLOR_SKY_4, ESkyColors::SKY_COLOR_FOG, ESkyColors::SKY_COLOR_FOG };
    const uint32 C_SkycolorsCount = 7;

    // Rotates the point (*x, *y) around (x0, y0) by 'angle' radians, in place.
    inline void rotate(float x0, float y0, float *x, float *y, float angle)
    {
        float xa = *x - x0;
        float ya = *y - y0;
        *x = xa * glm::cos(angle) - ya * glm::sin(angle) + x0;
        *y = xa * glm::sin(angle) + ya * glm::cos(angle) + y0;
    }
}
// Builds the sky scene node: fixes its GUID, seeds the in-game clock at
// 11:45 and loads the day/night lighting phase table.
CSkyManager::CSkyManager(IScene& Scene)
    : CSceneNode(Scene)
{
    dynamic_cast<IObjectPrivate*>(this)->SetGUID(Guid(ObjectTypes::otSceneNode, cSceneNodeSkyInstance, 1u));
    m_Time.Set(11, 45);
    LoadDayNightPhases();
}
// No explicit cleanup: all members release their resources automatically.
CSkyManager::~CSkyManager()
{}
// Collects all DBC_Light records belonging to MapID, orders them so local
// skies come first (by outer radius) and the single global sky is last,
// then builds the sky dome geometry.  Always returns true.
bool CSkyManager::Load(uint32 MapID)
{
    for (const auto& it : GetBaseManager().GetManager<CDBCStorage>()->DBC_Light())
    {
        if (MapID == it->Get_MapID())
        {
            auto sky = MakeShared(Sky, GetBaseManager().GetManager<CDBCStorage>(), it);
            m_MapSkies.push_back(sky);
        }
    }

    // Local skies ascend by outer radius; the global sky sorts to the back.
    std::sort(m_MapSkies.begin(), m_MapSkies.end(), [](const std::shared_ptr<Sky>& lhs, const std::shared_ptr<Sky>& rhs)
    {
        if (lhs->m_IsGlobalSky)
            return false;
        else if (rhs->m_IsGlobalSky)
            return true;
        else
            return lhs->m_Range.max < rhs->m_Range.max;
    });

    // A map must end with a global fallback sky; promote the last local
    // one if the data lacks it.
    if (m_MapSkies.size() > 0 && !m_MapSkies.back()->m_IsGlobalSky)
    {
        Log::Error("Sky: Sky for maps [%d] size [%d] don't have global sky!!!", MapID, m_MapSkies.size());
        m_MapSkies.back()->m_IsGlobalSky = true;
    }

    SetCullStrategy(ECullStrategy::None);
    CreateSkyColorsBuffer();
    //SetState(ILoadable::ELoadableState::Loaded);
    return true;
}
//
// ISkyManager
//

// Recomputes the blended sky parameters for the camera position and
// refreshes the per-vertex color buffer of the sky dome.
void CSkyManager::CalculateCurrentSky(const glm::vec3& CameraPosition)
{
    if (m_MapSkies.empty())
        return;

    CalculateSkiesWeights(CameraPosition);

    // interpolation: accumulate each active sky's time-interpolated
    // parameters, scaled by that sky's weight.
    m_CurrentSkyParams.Clear();
    for (const auto& it : m_MapSkies)
    {
        if (it->m_Wight > 0.0f)
        {
            SSkyParams params = it->Interpolate(m_Time.GetTime());
            params *= it->m_Wight;
            m_CurrentSkyParams += params;
        }
    }

    // Geometry: two triangles (six vertices) per dome quad, colored by
    // the ring colors; must match the vertex order in CreateSkyColorsBuffer.
    std::vector<ColorRGBA> colors;
    for (uint32 h = 0; h < C_SkySegmentsCount; h++)
    {
        for (uint32 v = 0; v < C_SkycolorsCount - 1; v++)
        {
            colors.push_back(ColorRGBA(GetColor(C_Skycolors[v]), 1.0f));
            colors.push_back(ColorRGBA(GetColor(C_Skycolors[v + 1]), 1.0f));
            colors.push_back(ColorRGBA(GetColor(C_Skycolors[v + 1]), 1.0f));
            colors.push_back(ColorRGBA(GetColor(C_Skycolors[v + 1]), 1.0f));
            colors.push_back(ColorRGBA(GetColor(C_Skycolors[v]), 1.0f));
            colors.push_back(ColorRGBA(GetColor(C_Skycolors[v]), 1.0f));
        }
    }

    // Upload the new colors by copying through a staging vertex buffer.
    std::shared_ptr<IBuffer> colorsBufferNew = GetRenderDevice().GetObjectsFactory().CreateVertexBuffer(colors);
    m_SkyColorsBuffer->Copy(colorsBufferNew.get());
}
// Builds the static sky dome mesh (positions) plus an initially empty,
// same-sized color buffer that CalculateCurrentSky rewrites every frame,
// and attaches both to this node's model.
void CSkyManager::CreateSkyColorsBuffer()
{
    glm::vec3 basepos1[C_SkycolorsCount];
    glm::vec3 basepos2[C_SkycolorsCount];

    std::vector<glm::vec3> vertices;
    for (uint32 h = 0; h < C_SkySegmentsCount; h++)
    {
        // Two adjacent meridians of the dome, rotated around the Y axis.
        for (uint32 i = 0; i < C_SkycolorsCount; i++)
        {
            basepos1[i] = basepos2[i] = glm::vec3(cosf(glm::radians(C_SkyAngles[i])), sinf(glm::radians(C_SkyAngles[i])), 0.0f) * C_SkyRadius;
            rotate(0, 0, &basepos1[i].x, &basepos1[i].z, glm::two_pi<float>() / C_SkySegmentsCount * (h + 0));
            rotate(0, 0, &basepos2[i].x, &basepos2[i].z, glm::two_pi<float>() / C_SkySegmentsCount * (h + 1));
        }

        // Two triangles per quad; order must match CalculateCurrentSky.
        for (uint32 v = 0; v < C_SkycolorsCount - 1; v++)
        {
            vertices.push_back(basepos1[v]);
            vertices.push_back(basepos1[v + 1]);
            vertices.push_back(basepos2[v + 1]);
            vertices.push_back(basepos2[v + 1]);
            vertices.push_back(basepos2[v]);
            vertices.push_back(basepos1[v]);
        }
    }

    // Vertex buffer
    std::shared_ptr<IBuffer> vertexBuffer = GetRenderDevice().GetObjectsFactory().CreateVertexBuffer(vertices);

    // Colors buffer (one vec4 per vertex, refreshed every frame)
    m_SkyColorsBuffer = GetRenderDevice().GetObjectsFactory().CreateVoidVertexBuffer(vertices.data(), vertices.size(), 0, sizeof(glm::vec4));

    // Geometry
    std::shared_ptr<IGeometry> geometry = GetRenderDevice().GetObjectsFactory().CreateGeometry();
    geometry->AddVertexBuffer(BufferBinding("POSITION", 0), vertexBuffer);
    geometry->AddVertexBuffer(BufferBinding("COLOR", 0), m_SkyColorsBuffer);

    // Material
    std::shared_ptr<IModel> model = GetRenderDevice().GetObjectsFactory().CreateModel();
    model->AddConnection(nullptr, geometry);

    GetComponentT<IModelComponent>()->SetModel(model);
}
// Assigns a blend weight to every sky based on the camera's distance to
// each local sky's center.  Iterates from the most-local sky outward; the
// global sky (always last, weight 1) supplies whatever weight the local
// skies do not claim.
void CSkyManager::CalculateSkiesWeights(const glm::vec3& CameraPosition)
{
    m_MapSkies.back()->m_Wight = 1.0f;
    _ASSERT(m_MapSkies.back()->m_IsGlobalSky);

    for (int i = m_MapSkies.size() - 2; i >= 0; i--)
    {
        std::shared_ptr<Sky> s = m_MapSkies[i];
        const float dist = glm::length(CameraPosition - s->m_Position);

        if (dist < s->m_Range.min)
        {
            // we're in a sky, zero out the rest
            s->m_Wight = 1.0f;
            for (uint32_t j = i + 1; j < m_MapSkies.size(); j++)
            {
                m_MapSkies[j]->m_Wight = 0.0f;
            }
        }
        else if (dist < s->m_Range.max)
        {
            // we're in an outer area, scale down the other weights
            float r = (dist - s->m_Range.min) / (s->m_Range.max - s->m_Range.min);
            s->m_Wight = 1.0f - r;
            for (uint32_t j = i + 1; j < m_MapSkies.size(); j++)
            {
                m_MapSkies[j]->m_Wight *= r;
            }
        }
        else
        {
            // Camera is outside this sky entirely.
            s->m_Wight = 0.0f;
        }
    }
}
// Loads the day/night lighting phase table from World\dnc.db into
// m_DayNightPhases; logs and bails out if the file cannot be opened.
void CSkyManager::LoadDayNightPhases()
{
    auto f = GetBaseManager().GetManager<IFilesManager>()->Open("World\\dnc.db");
    if (f == nullptr)
    {
        Log::Error("DayNightCycle[]: Can't init day-night cycle.");
        return;
    }

    uint32 nFields1, nFields;

    // Header: the field count is stored twice and is expected to be 25.
    f->readBytes(&nFields, 4);
    f->readBytes(&nFields1, 4);
    _ASSERT(nFields == nFields1);
    _ASSERT(nFields == 25);

    // Field Descriptions (magic marker)
    uint32 magic0x53;
    f->readBytes(&magic0x53, 4);
    _ASSERT(magic0x53 == 0x53);

    // Final offset
    uint32 d;
    f->readBytes(&d, 4); // d is now the final offset

    // Skip names
    f->seek(8 + nFields * 8);

    // Read consecutive phase records until the data section ends.
    while (f->getPos() < d)
    {
        SDayNightPhase ols(f);
        m_DayNightPhases.push_back(ols);
    }
}
void CSkyManager::CalculateCurrentDayNightPhase()
{
uint32 hourA = m_Time.GetTime() / 120;
uint32 hourB = (hourA + 1) % 24;
const SDayNightPhase& a = m_DayNightPhases[hourA];
const SDayNightPhase& b = m_DayNightPhases[hourB];
float r = static_cast<float>(m_Time.GetTime() - (hourA * 120)) / 120.0f;
m_CurrentDayNightPhase = SDayNightPhase(a, b, r);
}
//
// ISceneNode
//

// Per-frame update: recompute the blended sky and day/night phase, then
// pin the dome to the culling camera so it always surrounds the viewer.
void CSkyManager::Update(const UpdateEventArgs& e)
{
    //m_Time.Tick();

    CalculateCurrentSky(e.CameraForCulling->GetPosition());
    CalculateCurrentDayNightPhase();

    // Sky always follows the camera position
    SetLocalPosition(e.CameraForCulling->GetPosition());
}
#if 0
void EnvironmentManager::outdoorLighting()
{
//m_OutdoorAmbientColor = vec4(m_SkyManager->GetColor(ESkyColors::SKY_COLOR_GLOBAL_AMBIENT), 1.0f); // BLACK?
//m_OutdoorDayDiffuseColor = vec4(m_SkyManager->GetColor(ESkyColors::SKY_COLOR_GLOBAL_DIFFUSE) * m_CurrentDayNightPhase.dayIntensity, 1.0f);
//m_OutdoorNightDiffuseColor = vec4(m_SkyManager->GetColor(ESkyColors::SKY_COLOR_GLOBAL_DIFFUSE) * m_CurrentDayNightPhase.nightIntensity, 1.0f);
//m_OutdoorSpecularColor = vec4(1.4f, 1.4f, 1.4f, 1.0f);
}
void EnvironmentManager::SetAmbientLights(bool on)
{
if (on)
{
/*vec4 ambient(m_SkyManager->GetColor(ESkyColors::SKY_COLOR_GLOBAL_AMBIENT), 1);
//glLightModelfv(GL_LIGHT_MODEL_AMBIENT, ambient);
if (m_CurrentDayNightPhase.dayIntensity > 0)
{
//glEnable(GL_LIGHT0);
}
else
{
//glDisable(GL_LIGHT0);
}
if (m_CurrentDayNightPhase.nightIntensity > 0)
{
//glEnable(GL_LIGHT1);
}
else
{
//glDisable(GL_LIGHT1);
}*/
}
else
{
glm::vec4 ambient(0, 0, 0, 1);
//glLightModelfv(GL_LIGHT_MODEL_AMBIENT, ambient);
//glDisable(GL_LIGHT0);
//glDisable(GL_LIGHT1);
}
}
void EnvironmentManager::SetFog()
{
/*if (m_QualitySettings.drawfog)
{
float fogdist = m_QualitySettings.fogdistance;
float fogstart = 0.5f;
m_QualitySettings.culldistance = fogdist;
//vec4 fogcolor(m_SkyManager->GetColor(ESkyColors::SKY_COLOR_FOG), 1);
//glFogfv(GL_FOG_COLOR, fogcolor); // TODO: retreive fogstart and fogend from lights.lit somehow
//glFogf(GL_FOG_START, fogdist * fogstart);
//glFogf(GL_FOG_END, fogdist);
//glEnable(GL_FOG);
}
else
{
//glDisable(GL_FOG);
m_QualitySettings.culldistance = m_QualitySettings.MapChunkRenderDistance;
}*/
}
#endif
package com.wslfinc.cf.sdk.entities;
/**
 * Scoring stage of a Codeforces submission's verdict.
 *
 * @author Wsl_F
 */
public enum SubmissionType {

    /**
     * If type is PRELIMINARY then points can decrease (if, for example,
     * the solution fails during the system test).
     */
    PRELIMINARY,
    /**
     * FINAL: the party can only increase points for this problem by
     * submitting better solutions.
     */
    FINAL
}
| WslF/CF-rating-prediction | CF-PredictorBackEnd/src/main/java/com/wslfinc/cf/sdk/entities/SubmissionType.java | Java | apache-2.0 | 348 |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.eql.expression.function.scalar.string;
import org.elasticsearch.xpack.ql.execution.search.QlSourceBuilder;
import org.elasticsearch.xpack.ql.expression.Expression;
import org.elasticsearch.xpack.ql.expression.gen.pipeline.Pipe;
import org.elasticsearch.xpack.ql.tree.NodeInfo;
import org.elasticsearch.xpack.ql.tree.Source;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
/**
 * Execution pipe for the EQL {@code endsWith(input, pattern)} function:
 * holds the two argument pipes plus the case-sensitivity flag and folds
 * them into an {@link EndsWithFunctionProcessor}.
 */
public class EndsWithFunctionPipe extends Pipe {

    private final Pipe input;
    private final Pipe pattern;
    private final boolean isCaseSensitive;

    public EndsWithFunctionPipe(Source source, Expression expression, Pipe input, Pipe pattern, boolean isCaseSensitive) {
        super(source, expression, Arrays.asList(input, pattern));
        this.input = input;
        this.pattern = pattern;
        this.isCaseSensitive = isCaseSensitive;
    }

    @Override
    public final Pipe replaceChildren(List<Pipe> newChildren) {
        return replaceChildren(newChildren.get(0), newChildren.get(1));
    }

    @Override
    public final Pipe resolveAttributes(AttributeResolver resolver) {
        Pipe newInput = input.resolveAttributes(resolver);
        Pipe newPattern = pattern.resolveAttributes(resolver);
        if (newInput == input && newPattern == pattern) {
            // Nothing changed: keep this instance to avoid needless churn.
            return this;
        }
        return replaceChildren(newInput, newPattern);
    }

    @Override
    public boolean supportedByAggsOnlyQuery() {
        return input.supportedByAggsOnlyQuery() && pattern.supportedByAggsOnlyQuery();
    }

    @Override
    public boolean resolved() {
        return input.resolved() && pattern.resolved();
    }

    protected EndsWithFunctionPipe replaceChildren(Pipe newInput, Pipe newPattern) {
        return new EndsWithFunctionPipe(source(), expression(), newInput, newPattern, isCaseSensitive);
    }

    @Override
    public final void collectFields(QlSourceBuilder sourceBuilder) {
        input.collectFields(sourceBuilder);
        pattern.collectFields(sourceBuilder);
    }

    @Override
    protected NodeInfo<EndsWithFunctionPipe> info() {
        return NodeInfo.create(this, EndsWithFunctionPipe::new, expression(), input, pattern, isCaseSensitive);
    }

    @Override
    public EndsWithFunctionProcessor asProcessor() {
        return new EndsWithFunctionProcessor(input.asProcessor(), pattern.asProcessor(), isCaseSensitive);
    }

    public Pipe input() {
        return input;
    }

    public Pipe pattern() {
        return pattern;
    }

    protected boolean isCaseSensitive() {
        return isCaseSensitive;
    }

    @Override
    public int hashCode() {
        return Objects.hash(input, pattern, isCaseSensitive);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }

        EndsWithFunctionPipe other = (EndsWithFunctionPipe) obj;
        // Compare the primitive flag with == instead of autoboxing it
        // through Objects.equals (fix: avoids two needless Boolean boxes
        // per comparison).
        return Objects.equals(input(), other.input())
            && Objects.equals(pattern(), other.pattern())
            && isCaseSensitive() == other.isCaseSensitive();
    }
}
| nknize/elasticsearch | x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/EndsWithFunctionPipe.java | Java | apache-2.0 | 3,428 |
/**
 * Switches the easyui theme by rewriting the href of the stylesheet link
 * with id "easyuiTheme" (in this page and in every embedded iframe), then
 * stores the chosen theme name in a cookie for 7 days.
 *
 * @param {string} themeName directory name of the theme, e.g. "default".
 */
function changeThemeFun(themeName) {
	var $easyuiTheme = $('#easyuiTheme');
	var url = $easyuiTheme.attr('href');
	// Keep everything before the "themes" segment and append the new theme path.
	var href = url.substring(0, url.indexOf('themes')) + 'themes/' + themeName + '/easyui.css';
	$easyuiTheme.attr('href', href);
	// Also swap the stylesheet inside every embedded iframe so the whole
	// page changes theme consistently.
	var $iframe = $('iframe');
	if ($iframe.length > 0) {
		for ( var i = 0; i < $iframe.length; i++) {
			var ifr = $iframe[i];
			$(ifr).contents().find('#easyuiTheme').attr('href', href);
		}
	}
	// Remember the choice so it survives page reloads.
	$.cookie('easyuiThemeName', themeName, {
		expires : 7
	});
};
if ($.cookie('easyuiThemeName')) {
	// Firefox has problems when the saved theme is applied immediately on
	// load, so defer the call slightly.
	setTimeout(function(){
		// Re-apply the theme name previously stored in the cookie.
		changeThemeFun($.cookie('easyuiThemeName'));
	},500);
} | zhgo116/fancy | Fancy/src/main/webapp/ui/changeEasyuiTheme.js | JavaScript | apache-2.0 | 1,014
package org.hl7.fhir.instance.model;
/*
Copyright (c) 2011+, HL7, Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of HL7 nor the names of its contributors may be used to
endorse or promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
*/
// Generated on Tue, Sep 1, 2015 19:08-0400 for FHIR v1.0.0
import org.hl7.fhir.instance.model.annotations.DatatypeDef;
import org.hl7.fhir.instance.model.annotations.Block;
import org.hl7.fhir.instance.model.api.*;
/**
 * A measured amount of time — the FHIR {@code Duration} datatype. It is a
 * specialization of {@link Quantity}; this generated subclass only carries the
 * datatype name, all element fields (value, comparator, unit, system, code)
 * are inherited.
 */
@DatatypeDef(name="Duration")
public class Duration extends Quantity {

    private static final long serialVersionUID = 1069574054L;

    /** Returns a deep copy of this Duration, element by element. */
    public Duration copy() {
        Duration dst = new Duration();
        copyValues(dst);
        dst.value = value == null ? null : value.copy();
        dst.comparator = comparator == null ? null : comparator.copy();
        dst.unit = unit == null ? null : unit.copy();
        dst.system = system == null ? null : system.copy();
        dst.code = code == null ? null : code.copy();
        return dst;
    }

    /** Typed alias for {@link #copy()} used by the generated framework code. */
    protected Duration typedCopy() {
        return copy();
    }

    /** Deep equality: compares every inherited element recursively. */
    @Override
    public boolean equalsDeep(Base other) {
        if (!super.equalsDeep(other))
            return false;
        if (!(other instanceof Duration))
            return false;
        Duration o = (Duration) other;
        return compareDeep(value, o.value, true) && compareDeep(comparator, o.comparator, true) && compareDeep(unit, o.unit, true)
            && compareDeep(system, o.system, true) && compareDeep(code, o.code, true);
    }

    /** Shallow equality: compares primitive element values only. */
    @Override
    public boolean equalsShallow(Base other) {
        if (!super.equalsShallow(other))
            return false;
        if (!(other instanceof Duration))
            return false;
        Duration o = (Duration) other;
        return compareValues(value, o.value, true) && compareValues(comparator, o.comparator, true) && compareValues(unit, o.unit, true)
            && compareValues(system, o.system, true) && compareValues(code, o.code, true);
    }

    /** True when no element of the quantity is populated. */
    public boolean isEmpty() {
        return super.isEmpty() && (value == null || value.isEmpty()) && (comparator == null || comparator.isEmpty())
            && (unit == null || unit.isEmpty()) && (system == null || system.isEmpty()) && (code == null || code.isEmpty())
            ;
    }

}
| cementsuf/hapi-fhir | hapi-fhir-structures-hl7org-dstu2/src/main/java/org/hl7/fhir/instance/model/Duration.java | Java | apache-2.0 | 3,973 |
<?php
// @link http://schemas.wbeme.com/json-schema/eme/users/request/get-user-batch-response/latest.json#
namespace Eme\Schemas\Users\Request;
use Gdbots\Pbj\Message;
/**
 * Marker interface for the get-user-batch response message. Generated from
 * the JSON schema referenced at the top of this file; it declares no methods
 * of its own beyond the {@see Message} contract.
 */
interface GetUserBatchResponse extends Message
{
}
| wb-eme/schemas | build/php/src/Eme/Schemas/Users/Request/GetUserBatchResponse.php | PHP | apache-2.0 | 221 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.services.azure.storage;
import org.apache.nifi.processors.azure.storage.utils.AzureStorageUtils;
import org.apache.nifi.reporting.InitializationException;
import org.apache.nifi.util.NoOpProcessor;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.Before;
import org.junit.Test;
import java.util.HashMap;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
/**
 * Unit tests for {@code ADLSCredentialsControllerService}: verifies that the
 * mutually exclusive credential properties (account key, SAS token, managed
 * identity) are validated correctly, and that the credential details handed
 * out by the service reflect the configured properties.
 */
public class TestADLSCredentialsControllerService {

    public static final String CREDENTIALS_SERVICE_IDENTIFIER = "credentials-service";

    // Dummy property values; the service is never contacted in these tests.
    private static final String ACCOUNT_NAME_VALUE = "AccountName";
    private static final String ACCOUNT_KEY_VALUE = "AccountKey";
    private static final String SAS_TOKEN_VALUE = "SasToken";
    public static final String END_POINT_SUFFIX_VALUE = "end.point.suffix";

    private TestRunner runner;
    private ADLSCredentialsControllerService credentialsService;

    /** Registers a fresh service on a no-op processor before each test. */
    @Before
    public void setUp() throws InitializationException {
        runner = TestRunners.newTestRunner(NoOpProcessor.class);
        credentialsService = new ADLSCredentialsControllerService();
        runner.addControllerService(CREDENTIALS_SERVICE_IDENTIFIER, credentialsService);
    }

    // --- validation: configurations that must be rejected ---

    @Test
    public void testNotValidBecauseAccountNameMissing() {
        configureAccountKey();

        runner.assertNotValid(credentialsService);
    }

    @Test
    public void testNotValidBecauseNoCredentialsIsSet() {
        configureAccountName();

        runner.assertNotValid(credentialsService);
    }

    @Test
    public void testNotValidBecauseBothAccountKeyAndSasTokenSpecified() {
        configureAccountName();

        configureAccountKey();
        configureSasToken();

        runner.assertNotValid(credentialsService);
    }

    @Test
    public void testNotValidBecauseBothAccountKeyAndUseManagedIdentitySpecified() {
        configureAccountName();

        configureAccountKey();
        configureUseManagedIdentity();

        runner.assertNotValid(credentialsService);
    }

    @Test
    public void testNotValidBecauseBothSasTokenAndUseManagedIdentitySpecified() {
        configureAccountName();

        configureSasToken();
        configureUseManagedIdentity();

        runner.assertNotValid(credentialsService);
    }

    @Test
    public void testNotValidBecauseAllCredentialsSpecified() {
        configureAccountName();

        configureAccountKey();
        configureSasToken();
        configureUseManagedIdentity();

        runner.assertNotValid(credentialsService);
    }

    @Test
    public void testNotValidWithEmptyEndpointSuffix() {
        configureAccountName();
        configureAccountKey();

        runner.setProperty(credentialsService, ADLSCredentialsControllerService.ENDPOINT_SUFFIX, "");
        runner.assertNotValid(credentialsService);
    }

    @Test
    public void testNotValidWithWhitespaceEndpointSuffix() {
        configureAccountName();
        configureAccountKey();

        runner.setProperty(credentialsService, ADLSCredentialsControllerService.ENDPOINT_SUFFIX, " ");
        runner.assertNotValid(credentialsService);
    }

    // --- validation: configurations that must be accepted ---

    @Test
    public void testValidWithAccountNameAndAccountKey() {
        configureAccountName();
        configureAccountKey();

        runner.assertValid(credentialsService);
    }

    @Test
    public void testValidWithAccountNameAndSasToken() {
        configureAccountName();
        configureSasToken();

        runner.assertValid(credentialsService);
    }

    @Test
    public void testValidWithAccountNameAndUseManagedIdentity() {
        configureAccountName();
        configureUseManagedIdentity();

        runner.assertValid(credentialsService);
    }

    // --- credential details handed out by the enabled service ---

    @Test
    public void testGetCredentialsDetailsWithAccountKey() throws Exception {
        // GIVEN
        configureAccountName();
        configureAccountKey();

        runner.enableControllerService(credentialsService);

        // WHEN
        ADLSCredentialsDetails actual = credentialsService.getCredentialsDetails(new HashMap<>());

        // THEN
        assertEquals(ACCOUNT_NAME_VALUE, actual.getAccountName());
        assertEquals(ACCOUNT_KEY_VALUE, actual.getAccountKey());
        assertNull(actual.getSasToken());
        assertFalse(actual.getUseManagedIdentity());
        assertNotNull(actual.getEndpointSuffix());
    }

    @Test
    public void testGetCredentialsDetailsWithSasToken() throws Exception {
        // GIVEN
        configureAccountName();
        configureSasToken();

        runner.enableControllerService(credentialsService);

        // WHEN
        ADLSCredentialsDetails actual = credentialsService.getCredentialsDetails(new HashMap<>());

        // THEN
        assertEquals(ACCOUNT_NAME_VALUE, actual.getAccountName());
        assertEquals(SAS_TOKEN_VALUE, actual.getSasToken());
        assertNull(actual.getAccountKey());
        assertFalse(actual.getUseManagedIdentity());
        assertNotNull(actual.getEndpointSuffix());
    }

    @Test
    public void testGetCredentialsDetailsWithUseManagedIdentity() throws Exception {
        // GIVEN
        configureAccountName();
        configureUseManagedIdentity();

        runner.enableControllerService(credentialsService);

        // WHEN
        ADLSCredentialsDetails actual = credentialsService.getCredentialsDetails(new HashMap<>());

        // THEN
        assertEquals(ACCOUNT_NAME_VALUE, actual.getAccountName());
        assertTrue(actual.getUseManagedIdentity());
        assertNull(actual.getAccountKey());
        assertNull(actual.getSasToken());
        assertNotNull(actual.getEndpointSuffix());
    }

    @Test
    public void testGetCredentialsDetailsWithSetEndpointSuffix() throws Exception {
        // GIVEN
        configureAccountName();
        configureAccountKey();
        configureEndpointSuffix();

        runner.enableControllerService(credentialsService);

        // WHEN
        ADLSCredentialsDetails actual = credentialsService.getCredentialsDetails(new HashMap<>());

        // THEN
        assertEquals(END_POINT_SUFFIX_VALUE, actual.getEndpointSuffix());
    }

    // --- property-setting helpers ---

    private void configureAccountName() {
        runner.setProperty(credentialsService, ADLSCredentialsControllerService.ACCOUNT_NAME, ACCOUNT_NAME_VALUE);
    }

    private void configureAccountKey() {
        runner.setProperty(credentialsService, AzureStorageUtils.ACCOUNT_KEY, ACCOUNT_KEY_VALUE);
    }

    private void configureSasToken() {
        runner.setProperty(credentialsService, AzureStorageUtils.PROP_SAS_TOKEN, SAS_TOKEN_VALUE);
    }

    private void configureUseManagedIdentity() {
        runner.setProperty(credentialsService, ADLSCredentialsControllerService.USE_MANAGED_IDENTITY, "true");
    }

    private void configureEndpointSuffix() {
        runner.setProperty(credentialsService, ADLSCredentialsControllerService.ENDPOINT_SUFFIX, END_POINT_SUFFIX_VALUE);
    }
}
| mattyb149/nifi | nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/test/java/org/apache/nifi/services/azure/storage/TestADLSCredentialsControllerService.java | Java | apache-2.0 | 7,904 |
namespace IdentityBase.EntityFramework
{
using IdentityBase.EntityFramework.Configuration;
using Microsoft.AspNetCore.Builder;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using ServiceBase.Modules;
public class InMemoryModule : IModule
{
public void ConfigureServices(
IServiceCollection services,
IConfiguration configuration)
{
services.AddEntityFrameworkStores((options) =>
{
options.DbContextOptions = (dbBuilder) =>
{
dbBuilder
.UseInMemoryDatabase("Put_value_from_config_here");
};
configuration.GetSection("EntityFramework").Bind(options);
});
}
public void Configure(IApplicationBuilder app)
{
using (IServiceScope serviceScope = app.ApplicationServices
.GetRequiredService<IServiceScopeFactory>().CreateScope())
{
EntityFrameworkOptions options = serviceScope.ServiceProvider
.GetService<EntityFrameworkOptions>();
if (options != null)
{
// Disable migration since InMemoryDatabase does not
// require one
options.MigrateDatabase = false;
}
}
}
}
} | aruss/IdentityBase | src/IdentityBase.EntityFramework/Modules/InMemoryModule.cs | C# | apache-2.0 | 1,505 |
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_config import cfg
from heat.common import exception
from heat.common import short_id
from heat.common import template_format
from heat.engine.clients.os.keystone import fake_keystoneclient as fake_ks
from heat.engine import node_data
from heat.engine.resources.aws.iam import user
from heat.engine.resources.openstack.heat import access_policy as ap
from heat.engine import scheduler
from heat.engine import stk_defn
from heat.objects import resource_data as resource_data_object
from heat.tests import common
from heat.tests import utils
user_template = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "Just a User",
"Parameters" : {},
"Resources" : {
"CfnUser" : {
"Type" : "AWS::IAM::User"
}
}
}
'''
user_template_password = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "Just a User",
"Parameters" : {},
"Resources" : {
"CfnUser" : {
"Type" : "AWS::IAM::User",
"Properties": {
"LoginProfile": { "Password": "myP@ssW0rd" }
}
}
}
}
'''
user_accesskey_template = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "Just a User",
"Parameters" : {},
"Resources" : {
"CfnUser" : {
"Type" : "AWS::IAM::User"
},
"HostKeys" : {
"Type" : "AWS::IAM::AccessKey",
"Properties" : {
"UserName" : {"Ref": "CfnUser"}
}
}
}
}
'''
user_policy_template = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "Just a User",
"Parameters" : {},
"Resources" : {
"CfnUser" : {
"Type" : "AWS::IAM::User",
"Properties" : {
"Policies" : [ { "Ref": "WebServerAccessPolicy"} ]
}
},
"WebServerAccessPolicy" : {
"Type" : "OS::Heat::AccessPolicy",
"Properties" : {
"AllowedResources" : [ "WikiDatabase" ]
}
},
"WikiDatabase" : {
"Type" : "AWS::EC2::Instance",
}
}
}
'''
class UserTest(common.HeatTestCase):
    """Tests for the AWS::IAM::User resource backed by a fake keystone."""

    def setUp(self):
        super(UserTest, self).setUp()
        self.stack_name = 'test_user_stack_%s' % utils.random_name()
        # 'aabbcc' matches the short_id patched in create_user() below.
        self.username = '%s-CfnUser-aabbcc' % self.stack_name
        self.fc = fake_ks.FakeKeystoneClient(username=self.username)
        cfg.CONF.set_default('heat_stack_user_role', 'stack_user_role')

    def create_user(self, t, stack, resource_name,
                    project_id, user_id='dummy_user',
                    password=None):
        """Create a User resource with keystone fully mocked out.

        Stores the project/user creation mocks on self so individual tests
        can assert on the calls made.
        """
        self.patchobject(user.User, 'keystone', return_value=self.fc)
        self.mock_create_project = self.patchobject(
            fake_ks.FakeKeystoneClient, 'create_stack_domain_project',
            return_value=project_id)

        resource_defns = stack.t.resource_definitions(stack)
        rsrc = user.User(resource_name,
                         resource_defns[resource_name],
                         stack)
        rsrc.store()

        self.patchobject(short_id, 'get_id', return_value='aabbcc')
        self.mock_create_user = self.patchobject(
            fake_ks.FakeKeystoneClient, 'create_stack_domain_user',
            return_value=user_id)

        self.assertIsNone(rsrc.validate())
        scheduler.TaskRunner(rsrc.create)()
        self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)
        return rsrc

    def test_user(self):
        t = template_format.parse(user_template)
        stack = utils.parse_stack(t, stack_name=self.stack_name)

        project_id = 'stackproject'
        rsrc = self.create_user(t, stack, 'CfnUser', project_id)
        self.assertEqual('dummy_user', rsrc.resource_id)
        self.assertEqual(self.username, rsrc.FnGetRefId())

        self.assertRaises(exception.InvalidTemplateAttribute,
                          rsrc.FnGetAtt, 'Foo')

        self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)

        # Suspend/resume are no-ops for this resource.
        self.assertIsNone(rsrc.handle_suspend())
        self.assertIsNone(rsrc.handle_resume())

        # Delete must succeed whether resource_id is unset or set.
        rsrc.resource_id = None
        scheduler.TaskRunner(rsrc.delete)()
        self.assertEqual((rsrc.DELETE, rsrc.COMPLETE), rsrc.state)

        rsrc.resource_id = self.fc.access
        rsrc.state_set(rsrc.CREATE, rsrc.COMPLETE)
        self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)

        scheduler.TaskRunner(rsrc.delete)()
        self.assertEqual((rsrc.DELETE, rsrc.COMPLETE), rsrc.state)

        # Deleting an already-deleted resource is also fine.
        rsrc.state_set(rsrc.CREATE, rsrc.COMPLETE)
        self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)

        scheduler.TaskRunner(rsrc.delete)()
        self.assertEqual((rsrc.DELETE, rsrc.COMPLETE), rsrc.state)
        self.mock_create_project.assert_called_once_with(stack.id)
        self.mock_create_user.assert_called_once_with(
            password=None, project_id=project_id,
            username=self.username)

    def test_user_password(self):
        t = template_format.parse(user_template_password)
        stack = utils.parse_stack(t, stack_name=self.stack_name)

        project_id = 'stackproject'
        password = u'myP@ssW0rd'
        rsrc = self.create_user(t, stack, 'CfnUser',
                                project_id=project_id,
                                password=password)
        self.assertEqual('dummy_user', rsrc.resource_id)
        self.assertEqual(self.username, rsrc.FnGetRefId())
        self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)
        self.mock_create_project.assert_called_once_with(stack.id)
        self.mock_create_user.assert_called_once_with(
            password=password, project_id=project_id,
            username=self.username)

    def test_user_validate_policies(self):
        t = template_format.parse(user_policy_template)
        stack = utils.parse_stack(t, stack_name=self.stack_name)

        project_id = 'stackproject'
        rsrc = self.create_user(t, stack, 'CfnUser', project_id)
        self.assertEqual('dummy_user', rsrc.resource_id)
        self.assertEqual(self.username, rsrc.FnGetRefId())
        self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)

        self.assertEqual([u'WebServerAccessPolicy'],
                         rsrc.properties['Policies'])

        # OK
        self.assertTrue(rsrc._validate_policies([u'WebServerAccessPolicy']))

        # Resource name doesn't exist in the stack
        self.assertFalse(rsrc._validate_policies([u'NoExistAccessPolicy']))

        # Resource name is wrong Resource type
        self.assertFalse(rsrc._validate_policies([u'NoExistAccessPolicy',
                                                  u'WikiDatabase']))

        # Wrong type (AWS embedded policy format, not yet supported)
        dict_policy = {"PolicyName": "AccessForCFNInit",
                       "PolicyDocument":
                       {"Statement": [{"Effect": "Allow",
                                       "Action":
                                       "cloudformation:DescribeStackResource",
                                       "Resource": "*"}]}}

        # However we should just ignore it to avoid breaking existing templates
        self.assertTrue(rsrc._validate_policies([dict_policy]))
        self.mock_create_project.assert_called_once_with(stack.id)
        self.mock_create_user.assert_called_once_with(
            password=None, project_id=project_id,
            username=self.username)

    def test_user_create_bad_policies(self):
        t = template_format.parse(user_policy_template)
        t['Resources']['CfnUser']['Properties']['Policies'] = ['NoExistBad']
        stack = utils.parse_stack(t, stack_name=self.stack_name)
        resource_name = 'CfnUser'
        resource_defns = stack.t.resource_definitions(stack)
        rsrc = user.User(resource_name,
                         resource_defns[resource_name],
                         stack)
        # An unknown policy reference must fail at create time.
        self.assertRaises(exception.InvalidTemplateAttribute,
                          rsrc.handle_create)

    def test_user_access_allowed(self):
        def mock_access_allowed(resource):
            return True if resource == 'a_resource' else False

        self.patchobject(ap.AccessPolicy, 'access_allowed',
                         side_effect=mock_access_allowed)

        t = template_format.parse(user_policy_template)
        stack = utils.parse_stack(t, stack_name=self.stack_name)

        project_id = 'stackproject'
        rsrc = self.create_user(t, stack, 'CfnUser', project_id)
        self.assertEqual('dummy_user', rsrc.resource_id)
        self.assertEqual(self.username, rsrc.FnGetRefId())
        self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)

        # Delegation to the attached AccessPolicy decides the answer.
        self.assertTrue(rsrc.access_allowed('a_resource'))
        self.assertFalse(rsrc.access_allowed('b_resource'))
        self.mock_create_project.assert_called_once_with(stack.id)
        self.mock_create_user.assert_called_once_with(
            password=None, project_id=project_id,
            username=self.username)

    def test_user_access_allowed_ignorepolicy(self):
        def mock_access_allowed(resource):
            return True if resource == 'a_resource' else False

        self.patchobject(ap.AccessPolicy, 'access_allowed',
                         side_effect=mock_access_allowed)

        t = template_format.parse(user_policy_template)
        # A dict-format policy entry is ignored rather than rejected.
        t['Resources']['CfnUser']['Properties']['Policies'] = [
            'WebServerAccessPolicy', {'an_ignored': 'policy'}]
        stack = utils.parse_stack(t, stack_name=self.stack_name)

        project_id = 'stackproject'
        rsrc = self.create_user(t, stack, 'CfnUser', project_id)
        self.assertEqual('dummy_user', rsrc.resource_id)
        self.assertEqual(self.username, rsrc.FnGetRefId())
        self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)

        self.assertTrue(rsrc.access_allowed('a_resource'))
        self.assertFalse(rsrc.access_allowed('b_resource'))
        self.mock_create_project.assert_called_once_with(stack.id)
        self.mock_create_user.assert_called_once_with(
            password=None, project_id=project_id,
            username=self.username)

    def test_user_refid_rsrc_id(self):
        t = template_format.parse(user_template)
        stack = utils.parse_stack(t)
        rsrc = stack['CfnUser']
        rsrc.resource_id = 'phy-rsrc-id'
        self.assertEqual('phy-rsrc-id', rsrc.FnGetRefId())

    def test_user_refid_convg_cache_data(self):
        # In convergence, the reference id comes from the cached node data.
        t = template_format.parse(user_template)
        cache_data = {'CfnUser': node_data.NodeData.from_dict({
            'uuid': mock.ANY,
            'id': mock.ANY,
            'action': 'CREATE',
            'status': 'COMPLETE',
            'reference_id': 'convg_xyz'
        })}
        stack = utils.parse_stack(t, cache_data=cache_data)
        rsrc = stack.defn['CfnUser']
        self.assertEqual('convg_xyz', rsrc.FnGetRefId())
class AccessKeyTest(common.HeatTestCase):
    """Tests for the AWS::IAM::AccessKey resource."""

    def setUp(self):
        super(AccessKeyTest, self).setUp()
        self.username = utils.PhysName('test_stack', 'CfnUser')
        self.credential_id = 'acredential123'
        self.fc = fake_ks.FakeKeystoneClient(username=self.username,
                                             user_id='dummy_user',
                                             credential_id=self.credential_id)
        cfg.CONF.set_default('heat_stack_user_role', 'stack_user_role')

    def create_user(self, t, stack, resource_name,
                    project_id='stackproject', user_id='dummy_user',
                    password=None):
        """Create the stack's user resource against the fake keystone."""
        self.patchobject(user.User, 'keystone', return_value=self.fc)
        rsrc = stack[resource_name]
        self.assertIsNone(rsrc.validate())
        scheduler.TaskRunner(rsrc.create)()
        self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)
        # Refresh the stack definition so dependent resources can resolve
        # references to the freshly created user.
        stk_defn.update_resource_data(stack.defn, resource_name,
                                      rsrc.node_data())
        return rsrc

    def create_access_key(self, t, stack, resource_name):
        """Create the AccessKey resource and assert it completed."""
        rsrc = stack[resource_name]
        self.assertIsNone(rsrc.validate())
        scheduler.TaskRunner(rsrc.create)()
        self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)
        return rsrc

    def test_access_key(self):
        t = template_format.parse(user_accesskey_template)
        stack = utils.parse_stack(t)

        self.create_user(t, stack, 'CfnUser')
        rsrc = self.create_access_key(t, stack, 'HostKeys')

        self.assertEqual(self.fc.access,
                         rsrc.resource_id)

        self.assertEqual(self.fc.secret,
                         rsrc._secret)

        # Ensure the resource data has been stored correctly
        rs_data = resource_data_object.ResourceData.get_all(rsrc)
        self.assertEqual(self.fc.secret, rs_data.get('secret_key'))
        self.assertEqual(self.fc.credential_id, rs_data.get('credential_id'))
        self.assertEqual(2, len(rs_data.keys()))

        self.assertEqual(utils.PhysName(stack.name, 'CfnUser'),
                         rsrc.FnGetAtt('UserName'))
        # Clearing the cached secret forces a fetch via resource data.
        rsrc._secret = None
        self.assertEqual(self.fc.secret,
                         rsrc.FnGetAtt('SecretAccessKey'))

        self.assertRaises(exception.InvalidTemplateAttribute,
                          rsrc.FnGetAtt, 'Foo')

        scheduler.TaskRunner(rsrc.delete)()
        self.assertEqual((rsrc.DELETE, rsrc.COMPLETE), rsrc.state)

    def test_access_key_get_from_keystone(self):
        self.patchobject(user.AccessKey, 'keystone', return_value=self.fc)
        t = template_format.parse(user_accesskey_template)
        stack = utils.parse_stack(t)

        self.create_user(t, stack, 'CfnUser')
        rsrc = self.create_access_key(t, stack, 'HostKeys')

        # Delete the resource data for secret_key, to test that existing
        # stacks which don't have the resource_data stored will continue
        # working via retrieving the keypair from keystone
        resource_data_object.ResourceData.delete(rsrc, 'credential_id')
        resource_data_object.ResourceData.delete(rsrc, 'secret_key')
        self.assertRaises(exception.NotFound,
                          resource_data_object.ResourceData.get_all,
                          rsrc)

        rsrc._secret = None
        rsrc._data = None
        self.assertEqual(self.fc.secret,
                         rsrc.FnGetAtt('SecretAccessKey'))

        scheduler.TaskRunner(rsrc.delete)()
        self.assertEqual((rsrc.DELETE, rsrc.COMPLETE), rsrc.state)

    def test_access_key_no_user(self):
        t = template_format.parse(user_accesskey_template)
        # Set the resource properties UserName to an unknown user
        t['Resources']['HostKeys']['Properties']['UserName'] = 'NonExistent'
        stack = utils.parse_stack(t)
        stack['CfnUser'].resource_id = self.fc.user_id

        resource_defns = stack.t.resource_definitions(stack)
        rsrc = user.AccessKey('HostKeys',
                              resource_defns['HostKeys'],
                              stack)
        create = scheduler.TaskRunner(rsrc.create)
        self.assertRaises(exception.ResourceFailure, create)
        self.assertEqual((rsrc.CREATE, rsrc.FAILED), rsrc.state)

        scheduler.TaskRunner(rsrc.delete)()
        self.assertEqual((rsrc.DELETE, rsrc.COMPLETE), rsrc.state)
class AccessPolicyTest(common.HeatTestCase):
    """Tests for the OS::Heat::AccessPolicy resource."""

    def test_accesspolicy_create_ok(self):
        t = template_format.parse(user_policy_template)
        stack = utils.parse_stack(t)

        resource_name = 'WebServerAccessPolicy'
        resource_defns = stack.t.resource_definitions(stack)
        rsrc = ap.AccessPolicy(resource_name,
                               resource_defns[resource_name],
                               stack)
        scheduler.TaskRunner(rsrc.create)()
        self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)

    def test_accesspolicy_create_ok_empty(self):
        # An empty AllowedResources list is valid.
        t = template_format.parse(user_policy_template)
        resource_name = 'WebServerAccessPolicy'
        t['Resources'][resource_name]['Properties']['AllowedResources'] = []
        stack = utils.parse_stack(t)

        resource_defns = stack.t.resource_definitions(stack)
        rsrc = ap.AccessPolicy(resource_name,
                               resource_defns[resource_name],
                               stack)
        scheduler.TaskRunner(rsrc.create)()
        self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)

    def test_accesspolicy_create_err_notfound(self):
        # Referencing a resource that is not in the stack fails validation.
        t = template_format.parse(user_policy_template)
        resource_name = 'WebServerAccessPolicy'
        t['Resources'][resource_name]['Properties']['AllowedResources'] = [
            'NoExistResource']
        stack = utils.parse_stack(t)

        self.assertRaises(exception.StackValidationFailed, stack.validate)

    def test_accesspolicy_access_allowed(self):
        t = template_format.parse(user_policy_template)
        resource_name = 'WebServerAccessPolicy'
        stack = utils.parse_stack(t)

        resource_defns = stack.t.resource_definitions(stack)
        rsrc = ap.AccessPolicy(resource_name,
                               resource_defns[resource_name],
                               stack)
        # Only resources named in AllowedResources are permitted.
        self.assertTrue(rsrc.access_allowed('WikiDatabase'))
        self.assertFalse(rsrc.access_allowed('NotWikiDatabase'))
        self.assertFalse(rsrc.access_allowed(None))
| noironetworks/heat | heat/tests/aws/test_user.py | Python | apache-2.0 | 17,881 |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Xml.Linq;
using BuildCs.FileSystem;
namespace BuildCs.Xml
{
public class XmlHelper
{
public void Update(BuildItem file, Action<XDocument> config)
{
var doc = XDocument.Load(file);
config(doc);
doc.Save(file);
}
}
} | craiggwilson/buildcs | src/BuildCs.Core/Xml/XmlHelper.cs | C# | apache-2.0 | 419 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.sql;
import org.apache.calcite.rel.type.DynamicRecordType;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.sql.type.ArraySqlType;
import org.apache.calcite.sql.type.MapSqlType;
import org.apache.calcite.sql.type.MultisetSqlType;
import org.apache.calcite.sql.type.OperandTypes;
import org.apache.calcite.sql.type.SqlOperandCountRanges;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.calcite.util.Util;
/**
 * The <code>UNNEST</code> operator.
 *
 * <p>Expands one or more collection-valued operands (array, multiset or map)
 * into a relation; map operands contribute a KEY and a VALUE column.
 */
public class SqlUnnestOperator extends SqlFunctionalOperator {
  /** Whether {@code WITH ORDINALITY} was specified.
   *
   * <p>If so, the returned records include a column {@code ORDINALITY}. */
  public final boolean withOrdinality;

  public static final String ORDINALITY_COLUMN_NAME = "ORDINALITY";

  // Column names used when unnesting a map operand.
  public static final String MAP_KEY_COLUMN_NAME = "KEY";

  public static final String MAP_VALUE_COLUMN_NAME = "VALUE";

  //~ Constructors -----------------------------------------------------------

  /** Creates the operator; accepts one or more scalar, record-collection or
   * map operands. */
  public SqlUnnestOperator(boolean withOrdinality) {
    super(
        "UNNEST",
        SqlKind.UNNEST,
        200,
        true,
        null,
        null,
        OperandTypes.repeat(SqlOperandCountRanges.from(1),
            OperandTypes.SCALAR_OR_RECORD_COLLECTION_OR_MAP));
    this.withOrdinality = withOrdinality;
  }

  //~ Methods ----------------------------------------------------------------

  /** Derives the row type: one or more columns per operand (two for maps),
   * plus an ORDINALITY column when requested. */
  @Override public RelDataType inferReturnType(SqlOperatorBinding opBinding) {
    final RelDataTypeFactory typeFactory = opBinding.getTypeFactory();
    final RelDataTypeFactory.Builder builder = typeFactory.builder();
    for (Integer operand : Util.range(opBinding.getOperandCount())) {
      RelDataType type = opBinding.getOperandType(operand);
      if (type.getSqlTypeName() == SqlTypeName.ANY) {
        // When there is one operand with unknown type (ANY), the return type
        // is dynamic star
        return builder
            .add(DynamicRecordType.DYNAMIC_STAR_PREFIX,
                SqlTypeName.DYNAMIC_STAR)
            .nullable(true)
            .buildDynamic();
      }
      // A struct operand wraps the collection in a single field; unwrap it.
      if (type.isStruct()) {
        type = type.getFieldList().get(0).getType();
      }
      assert type instanceof ArraySqlType || type instanceof MultisetSqlType
          || type instanceof MapSqlType;
      if (type instanceof MapSqlType) {
        // A map expands into a (KEY, VALUE) column pair.
        builder.add(MAP_KEY_COLUMN_NAME, type.getKeyType());
        builder.add(MAP_VALUE_COLUMN_NAME, type.getValueType());
      } else {
        if (type.getComponentType().isStruct()) {
          // Record elements contribute their fields directly.
          builder.addAll(type.getComponentType().getFieldList());
        } else {
          // Scalar elements get a generated alias based on operand position.
          builder.add(SqlUtil.deriveAliasFromOrdinal(operand),
              type.getComponentType());
        }
      }
    }
    if (withOrdinality) {
      builder.add(ORDINALITY_COLUMN_NAME, SqlTypeName.INTEGER);
    }
    return builder.build();
  }

  @Override public void unparse(SqlWriter writer, SqlCall call, int leftPrec,
      int rightPrec) {
    super.unparse(writer, call, leftPrec, rightPrec);
    if (withOrdinality) {
      writer.keyword("WITH ORDINALITY");
    }
  }

  // UNNEST arguments are collections, never scalars.
  public boolean argumentMustBeScalar(int ordinal) {
    return false;
  }

}
// End SqlUnnestOperator.java
| b-slim/calcite | core/src/main/java/org/apache/calcite/sql/SqlUnnestOperator.java | Java | apache-2.0 | 4,109 |
// Copyright 2014 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <gtest/gtest.h>
#include <glog/logging.h>
// Test-runner entry point: initializes glog and gtest, then executes every
// registered test case and propagates the aggregate result.
int main(int argc, char* argv[]) {
  google::InitGoogleLogging(argv[0]);
  ::testing::InitGoogleTest(&argc, argv);

  // Send log output to stderr only, suppressing everything below ERROR so
  // test output stays quiet. Change the level to GLOG_INFO for verbose runs.
  google::LogToStderr();
  google::SetStderrLogging(google::GLOG_ERROR);

  const int test_status = RUN_ALL_TESTS();
  return test_status;
}
| blschatz/aff4 | tests/aff4tests.cc | C++ | apache-2.0 | 968 |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.computeoptimizer.model;
import javax.annotation.Generated;
/**
*
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public enum ExportableInstanceField {

    AccountId("AccountId"),
    InstanceArn("InstanceArn"),
    InstanceName("InstanceName"),
    Finding("Finding"),
    FindingReasonCodes("FindingReasonCodes"),
    LookbackPeriodInDays("LookbackPeriodInDays"),
    CurrentInstanceType("CurrentInstanceType"),
    UtilizationMetricsCpuMaximum("UtilizationMetricsCpuMaximum"),
    UtilizationMetricsMemoryMaximum("UtilizationMetricsMemoryMaximum"),
    UtilizationMetricsEbsReadOpsPerSecondMaximum("UtilizationMetricsEbsReadOpsPerSecondMaximum"),
    UtilizationMetricsEbsWriteOpsPerSecondMaximum("UtilizationMetricsEbsWriteOpsPerSecondMaximum"),
    UtilizationMetricsEbsReadBytesPerSecondMaximum("UtilizationMetricsEbsReadBytesPerSecondMaximum"),
    UtilizationMetricsEbsWriteBytesPerSecondMaximum("UtilizationMetricsEbsWriteBytesPerSecondMaximum"),
    UtilizationMetricsDiskReadOpsPerSecondMaximum("UtilizationMetricsDiskReadOpsPerSecondMaximum"),
    UtilizationMetricsDiskWriteOpsPerSecondMaximum("UtilizationMetricsDiskWriteOpsPerSecondMaximum"),
    UtilizationMetricsDiskReadBytesPerSecondMaximum("UtilizationMetricsDiskReadBytesPerSecondMaximum"),
    UtilizationMetricsDiskWriteBytesPerSecondMaximum("UtilizationMetricsDiskWriteBytesPerSecondMaximum"),
    UtilizationMetricsNetworkInBytesPerSecondMaximum("UtilizationMetricsNetworkInBytesPerSecondMaximum"),
    UtilizationMetricsNetworkOutBytesPerSecondMaximum("UtilizationMetricsNetworkOutBytesPerSecondMaximum"),
    UtilizationMetricsNetworkPacketsInPerSecondMaximum("UtilizationMetricsNetworkPacketsInPerSecondMaximum"),
    UtilizationMetricsNetworkPacketsOutPerSecondMaximum("UtilizationMetricsNetworkPacketsOutPerSecondMaximum"),
    CurrentOnDemandPrice("CurrentOnDemandPrice"),
    CurrentStandardOneYearNoUpfrontReservedPrice("CurrentStandardOneYearNoUpfrontReservedPrice"),
    CurrentStandardThreeYearNoUpfrontReservedPrice("CurrentStandardThreeYearNoUpfrontReservedPrice"),
    CurrentVCpus("CurrentVCpus"),
    CurrentMemory("CurrentMemory"),
    CurrentStorage("CurrentStorage"),
    CurrentNetwork("CurrentNetwork"),
    RecommendationOptionsInstanceType("RecommendationOptionsInstanceType"),
    RecommendationOptionsProjectedUtilizationMetricsCpuMaximum("RecommendationOptionsProjectedUtilizationMetricsCpuMaximum"),
    RecommendationOptionsProjectedUtilizationMetricsMemoryMaximum("RecommendationOptionsProjectedUtilizationMetricsMemoryMaximum"),
    RecommendationOptionsPlatformDifferences("RecommendationOptionsPlatformDifferences"),
    RecommendationOptionsPerformanceRisk("RecommendationOptionsPerformanceRisk"),
    RecommendationOptionsVcpus("RecommendationOptionsVcpus"),
    RecommendationOptionsMemory("RecommendationOptionsMemory"),
    RecommendationOptionsStorage("RecommendationOptionsStorage"),
    RecommendationOptionsNetwork("RecommendationOptionsNetwork"),
    RecommendationOptionsOnDemandPrice("RecommendationOptionsOnDemandPrice"),
    RecommendationOptionsStandardOneYearNoUpfrontReservedPrice("RecommendationOptionsStandardOneYearNoUpfrontReservedPrice"),
    RecommendationOptionsStandardThreeYearNoUpfrontReservedPrice("RecommendationOptionsStandardThreeYearNoUpfrontReservedPrice"),
    RecommendationsSourcesRecommendationSourceArn("RecommendationsSourcesRecommendationSourceArn"),
    RecommendationsSourcesRecommendationSourceType("RecommendationsSourcesRecommendationSourceType"),
    LastRefreshTimestamp("LastRefreshTimestamp"),
    CurrentPerformanceRisk("CurrentPerformanceRisk"),
    RecommendationOptionsSavingsOpportunityPercentage("RecommendationOptionsSavingsOpportunityPercentage"),
    RecommendationOptionsEstimatedMonthlySavingsCurrency("RecommendationOptionsEstimatedMonthlySavingsCurrency"),
    RecommendationOptionsEstimatedMonthlySavingsValue("RecommendationOptionsEstimatedMonthlySavingsValue"),
    EffectiveRecommendationPreferencesCpuVendorArchitectures("EffectiveRecommendationPreferencesCpuVendorArchitectures"),
    EffectiveRecommendationPreferencesEnhancedInfrastructureMetrics("EffectiveRecommendationPreferencesEnhancedInfrastructureMetrics"),
    EffectiveRecommendationPreferencesInferredWorkloadTypes("EffectiveRecommendationPreferencesInferredWorkloadTypes"),
    InferredWorkloadTypes("InferredWorkloadTypes"),
    RecommendationOptionsMigrationEffort("RecommendationOptionsMigrationEffort");

    // Wire value of the enum constant. Made final: it is assigned once in the
    // constructor and never mutated.
    private final String value;

    private ExportableInstanceField(String value) {
        this.value = value;
    }

    /** Returns the wire value of this enum constant. */
    @Override
    public String toString() {
        return this.value;
    }

    /**
     * Use this in place of valueOf.
     *
     * @param value
     *        real value
     * @return ExportableInstanceField corresponding to the value
     *
     * @throws IllegalArgumentException
     *         If the specified value does not map to one of the known values in this enum.
     */
    public static ExportableInstanceField fromValue(String value) {
        if (value == null || "".equals(value)) {
            throw new IllegalArgumentException("Value cannot be null or empty!");
        }

        // Linear scan over the constants, matching on the wire value.
        for (ExportableInstanceField enumEntry : ExportableInstanceField.values()) {
            if (enumEntry.toString().equals(value)) {
                return enumEntry;
            }
        }

        throw new IllegalArgumentException("Cannot create enum from " + value + " value!");
    }
}
| aws/aws-sdk-java | aws-java-sdk-computeoptimizer/src/main/java/com/amazonaws/services/computeoptimizer/model/ExportableInstanceField.java | Java | apache-2.0 | 6,086 |
/*******************************************************************************
*
* Pentaho Mondrian Test Compatibility Kit
*
* Copyright (C) 2013-2014 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.mondrian.tck;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;
import mondrian.rolap.RolapUtil;
import org.apache.log4j.Logger;
import com.google.common.base.Function;
/**
 * Encapsulates a SQL query to execute together with the expectations
 * (column names, column types, and row values) to verify against the
 * query's {@link ResultSet}. Instances are created via {@link #newBuilder()}.
 */
public class SqlExpectation {
  // SQL text is logged through Mondrian's dedicated SQL logger.
  static final Logger LOGGER = RolapUtil.SQL_LOGGER;
  // Provider that executes the query and hands back its ResultSet.
  final ResultSetProvider query;
  // Expected column names; may be null when column names are not checked.
  final String[] columns;
  // True when `columns` lists only part of the result set's columns.
  final boolean columnsPartial;
  // Expected rows, pipe ('|') delimited values; may be null.
  final String[] rows;
  // True when `rows` lists only the first rows of the result set.
  final boolean partial;
  // Expected column types, as java.sql.Types constants; may be null.
  final int[] types;
  // Callbacks applied to the Statement before the query is executed.
  List<Function<Statement, Void>> statementModifiers;
  // Cancellation timeout; -1 when unset. NOTE(review): units/consumer are
  // defined by callers of this field, not visible here -- confirm before
  // relying on a specific interpretation.
  final int cancelTimeout;
  // Delegate that performs the actual column/row validation in verify().
  final ResultSetValidator validator;
  /**
   * Creates a SqlExpectation. Prefer {@link #newBuilder()} over calling
   * this constructor directly.
   */
  public SqlExpectation(
      ResultSetProvider query,
      String[] columns,
      boolean columnsPartial,
      int[] types,
      String[] rows,
      boolean partial,
      int cancelTimeout,
      final List<Function<Statement, Void>> statementModifiers ) {
    this.query = query;
    this.columns = columns;
    this.columnsPartial = columnsPartial;
    this.types = types;
    this.rows = rows;
    this.partial = partial;
    this.cancelTimeout = cancelTimeout;
    this.statementModifiers = statementModifiers;
    this.validator = new ResultSetValidator( columns, columnsPartial, rows, partial, types );
  }
  /**
   * Validates the given result set against the configured column and row
   * expectations, delegating to {@link ResultSetValidator}.
   *
   * @param rs the result set produced by executing {@link #query}
   * @throws Exception if validation fails or the result set cannot be read
   */
  public void verify( ResultSet rs ) throws Exception {
    // Validate column names
    validator.validateColumns( rs );
    // Validate rows
    validator.validateRows( rs );
  }
  /** Returns a fresh builder for assembling a {@link SqlExpectation}. */
  public static Builder newBuilder() {
    return new Builder();
  }
  /** Fluent builder for {@link SqlExpectation}. */
  public static class Builder {
    private ResultSetProvider query;
    private String[] columns;
    private boolean columnsPartial;
    private String[] rows;
    private int[] types;
    private int cancelTimeout = -1;
    private boolean partial = false;
    private List<Function<Statement, Void>> statementModifiers = new ArrayList<>();
    private Builder() {
    }
    /**
     * Sets the {@link ResultSetProvider} to run.
     * <p>(mandatory)
     */
    public Builder query( ResultSetProvider query ) {
      this.query = query;
      return this;
    }
    /**
     * Sets the SQL query to run.
     * <p>(mandatory)
     */
    public Builder query( final String query ) {
      // Wrap the SQL string in a provider that applies the registered
      // statement modifiers, executes the query, and surfaces failures
      // with the offending SQL attached.
      return query( new ResultSetProvider() {
        @Override
        public ResultSet getData( Connection conn, final Statement statement ) throws Exception {
          for ( Function<Statement, Void> statementModifier : statementModifiers ) {
            statementModifier.apply( statement );
          }
          try {
            // Run the query
            SqlExpectation.LOGGER.info( "Mondrian.tck:" + query );
            statement.execute( query );
          } catch ( Throwable t ) {
            throw new Exception(
              "Query failed to run successfully:\n"
              + query,
              t );
          }
          return statement.getResultSet();
        }
      } );
    }
    /**
     * Sets the column names expected
     * <p>(optional)
     */
    public Builder columns( String... columns ) {
      this.columns = columns;
      return this;
    }
    /**
     * Sets whether the columns provided in {@link #columns(String[])} are only the
     * part of the columns of the result set.
     * <p>(optional)
     */
    public Builder columnsPartial() {
      this.columnsPartial = true;
      return this;
    }
    /**
     * Sets the expected column types. Use values in {@link java.sql.Types}.
     * <p>(optional)
     */
    public Builder types( int... types ) {
      this.types = types;
      return this;
    }
    /**
     * Sets the expected rows. The value delimiter is pipe ( '|' ).
     * <p>(optional)
     */
    public Builder rows( String... rows ) {
      this.rows = rows;
      return this;
    }
    /**
     * Sets whether the rows provided in {@link #rows(String[])} are only the
     * first rows of the result set and we didn't intend to validate them all.
     */
    public Builder partial() {
      this.partial = true;
      return this;
    }
    /**
     * adds a function that will be run for the statement before execution
     */
    public Builder modifyStatement( Function<Statement, Void> statementModifier ) {
      statementModifiers.add( statementModifier );
      return this;
    }
    /** Sets the cancellation timeout (see {@code cancelTimeout} field). */
    public Builder cancelTimeout( int to ) {
      this.cancelTimeout = to;
      return this;
    }
    /** Assembles the immutable {@link SqlExpectation}. */
    public SqlExpectation build() {
      return new SqlExpectation( query, columns, columnsPartial, types, rows, partial, cancelTimeout, statementModifiers );
    }
  }
  /**
   * This interface has to be implemented to provide a ResultSet to validate to
   * the Expectation classes.
   *
   * <p>There are two arguments to the API, one for the connection and one for the
   * statement. Note that this is required because the statements provided by the shims
   * are not symmetrical. The bug can be represented as:
   *
   * <p><code>connection != connection.createStatement().getConnection()</code>
   */
  public interface ResultSetProvider {
    /**
     * Returns {@link java.sql.ResultSet} executed by the {@link java.sql.Statement}<br/>
     * <p>
     * Code should be like <br/>
     * {@code statement.<doSomething>; return statement.getResultSet();}
     * </p>
     */
    ResultSet getData( Connection conn, Statement statement ) throws Exception;
  }
}
| pentaho/mondrian-tck | src/main/java/org/pentaho/mondrian/tck/SqlExpectation.java | Java | apache-2.0 | 6,275 |
/**
* @license
* Copyright 2021 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================================
*/
import {Complex, ComplexInputs, KernelConfig, KernelFunc, TensorInfo} from '@tensorflow/tfjs-core';
import {WebGPUBackend} from '../backend_webgpu';
import {identity} from './Identity';
/**
* Complex tensors share data with their real and imaginary components. Complex
* tensors' reference to the components is tracked by refCount on the individual
* component. The refCounts are increased by the identity call.
*
* When a complex tensor is disposed, it will reduce the refCount on the
* components by calling disposeData on each.
*/
/**
 * Creates a complex64 tensor from its real and imaginary components.
 *
 * The components are passed through identity(), which increments their
 * refCounts so their data is shared with (not copied into) the complex
 * tensor. The component TensorInfos are recorded on the complex tensor's
 * backing data so they can be released when the complex tensor is disposed.
 */
export function complex(args: {inputs: ComplexInputs, backend: WebGPUBackend}):
    TensorInfo {
  const {backend, inputs} = args;
  const {real, imag} = inputs;

  // Allocate metadata for the complex output, then fetch its backing data
  // record so the component references can be attached to it.
  const outInfo = backend.makeTensorInfo(real.shape, 'complex64');
  const outData = backend.tensorMap.get(outInfo.dataId);

  outData.complexTensorInfos = {
    real: identity({inputs: {x: real}, backend}),
    imag: identity({inputs: {x: imag}, backend})
  };

  return outInfo;
}
// Kernel registration record binding the core Complex kernel name to this
// webgpu implementation. The double cast is needed because complex's
// argument type is narrower than the generic KernelFunc signature.
export const complexConfig: KernelConfig = {
  kernelName: Complex,
  backendName: 'webgpu',
  kernelFunc: complex as {} as KernelFunc
};
| tensorflow/tfjs | tfjs-backend-webgpu/src/kernels/Complex.ts | TypeScript | apache-2.0 | 1,916 |
/*************************************************************************
ListeIdentifiants - Symbole de l'analyseur
-------------------
début : 8 mars 2016 08:23:14
copyright : (C) 2016 par H4112
*************************************************************************/
//---------- Réalisation de la classe <ListeIdentifiants> (fichier ListeIdentifiants.cpp) --
//---------------------------------------------------------------- INCLUDE
//-------------------------------------------------------- Include système
#include <iostream>
using namespace std;
//------------------------------------------------------ Include personnel
#include "ListeIdentifiants.h"
//------------------------------------------------------------- Constantes
//---------------------------------------------------- Variables de classe
//----------------------------------------------------------- Types privés
//----------------------------------------------------------------- PUBLIC
//-------------------------------------------------------- Fonctions amies
//----------------------------------------------------- Méthodes publiques
// Wraps the identifier in a new Variable declaration and registers it in
// this declaration list. The input identifier is copied by Variable's
// constructor, so the original is deleted here (this method takes
// ownership of `id`).
void ListeIdentifiants::AjouterVariable ( Identifiant * id )
{
    ajouterDeclaration(new Variable(*id));
    delete id;
}
//------------------------------------------------- Surcharge d'opérateurs
//-------------------------------------------- Constructeurs - destructeur
// Constructor: builds an identifier-list symbol, tagging the base
// ListeDeclaration with the LID symbol kind.
ListeIdentifiants::ListeIdentifiants ( )
    : ListeDeclaration( LID )
{
#ifdef MAP
    cout << "Appel au constructeur de <ListeIdentifiants>" << endl;
#endif
} //----- End of ListeIdentifiants constructor
// Destructor: no resources owned directly at this level; only traces the
// call when the MAP debugging macro is defined.
ListeIdentifiants::~ListeIdentifiants ( )
{
#ifdef MAP
    cout << "Appel au destructeur de <ListeIdentifiants>" << endl;
#endif
} //----- End of ~ListeIdentifiants
//------------------------------------------------------------------ PRIVE
//----------------------------------------------------- Méthodes protégées
//------------------------------------------------------- Méthodes privées
| H4112/GrammairesLangages | rendu/src/symboles/ListeIdentifiants.cpp | C++ | apache-2.0 | 2,091 |
/*
* Copyright 2019 Baidu, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.baidubce.services.tsdb.model;
import java.util.List;
import com.google.common.collect.Lists;
/**
* Represent the Query for querying datapoints from Tsdb.
*/
public class Query {

    /**
     * Required.
     * The metric name.
     */
    private String metric;

    /**
     * Optional.
     * The field name, conflict with fields parameter.
     */
    private String field;

    /**
     * Optional.
     * The field names, conflict with field parameter.
     */
    private List<String> fields;

    /**
     * Optional.
     * The tag keys.
     */
    private List<String> tags;

    /**
     * Required.
     * The filters.
     */
    private Filters filters;

    /**
     * Optional.
     * GroupBy list.
     */
    private List<GroupBy> groupBy;

    /**
     * Optional.
     * The limit number of datapoints that Tsdb return. When not set this field, the default number of
     * datapoints that Tsdb return is 200000.
     */
    private Integer limit;

    /**
     * Optional.
     * The start offset of datapoints that Tsdb return. Default: 0.
     */
    private Integer offset;

    /**
     * Optional.
     * Aggregator list.
     */
    private List<Aggregator> aggregators;

    /**
     * Optional.
     * The order: Asc or Desc, default is Asc.
     */
    private String order;

    /**
     * Optional.
     * The fill for interpolation.
     */
    private Fill fill;

    /**
     * Optional.
     * The marker which is used to get data started from.
     * It should be the value of nextMarker of previous query request's result or null for the first query request.
     */
    private String marker;

    // ------------------------------------------------------------------
    // Plain getters and setters for JSON (de)serialization.
    // ------------------------------------------------------------------

    public String getMarker() {
        return marker;
    }
    public void setMarker(String marker) {
        this.marker = marker;
    }
    public Fill getFill() {
        return fill;
    }
    public void setFill(Fill fill) {
        this.fill = fill;
    }
    public String getMetric() {
        return metric;
    }
    public void setMetric(String metric) {
        this.metric = metric;
    }
    public String getField() {
        return field;
    }
    public void setField(String field) {
        this.field = field;
    }
    public List<String> getFields() {
        return fields;
    }
    public void setFields(List<String> fields) {
        this.fields = fields;
    }
    public List<String> getTags() {
        return tags;
    }
    public void setTags(List<String> tags) {
        this.tags = tags;
    }
    public Filters getFilters() {
        return filters;
    }
    public void setFilters(Filters filters) {
        this.filters = filters;
    }
    public List<GroupBy> getGroupBy() {
        return groupBy;
    }
    public void setGroupBy(List<GroupBy> groupBy) {
        this.groupBy = groupBy;
    }
    public Integer getLimit() {
        return limit;
    }
    public void setLimit(Integer limit) {
        this.limit = limit;
    }
    public Integer getOffset() {
        return offset;
    }
    public void setOffset(Integer offset) {
        this.offset = offset;
    }
    public List<Aggregator> getAggregators() {
        return aggregators;
    }
    public void setAggregators(List<Aggregator> aggregators) {
        this.aggregators = aggregators;
    }
    public String getOrder() {
        return order;
    }
    public void setOrder(String order) {
        this.order = order;
    }

    // ------------------------------------------------------------------
    // Fluent "with" variants of the setters above; each returns `this`
    // so queries can be assembled in a single chained expression.
    // ------------------------------------------------------------------

    public Query withMarker(String marker) {
        this.marker = marker;
        return this;
    }
    public Query withFill(Fill fill) {
        this.fill = fill;
        return this;
    }
    public Query withMetric(String metric) {
        this.metric = metric;
        return this;
    }
    public Query withField(String field) {
        this.field = field;
        return this;
    }
    public Query withFields(List<String> fields) {
        this.fields = fields;
        return this;
    }
    public Query withTags(List<String> tags) {
        this.tags = tags;
        return this;
    }
    public Query withFilters(Filters filters) {
        this.filters = filters;
        return this;
    }
    public Query withGroupBy(List<GroupBy> groupBy) {
        this.groupBy = groupBy;
        return this;
    }
    public Query withLimit(int limit) {
        this.limit = limit;
        return this;
    }
    public Query withOffset(int offset) {
        this.offset = offset;
        return this;
    }
    public Query withAggregators(List<Aggregator> aggregators) {
        this.aggregators = aggregators;
        return this;
    }
    public Query withOrder(String order) {
        this.order = order;
        return this;
    }

    /**
     * Appends a single aggregator, lazily creating the aggregator list on
     * first use. Returns {@code this} for chaining.
     */
    public Query addAggregator(Aggregator aggregator) {
        initialAggregators();
        this.aggregators.add(aggregator);
        return this;
    }

    // Lazily initializes the aggregator list so an empty Query serializes
    // without an aggregators field.
    private void initialAggregators() {
        if (aggregators == null) {
            aggregators = Lists.newArrayList();
        }
    }

    /**
     * Appends a single groupBy entry, lazily creating the groupBy list on
     * first use. Returns {@code this} for chaining.
     */
    public Query addGroupBy(GroupBy groupBy) {
        initialGroupBy();
        this.groupBy.add(groupBy);
        return this;
    }

    // Lazily initializes the groupBy list so an empty Query serializes
    // without a groupBy field.
    private void initialGroupBy() {
        if (groupBy == null) {
            groupBy = Lists.newArrayList();
        }
    }
}
| baidubce/bce-sdk-java | src/main/java/com/baidubce/services/tsdb/model/Query.java | Java | apache-2.0 | 5,848 |
/*
Copyright 2015 Hendrik Saly
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package de.saly.es.example.audit.plugin;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.plugins.PluginsService;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.elasticsearch.test.ElasticsearchIntegrationTest.ClusterScope;
import org.elasticsearch.test.ElasticsearchIntegrationTest.Scope;
import org.elasticsearch.test.hamcrest.ElasticsearchAssertions;
import org.junit.Assert;
/**
 * Base class for this plugin's integration tests. Spins up a per-test
 * Elasticsearch cluster (3 data nodes, 2 client nodes) configured to load
 * plugins from the classpath and to serve HTTP with CORS enabled.
 */
@ClusterScope(scope = Scope.TEST, numDataNodes = 3, numClientNodes = 2)
public abstract class AbstractUnitTest extends ElasticsearchIntegrationTest {

    // Every test index uses 3 shards with 1 replica.
    @Override
    public Settings indexSettings() {
        final ImmutableSettings.Builder builder = ImmutableSettings.builder();
        builder.put(SETTING_NUMBER_OF_SHARDS, 3);
        builder.put(SETTING_NUMBER_OF_REPLICAS, 1);
        return builder.build();
    }

    // Node-level settings: no gateway persistence, classpath plugin loading,
    // dynamic scripting enabled, and HTTP (with CORS) switched on.
    @Override
    protected Settings nodeSettings(final int nodeOrdinal) {
        return ImmutableSettings.settingsBuilder().put(super.nodeSettings(nodeOrdinal)).put("gateway.type", "none")
                .put("plugins." + PluginsService.LOAD_PLUGIN_FROM_CLASSPATH, true).put("script.disable_dynamic", false)
                //.put("node.local", false)
                .put("http.cors.enabled", true).put("http.enabled", true).build();
    }

    // Transport clients must also load classpath plugins so the plugin under
    // test is available on the client side.
    @Override
    protected Settings transportClientSettings() {
        return ImmutableSettings.builder().put("plugins.load_classpath_plugins", true).build();
    }

    /**
     * Asserts the search hit count, failing with the supplied message plus
     * shard status details when the count does not match.
     *
     * @param msg              context appended to the failure message
     * @param searchResponse   response whose total hit count is checked
     * @param expectedHitCount expected number of total hits
     */
    public static void assertHitCountWithMsg(final String msg, final SearchResponse searchResponse, final long expectedHitCount) {
        if (searchResponse.getHits().totalHits() != expectedHitCount) {
            Assert.fail("Hit count is " + searchResponse.getHits().totalHits() + " but " + expectedHitCount + " was expected. "
                    + ElasticsearchAssertions.formatShardStatus(searchResponse) + " due to " + msg);
        }
        ElasticsearchAssertions.assertVersionSerializable(searchResponse);
    }
}
| salyh/elasticsearch-sample-plugin-audit | src/test/java/de/saly/es/example/audit/plugin/AbstractUnitTest.java | Java | apache-2.0 | 2,851 |
from collections import defaultdict
from copy import deepcopy
from geopy.geocoders import Nominatim
import Util
import twitter
import json
import time
import string
import stop_words
geolocator = Nominatim()
STOP_WORDS = stop_words.get_stop_words('english')
api = twitter.Api(consumer_key='b170h2arKC4VoITriN5jIjFRN',
consumer_secret='z2npapLunYynvp9E783KsTiTMUR4CE6jgGIFqXOdzmXNkYI7g9',
access_token_key='3842613073-L7vq82QRYRGCbO1kzN9bYfjfbbV7kOpWWLYnBGG',
access_token_secret='FU6AJWG4iDHfzQWhjKB1r3SIwoyzTcgFe0LjyNfq8r6aR')
# Module-level caches of previously fetched results, keyed by query string
# and by username respectively. The functions below declare these names
# `global` before rebinding them.
# Bug fix: `global cached_query_results = {}` is a SyntaxError -- the
# `global` statement cannot be combined with an assignment, and is
# meaningless at module scope anyway. Plain assignments suffice.
cached_query_results = {}
cached_user_results = {}
def search_tweets(query, max_searches=5, override_cache=False):
"""Searches for tweets that match query.
Args:
query: The search query string. Can be a phrase or hashtag.
See https://dev.twitter.com/rest/reference/get/search/tweets
max_searches: The maximum number of API searches that will be
executed for the given query. Default value is 5 searches.
100 tweets can be obtained per API search, so by default
a maximum of 500 tweets will be returned.
override_cache: Whether to execute a search even if there is
already a cached result for the query. Defaults to False.
Returns:
A list of tweet objects matching the query with most recent
tweets first.
Raises:
UserWarning: If override_cache is set to False and result for
input query has already been cached.
"""
if query in cached_query_results and override_cache is not False:
raise UserWarning('input query {0} is already in '
'cached_query_results'.format(query))
remaining_timeout = api.GetSleepTime('/search/tweets')
if remaining_timeout != 0:
print ('searchTweets() must wait {0} seconds in order to not exceed '
'the Twitter API rate limit.'.format(remaining_timeout + 1))
time.sleep(remaining_timeout + 1)
result = []
search_result = api.GetSearch(term=query, count=100) # could also add lang='en'
result.extend(search_result)
oldest_tweet_id = min([t.GetId() for t in search_result])
num_searches = 1
while len(search_result) == 100 and num_searches < max_searches:
search_result = _search_tweets_aux(query, oldest_tweet_id)
oldest_tweet_id = min([t.GetId() for t in search_result])
result.extend(search_result)
num_searches += 1
global cached_query_results
cached_query_results[query] = result
return result
def _search_tweets_aux(query, max_tweet_id):
    """Fetches the next (older) page of up to 100 tweets for search_tweets.

    Sleeps as needed to respect the Twitter API rate limit, then requests
    tweets with ids strictly smaller than max_tweet_id.
    """
    wait_seconds = api.GetSleepTime('/search/tweets')
    if wait_seconds != 0:
        print ('searchTweets() must wait {0} seconds in order to not exceed '
               'the Twitter API rate limit.'.format(wait_seconds + 1))
        time.sleep(wait_seconds + 1)
    return api.GetSearch(term=query, count=100, max_id=max_tweet_id - 1)
def get_coordinate_list(tweets):
    """Gets list of (longitude, latitude) tuples for tweets in list.

    Args:
        tweets: List of tweet objects to extract geo coordinates from.
            Tweets for which coordinates cannot be extracted are skipped.

    Returns:
        List of (longitude, latitude) tuples for tweets in list.
    """
    # get_coordinates returns False when it cannot resolve a location, so
    # filtering on truthiness drops exactly the unresolvable tweets.
    return [coords
            for coords in (get_coordinates(tweet) for tweet in tweets)
            if coords]
def get_coordinates(tweet):
    """Gets longitude and latitude of tweet.

    Args:
        tweet: The tweet object to extract geo coordinates from.

    Returns:
        Tuple of (longitude, latitude) for the input tweet. Returns
        False if unable to extract geo coordinates for tweet.
    """
    # try to get tweet geo coordinates directly if available
    coordinates = tweet.GetCoordinates()
    if coordinates:
        return coordinates

    # otherwise parse geo coordinates from the user's free-form location
    # string if available. NOTE: this issues a network request to the
    # Nominatim geocoding service and may be slow or return None for
    # locations it cannot resolve.
    location = tweet.user.location
    if location:
        coordinates = geolocator.geocode(location)
        if coordinates:
            return coordinates.longitude, coordinates.latitude

    # not able to extract geo coordinates, so return False
    return False
def no_duplicate_tweets(tweets):
    """Returns True iff no two tweets in the input list share an id."""
    seen_ids = set()
    for tweet in tweets:
        current_id = tweet.GetId()
        if current_id in seen_ids:
            return False
        seen_ids.add(current_id)
    return True
def tweets_to_text_strings(tweets):
    """Converts list of tweets to list of tweet text strings."""
    texts = []
    for tweet in tweets:
        texts.append(tweet.GetText())
    return texts
def tweets_to_word_counter(tweets, normalize=False, lowercase=True):
    """Converts list of tweets to dict of word counts.

    Args:
        tweets: List of tweet objects to process.
        normalize: Whether to return frequencies instead of counts.
            Default value is False (return counts).
        lowercase: Whether to convert all words to lowercase.
            Default value is True.

    Returns:
        Util.Counter object containing counts of words in the tweets.
        Words are keys, counts are values. If normalize is set to True,
        then function will return word frequencies as values.
    """
    # Bug fixes relative to the original:
    #  * the module is imported as `Util` (capital U) at the top of the
    #    file, but the body referenced undefined `util`, raising NameError
    #    at call time;
    #  * the `lowercase` parameter was accepted but never forwarded, so it
    #    had no effect.
    word_counter = Util.Counter()
    for tweet in tweets:
        word_counter += string_to_nonstopword_counter(tweet.GetText(),
                                                      lowercase=lowercase)
    if normalize:
        word_counter.normalize()
    return word_counter
def string_to_nonstopword_list(text):
    """Returns list of non-stopwords in string.

    Args:
        text: The string to process.

    Returns:
        List of non-stopwords in text string. Punctuation, whitespace,
        and hyperlinks are removed. Hashtag and @USERNAME punctuation
        is not removed. Words containing non-printable characters are
        also dropped.
    """
    # split string into words and remove whitespace:
    words = text.split()
    # remove non-hashtag and non-username punctuation. (The original
    # deepcopy of string.punctuation was unnecessary -- strings are
    # immutable.)
    chars_to_remove = string.punctuation.replace('#', '').replace('@', '')
    words = [word.strip(chars_to_remove) for word in words]
    # remove empty strings:
    words = [word for word in words if word]
    # remove stopwords:
    words = [w for w in words if w.lower() not in STOP_WORDS]
    # remove hyperlinks. Bug fix: the original compared the 9-character
    # slice w[0:9] against the 8-character prefix 'https://', which can
    # only be equal for the bare string 'https://' itself, so real links
    # were never filtered out.
    words = [w for w in words if not w.startswith('https://')]
    # remove words containing non-printable (non-ascii) characters:
    to_return = []
    for word in words:
        if all(char in string.printable for char in word):
            to_return.append(word)
    return to_return
def string_to_nonstopword_counter(text, lowercase=True):
    """Converts string to Util.Counter of non-stopwords in text string.

    Args:
        text: The string to process.
        lowercase: Whether to convert the words in the string to lowercase.

    Returns:
        Util.Counter object containing counts of non-stopwords in string.
        Punctuation, whitespace, and hyperlinks are removed. Hashtag
        and @USERNAME punctuation is not removed.
    """
    words = string_to_nonstopword_list(text)
    # Bug fix: the module is imported as `Util`, not `util`; the original
    # reference raised NameError at call time.
    word_counter = Util.Counter()
    for word in words:
        if lowercase:
            word = word.lower()
        word_counter[word] += 1
    return word_counter
def get_user_tweets(username, max_searches=5, override_cache=False):
    """Searches for tweets that match query.

    Args:
        username: The username of the Twitter account that tweets will
            be downloaded for.
        max_searches: The maximum number of API searches that will be
            executed for the given user. Default value is 5 searches.
            200 tweets can be obtained per API search, so by default
            a maximum of 1000 tweets will be returned.
        override_cache: Whether to execute a search even if there is
            already a cached result for the specified Twitter user.
            Defaults to False.

    Returns:
        A list of tweet objects corresponding to the specified users's
        public tweets, with their most recent tweets first.

    Raises:
        UserWarning: If the user is already cached and override_cache
            is False.
    """
    global cached_user_results
    # Guard against redundant API calls: only proceed for an already-cached
    # user when the caller explicitly asked to override the cache.
    # BUGFIX: the original condition was inverted (it raised exactly when
    # override_cache WAS set) and the message formatted an undefined name
    # `query`, which made this branch a guaranteed NameError.
    if username in cached_user_results and not override_cache:
        raise UserWarning('input username {0} is already in '
                          'cached_user_results'.format(username))
    # Respect the Twitter API rate limit before the first request.
    remaining_timeout = api.GetSleepTime('/search/tweets')  # might need to change this
    if remaining_timeout != 0:
        print ('searchTweets() must wait {0} seconds in order to not exceed '
               'the Twitter API rate limit.'.format(remaining_timeout + 1))
        time.sleep(remaining_timeout + 1)
    result = []
    search_result = api.GetUserTimeline(screen_name=username, count=200)  # could also add lang='en'
    result.extend(search_result)
    num_searches = 1
    # Page backwards through the timeline (200 tweets per page) until a
    # short page is returned or the search budget is exhausted.
    # BUGFIX: oldest_tweet_id is now computed only when a full page was
    # returned, so a user with zero tweets no longer crashes min() on an
    # empty sequence.
    while len(search_result) == 200 and num_searches < max_searches:
        oldest_tweet_id = min(t.GetId() for t in search_result)
        search_result = _get_user_tweets_aux(username, oldest_tweet_id)
        if not search_result:
            break
        result.extend(search_result)
        num_searches += 1
    cached_user_results[username] = result
    return result
def _get_user_tweets_aux(username, max_tweet_id):
    """Auxiliary helper function for search_tweets."""
    # Respect the Twitter API rate limit before issuing the request.
    wait_seconds = api.GetSleepTime('/search/tweets')
    if wait_seconds != 0:
        print ('searchTweets() must wait {0} seconds in order to not exceed '
               'the Twitter API rate limit.'.format(wait_seconds + 1))
        time.sleep(wait_seconds + 1)
    # Fetch the page of up to 200 tweets strictly older than max_tweet_id.
    return api.GetUserTimeline(screen_name=username, count=200,
                               max_id=max_tweet_id - 1)
def split_words_hashtags_usermentions(word_counter):
    """Splits all words into words, hashtags, and usermentions counters."""
    pure_word_counter = util.Counter()
    hashtag_counter = util.Counter()
    usermentions_counter = util.Counter()
    # Route each entry by its leading character: '#' -> hashtag,
    # '@' -> user mention, anything else -> plain word.
    for word, count in word_counter.items():
        target = pure_word_counter
        if word[0] == '#':
            target = hashtag_counter
        elif word[0] == '@':
            target = usermentions_counter
        target[word] = count
    return pure_word_counter, hashtag_counter, usermentions_counter
| ZacWilson047/TwitterProject | TwitterModule.py | Python | apache-2.0 | 10,730 |
/*
* Copyright © 2009 HotPads (admin@hotpads.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.datarouter.tasktracker.storage;
import java.util.List;
import javax.inject.Inject;
import javax.inject.Singleton;
import io.datarouter.instrumentation.task.TaskTrackerBatchDto;
import io.datarouter.scanner.Scanner;
import io.datarouter.storage.Datarouter;
import io.datarouter.storage.client.ClientId;
import io.datarouter.storage.dao.BaseDao;
import io.datarouter.storage.dao.BaseRedundantDaoParams;
import io.datarouter.storage.node.factory.NodeFactory;
import io.datarouter.storage.node.op.combo.SortedMapStorage;
import io.datarouter.storage.node.op.combo.SortedMapStorage.SortedMapStorageNode;
import io.datarouter.storage.tag.Tag;
import io.datarouter.tasktracker.storage.LongRunningTask.LongRunningTaskFielder;
import io.datarouter.util.tuple.Range;
import io.datarouter.virtualnode.redundant.RedundantSortedMapStorageNode;
/**
 * DAO for {@link LongRunningTask} records. Wraps a sorted map storage node
 * (redundant across clients when more than one is configured) registered with
 * Datarouter, and exposes scan/delete operations over the table.
 */
@Singleton
public class LongRunningTaskDao extends BaseDao{

    /** Params holder selecting which client(s) back this DAO. */
    public static class LongRunningTaskDaoParams extends BaseRedundantDaoParams{

        public LongRunningTaskDaoParams(List<ClientId> clientId){
            super(clientId);
        }

    }

    /** Backing node; redundant when multiple clients are configured. */
    private final SortedMapStorageNode<LongRunningTaskKey,LongRunningTask,LongRunningTaskFielder> node;

    @Inject
    public LongRunningTaskDao(Datarouter datarouter, NodeFactory nodeFactory, LongRunningTaskDaoParams params){
        super(datarouter);
        // Build one physical node per client, then collapse them into a single
        // redundant node when more than one client is configured.
        node = Scanner.of(params.clientIds)
                .map(clientId -> {
                    SortedMapStorageNode<LongRunningTaskKey,LongRunningTask,LongRunningTaskFielder> node =
                            nodeFactory.create(clientId, LongRunningTask::new, LongRunningTaskFielder::new)
                            .withTag(Tag.DATAROUTER)
                            .disableNodewatchPercentageAlert()
                            .build();
                    return node;
                })
                .listTo(RedundantSortedMapStorageNode::makeIfMulti);
        datarouter.register(node);
    }

    public SortedMapStorage<LongRunningTaskKey,LongRunningTask> getNode(){
        return node;
    }

    /** Scans every long-running task. */
    public Scanner<LongRunningTask> scan(){
        return node.scan();
    }

    /** Scans tasks within the given key range. */
    public Scanner<LongRunningTask> scan(Range<LongRunningTaskKey> range){
        return node.scan(range);
    }

    public Scanner<LongRunningTask> scanWithPrefix(LongRunningTaskKey prefix){
        return node.scanWithPrefix(prefix);
    }

    public Scanner<LongRunningTaskKey> scanKeysWithPrefix(LongRunningTaskKey prefix){
        return node.scanKeysWithPrefix(prefix);
    }

    /** Deletes the given keys in storage-efficient batches. */
    public void deleteBatched(Scanner<LongRunningTaskKey> keys){
        node.deleteBatched(keys);
    }

    /** Streams every task as instrumentation DTO batches of the given size. */
    public Scanner<TaskTrackerBatchDto> scanAll(int batchSize){
        // probably not necessary to scan the whole table and send everything
        // TODO figure out what data we want to send and add some filtering
        return node.scan()
                .map(LongRunningTask::toDto)
                .batch(batchSize)
                .map(TaskTrackerBatchDto::new);
    }

}
| hotpads/datarouter | datarouter-task-tracker/src/main/java/io/datarouter/tasktracker/storage/LongRunningTaskDao.java | Java | apache-2.0 | 3,299 |
package com.example.tests;
import static org.junit.Assert.assertThat;
import static org.hamcrest.Matchers.*;
import java.util.Random;
import org.testng.annotations.Test;
import com.example.utils.SortedListOf;
/**
 * Verifies that modifying a randomly chosen group updates the model to the
 * old list minus the modified entry plus the new group data, and optionally
 * cross-checks the model against the database and the UI.
 */
public class GroupModificationTest extends TestBase {

    @Test(dataProvider = "randomValidGroupGenerator")
    public void modifySomeGroup(GroupData group) {
        // save old state
        SortedListOf<GroupData> oldList = new SortedListOf<GroupData>(app
                .getModel().getGroups());

        // Pick a random existing group index to modify.
        // BUGFIX: nextInt(size - 1) could never select the last group and
        // threw IllegalArgumentException when exactly one group existed;
        // nextInt(size) covers every valid index 0..size-1.
        Random rnd = new Random();
        int index = rnd.nextInt(oldList.size());

        // actions
        app.getGroupHelper().modifyGroup(index, group);

        // save new state
        SortedListOf<GroupData> newList = new SortedListOf<GroupData>(app
                .getModel().getGroups());

        // compare states
        assertThat(newList, equalTo(oldList.without(index).withAdded(group)));

        if (wantToCheck()) {
            if ("yes".equals(app.getProperty("check.db"))) {
                assertThat(app.getModel().getGroups(), equalTo(app
                        .getHibernateHelper().listGroups()));
            }
            if ("yes".equals(app.getProperty("check.ui"))) {
                assertThat(app.getModel().getGroups(), equalTo(app
                        .getGroupHelper().getUiGroups()));
            }
        }
    }
}
| mradkevich/JavaForQA | src/com/example/tests/GroupModificationTest.java | Java | apache-2.0 | 1,184 |
import {HttpJsonResponse, HttpUploadOptions} from '../type'
import {HttpResponseUnexpectedStatusError, HttpResponseBodyParseError} from './error'
import * as http from '../service/http'
// Best-effort read of a response body as text. Resolves to null when the
// body cannot be read (e.g. the stream was already consumed or errored).
const getBodyTextIfPossible = (response: Response): Promise<string | null> =>
  response.text().catch(() => null)
// Validates an HTTP response and parses its body as JSON.
// Throws HttpResponseUnexpectedStatusError on non-2xx status and
// HttpResponseBodyParseError when the body is not valid JSON; both carry
// the raw body text when it can still be read.
const processResponse = async (response: Response): Promise<HttpJsonResponse> => {
  if (!response.ok) {
    throw new HttpResponseUnexpectedStatusError(
      '2xx',
      response,
      await getBodyTextIfPossible(response)
    )
  }

  let json
  try {
    json = await response.json()
  } catch (err) {
    throw new HttpResponseBodyParseError(
      err.message,
      response,
      await getBodyTextIfPossible(response)
    )
  }

  return {json, http: response}
}
/**
 * Issues an HTTP request and parses the response as JSON.
 * Defaults to GET with a JSON content-type header; a provided body is
 * serialized with JSON.stringify before sending.
 */
export const fetch = async (
  url: string,
  options: {
    headers?: {[header: string]: string}
    method?: string
    body?: any
  } = {}
): Promise<HttpJsonResponse> => {
  const fetchOptions: RequestInit = {
    method: options.method || 'GET',
    headers: new http.Headers(options.headers || {'Content-Type': 'application/json'})
  }
  if (options.body) {
    fetchOptions.body = JSON.stringify(options.body)
  }

  const response = await http.fetch(url, fetchOptions)
  return processResponse(response)
}
// Performs an upload through the http service and normalizes the result
// into an HttpJsonResponse (same validation/parsing as fetch()).
export const upload = async (options: HttpUploadOptions) => {
  const response = await http.upload(options)
  return processResponse(response)
}
/**
 * Like fetch(), but retries on failure.
 * Makes up to `retries` total attempts; when every attempt fails, the error
 * from the final attempt is rethrown.
 */
export const fetchWithRetry = async (
  url: string,
  options: {
    headers?: {[header: string]: string}
    method?: string
    body?: any
  } = {},
  retries = 3
): Promise<HttpJsonResponse> => {
  let remaining = retries
  while (true) {
    try {
      return await fetch(url, options)
    } catch (error) {
      remaining--
      if (remaining <= 0) {
        throw error
      }
    }
  }
}
| all3dp/printing-engine-client | src/app/lib/http-json.ts | TypeScript | apache-2.0 | 1,926 |
package com.gh.mygreen.xlsmapper.fieldaccessor;
import java.util.Optional;
import com.gh.mygreen.xlsmapper.util.CellPosition;
/**
 * Interface for retrieving the position information of a field.
 *
 * @since 2.0
 * @author T.TSUCHIE
 *
 */
@FunctionalInterface
public interface PositionGetter {

    /**
     * Retrieves the position information of the field.
     * @param beanObj instance of the class in which the field is defined
     * @return empty Optional when no position information is available
     */
    Optional<CellPosition> get(Object beanObj);
}
| mygreen/xlsmapper | src/main/java/com/gh/mygreen/xlsmapper/fieldaccessor/PositionGetter.java | Java | apache-2.0 | 633 |
// Copyright 2021 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package restore
import (
"context"
"sort"
"sync"
"time"
"github.com/pingcap/errors"
"github.com/pingcap/failpoint"
"github.com/pingcap/tidb/br/pkg/lightning/backend"
"github.com/pingcap/tidb/br/pkg/lightning/backend/kv"
"github.com/pingcap/tidb/br/pkg/lightning/checkpoints"
"github.com/pingcap/tidb/br/pkg/lightning/common"
"github.com/pingcap/tidb/br/pkg/lightning/config"
"github.com/pingcap/tidb/br/pkg/lightning/glue"
"github.com/pingcap/tidb/br/pkg/lightning/log"
"github.com/pingcap/tidb/br/pkg/lightning/metric"
"github.com/pingcap/tidb/br/pkg/lightning/mydump"
verify "github.com/pingcap/tidb/br/pkg/lightning/verification"
"github.com/pingcap/tidb/br/pkg/lightning/worker"
"github.com/pingcap/tidb/br/pkg/utils"
"github.com/pingcap/tidb/meta/autoid"
"github.com/pingcap/tidb/parser/model"
"github.com/pingcap/tidb/parser/mysql"
"github.com/pingcap/tidb/table"
"github.com/pingcap/tidb/table/tables"
"go.uber.org/multierr"
"go.uber.org/zap"
)
// TableRestore carries the per-table state for a restore: the table's
// schema/checkpoint metadata, the KV-encoding table, and its id allocators.
type TableRestore struct {
	// The unique table name in the form "`db`.`tbl`".
	tableName string
	dbInfo    *checkpoints.TidbDBInfo
	tableInfo *checkpoints.TidbTableInfo
	tableMeta *mydump.MDTableMeta
	// encTable is the table implementation used to encode rows into KV pairs.
	encTable table.Table
	alloc    autoid.Allocators
	logger   log.Logger

	// ignoreColumns lists source columns that must not be restored.
	ignoreColumns []string
}
// NewTableRestore builds a TableRestore for the given table, wiring up
// panicking auto-id allocators rebased to the checkpointed AllocBase and the
// KV-encoding table derived from the table schema.
func NewTableRestore(
	tableName string,
	tableMeta *mydump.MDTableMeta,
	dbInfo *checkpoints.TidbDBInfo,
	tableInfo *checkpoints.TidbTableInfo,
	cp *checkpoints.TableCheckpoint,
	ignoreColumns []string,
) (*TableRestore, error) {
	allocators := kv.NewPanickingAllocators(cp.AllocBase)
	encodingTable, err := tables.TableFromMeta(allocators, tableInfo.Core)
	if err != nil {
		return nil, errors.Annotatef(err, "failed to tables.TableFromMeta %s", tableName)
	}

	tr := &TableRestore{
		tableName:     tableName,
		dbInfo:        dbInfo,
		tableInfo:     tableInfo,
		tableMeta:     tableMeta,
		encTable:      encodingTable,
		alloc:         allocators,
		logger:        log.With(zap.String("table", tableName)),
		ignoreColumns: ignoreColumns,
	}
	return tr, nil
}
// Close releases the encoding table reference and logs completion of the
// table restore.
func (tr *TableRestore) Close() {
	tr.encTable = nil
	tr.logger.Info("restore done")
}
// populateChunks splits the table's data files into restore regions and
// records them in the table checkpoint as engine/chunk checkpoints. It also
// registers the index engine checkpoint. Engines and chunks created here all
// start in the CheckpointStatusLoaded state.
func (tr *TableRestore) populateChunks(ctx context.Context, rc *Controller, cp *checkpoints.TableCheckpoint) error {
	task := tr.logger.Begin(zap.InfoLevel, "load engines and files")
	chunks, err := mydump.MakeTableRegions(ctx, tr.tableMeta, len(tr.tableInfo.Core.Columns), rc.cfg, rc.ioWorkers, rc.store)
	if err == nil {
		timestamp := time.Now().Unix()
		// Test hook: lets tests pin the chunk timestamp to a fixed value.
		failpoint.Inject("PopulateChunkTimestamp", func(v failpoint.Value) {
			timestamp = int64(v.(int))
		})
		for _, chunk := range chunks {
			// Create the engine checkpoint lazily on first chunk for it.
			engine, found := cp.Engines[chunk.EngineID]
			if !found {
				engine = &checkpoints.EngineCheckpoint{
					Status: checkpoints.CheckpointStatusLoaded,
				}
				cp.Engines[chunk.EngineID] = engine
			}
			ccp := &checkpoints.ChunkCheckpoint{
				Key: checkpoints.ChunkCheckpointKey{
					Path:   chunk.FileMeta.Path,
					Offset: chunk.Chunk.Offset,
				},
				FileMeta:          chunk.FileMeta,
				ColumnPermutation: nil,
				Chunk:             chunk.Chunk,
				Timestamp:         timestamp,
			}
			// Chunks that declare an explicit column list get a column
			// permutation resolved against the table schema up front.
			if len(chunk.Chunk.Columns) > 0 {
				perms, err := parseColumnPermutations(tr.tableInfo.Core, chunk.Chunk.Columns, tr.ignoreColumns)
				if err != nil {
					return errors.Trace(err)
				}
				ccp.ColumnPermutation = perms
			}
			engine.Chunks = append(engine.Chunks, ccp)
		}

		// Add index engine checkpoint
		cp.Engines[indexEngineID] = &checkpoints.EngineCheckpoint{Status: checkpoints.CheckpointStatusLoaded}
	}
	task.End(zap.ErrorLevel, err,
		zap.Int("enginesCnt", len(cp.Engines)),
		zap.Int("filesCnt", len(chunks)),
	)
	return err
}
// RebaseChunkRowIDs shifts every chunk's row-id window by rowIDBase so rows
// restored for this table do not collide with previously allocated row ids.
// A zero base is a no-op.
func (tr *TableRestore) RebaseChunkRowIDs(cp *checkpoints.TableCheckpoint, rowIDBase int64) {
	if rowIDBase == 0 {
		return
	}
	for _, engineCp := range cp.Engines {
		for _, chunkCp := range engineCp.Chunks {
			chunkCp.Chunk.PrevRowIDMax += rowIDBase
			chunkCp.Chunk.RowIDMax += rowIDBase
		}
	}
}
// initializeColumns computes the "column permutation" for an INSERT INTO
// statement. Suppose a table has columns (a, b, c, d) in canonical order, and
// we execute `INSERT INTO (d, b, a) VALUES ...`, we will need to remap the
// columns as:
//
// - column `a` is at position 2
// - column `b` is at position 1
// - column `c` is missing
// - column `d` is at position 0
//
// The column permutation of (d, b, a) is set to be [2, 1, -1, 0].
//
// The argument `columns` _must_ be in lower case.
func (tr *TableRestore) initializeColumns(columns []string, ccp *checkpoints.ChunkCheckpoint) error {
	perm, err := createColumnPermutation(columns, tr.ignoreColumns, tr.tableInfo.Core)
	if err != nil {
		return err
	}
	ccp.ColumnPermutation = perm
	return nil
}
// createColumnPermutation maps source columns onto table columns. When no
// explicit column list is given, the identity permutation is used (with an
// extra -1 slot for _tidb_rowid when the table has an automatic row id);
// otherwise the provided names are resolved against the table schema.
func createColumnPermutation(columns []string, ignoreColumns []string, tableInfo *model.TableInfo) ([]int, error) {
	if len(columns) > 0 {
		perm, err := parseColumnPermutations(tableInfo, columns, ignoreColumns)
		if err != nil {
			return nil, errors.Trace(err)
		}
		return perm, nil
	}

	// No provided columns, so use the identity permutation.
	perm := make([]int, 0, len(tableInfo.Columns)+1)
	for i := range tableInfo.Columns {
		perm = append(perm, i)
	}
	if common.TableHasAutoRowID(tableInfo) {
		perm = append(perm, -1)
	}
	return perm, nil
}
// restoreEngines restores all data engines of the table concurrently, then
// closes (and when needed imports) the shared index engine. The table
// checkpoint advances to IndexImported only after both the data engines and
// the index engine have been imported into TiKV. Worker pools (index, table,
// closed-engine) bound the concurrency throughout.
func (tr *TableRestore) restoreEngines(pCtx context.Context, rc *Controller, cp *checkpoints.TableCheckpoint) error {
	indexEngineCp := cp.Engines[indexEngineID]
	if indexEngineCp == nil {
		tr.logger.Error("fail to restoreEngines because indexengine is nil")
		return errors.Errorf("table %v index engine checkpoint not found", tr.tableName)
	}
	// If there is an index engine only, it indicates no data needs to restore.
	// So we can change status to imported directly and avoid opening engine.
	if len(cp.Engines) == 1 {
		if err := rc.saveStatusCheckpoint(pCtx, tr.tableName, indexEngineID, nil, checkpoints.CheckpointStatusImported); err != nil {
			return errors.Trace(err)
		}
		if err := rc.saveStatusCheckpoint(pCtx, tr.tableName, checkpoints.WholeTableEngineID, nil, checkpoints.CheckpointStatusIndexImported); err != nil {
			return errors.Trace(err)
		}
		return nil
	}

	ctx, cancel := context.WithCancel(pCtx)
	defer cancel()

	// The table checkpoint status set to `CheckpointStatusIndexImported` only if
	// both all data engines and the index engine had been imported to TiKV.
	// But persist index engine checkpoint status and table checkpoint status are
	// not an atomic operation, so `cp.Status < CheckpointStatusIndexImported`
	// but `indexEngineCp.Status == CheckpointStatusImported` could happen
	// when kill lightning after saving index engine checkpoint status before saving
	// table checkpoint status.
	var closedIndexEngine *backend.ClosedEngine
	var restoreErr error
	// if index-engine checkpoint is lower than `CheckpointStatusClosed`, there must be
	// data-engines that need to be restore or import. Otherwise, all data-engines should
	// be finished already.

	idxEngineCfg := &backend.EngineConfig{
		TableInfo: tr.tableInfo,
	}
	if indexEngineCp.Status < checkpoints.CheckpointStatusClosed {
		indexWorker := rc.indexWorkers.Apply()
		defer rc.indexWorkers.Recycle(indexWorker)

		if rc.cfg.TikvImporter.Backend == config.BackendLocal {
			// for index engine, the estimate factor is non-clustered index count
			idxCnt := len(tr.tableInfo.Core.Indices)
			if !common.TableHasAutoRowID(tr.tableInfo.Core) {
				idxCnt--
			}
			threshold := estimateCompactionThreshold(cp, int64(idxCnt))
			idxEngineCfg.Local = &backend.LocalEngineConfig{
				Compact:            threshold > 0,
				CompactConcurrency: 4,
				CompactThreshold:   threshold,
			}
		}
		// import backend can't reopen engine if engine is closed, so
		// only open index engine if any data engines don't finish writing.
		var indexEngine *backend.OpenedEngine
		var err error
		for engineID, engine := range cp.Engines {
			if engineID == indexEngineID {
				continue
			}
			if engine.Status < checkpoints.CheckpointStatusAllWritten {
				indexEngine, err = rc.backend.OpenEngine(ctx, idxEngineCfg, tr.tableName, indexEngineID)
				if err != nil {
					return errors.Trace(err)
				}
				break
			}
		}

		logTask := tr.logger.Begin(zap.InfoLevel, "import whole table")
		var wg sync.WaitGroup
		var engineErr common.OnceError
		setError := func(err error) {
			engineErr.Set(err)
			// cancel this context to fail fast
			cancel()
		}

		// Process data engines in ascending engine-id order for determinism.
		type engineCheckpoint struct {
			engineID   int32
			checkpoint *checkpoints.EngineCheckpoint
		}
		allEngines := make([]engineCheckpoint, 0, len(cp.Engines))
		for engineID, engine := range cp.Engines {
			allEngines = append(allEngines, engineCheckpoint{engineID: engineID, checkpoint: engine})
		}
		sort.Slice(allEngines, func(i, j int) bool { return allEngines[i].engineID < allEngines[j].engineID })

		for _, ecp := range allEngines {
			engineID := ecp.engineID
			engine := ecp.checkpoint
			select {
			case <-ctx.Done():
				// Set engineErr and break this for loop to wait all the sub-routines done before return.
				// Directly return may cause panic because caller will close the pebble db but some sub routines
				// are still reading from or writing to the pebble db.
				engineErr.Set(ctx.Err())
			default:
			}
			if engineErr.Get() != nil {
				break
			}

			// Should skip index engine
			if engineID < 0 {
				continue
			}

			if engine.Status < checkpoints.CheckpointStatusImported {
				wg.Add(1)

				// If the number of chunks is small, it means that this engine may be finished in a few times.
				// We do not limit it in TableConcurrency
				restoreWorker := rc.tableWorkers.Apply()
				go func(w *worker.Worker, eid int32, ecp *checkpoints.EngineCheckpoint) {
					defer wg.Done()
					engineLogTask := tr.logger.With(zap.Int32("engineNumber", eid)).Begin(zap.InfoLevel, "restore engine")
					dataClosedEngine, err := tr.restoreEngine(ctx, rc, indexEngine, eid, ecp)
					engineLogTask.End(zap.ErrorLevel, err)
					rc.tableWorkers.Recycle(w)
					if err == nil {
						dataWorker := rc.closedEngineLimit.Apply()
						defer rc.closedEngineLimit.Recycle(dataWorker)
						err = tr.importEngine(ctx, dataClosedEngine, rc, eid, ecp)
						if rc.status != nil {
							for _, chunk := range ecp.Chunks {
								rc.status.FinishedFileSize.Add(chunk.Chunk.EndOffset - chunk.Key.Offset)
							}
						}
					}
					if err != nil {
						setError(err)
					}
				}(restoreWorker, engineID, engine)
			} else {
				// Engine already imported in a previous run: just account its
				// chunk sizes as finished.
				for _, chunk := range engine.Chunks {
					rc.status.FinishedFileSize.Add(chunk.Chunk.EndOffset - chunk.Key.Offset)
				}
			}
		}

		wg.Wait()

		restoreErr = engineErr.Get()
		logTask.End(zap.ErrorLevel, restoreErr)
		if restoreErr != nil {
			return errors.Trace(restoreErr)
		}

		if indexEngine != nil {
			closedIndexEngine, restoreErr = indexEngine.Close(ctx, idxEngineCfg)
		} else {
			closedIndexEngine, restoreErr = rc.backend.UnsafeCloseEngine(ctx, idxEngineCfg, tr.tableName, indexEngineID)
		}

		if err = rc.saveStatusCheckpoint(ctx, tr.tableName, indexEngineID, restoreErr, checkpoints.CheckpointStatusClosed); err != nil {
			return errors.Trace(firstErr(restoreErr, err))
		}
	} else if indexEngineCp.Status == checkpoints.CheckpointStatusClosed {
		// If index engine file has been closed but not imported only if context cancel occurred
		// when `importKV()` execution, so `UnsafeCloseEngine` and continue import it.
		closedIndexEngine, restoreErr = rc.backend.UnsafeCloseEngine(ctx, idxEngineCfg, tr.tableName, indexEngineID)
	}
	if restoreErr != nil {
		return errors.Trace(restoreErr)
	}

	if cp.Status < checkpoints.CheckpointStatusIndexImported {
		var err error
		if indexEngineCp.Status < checkpoints.CheckpointStatusImported {
			err = tr.importKV(ctx, closedIndexEngine, rc, indexEngineID)
		}
		// Test hook: simulate a crash between index import and saving the
		// whole-table checkpoint status.
		failpoint.Inject("FailBeforeIndexEngineImported", func() {
			panic("forcing failure due to FailBeforeIndexEngineImported")
		})

		saveCpErr := rc.saveStatusCheckpoint(ctx, tr.tableName, checkpoints.WholeTableEngineID, err, checkpoints.CheckpointStatusIndexImported)
		if err = firstErr(err, saveCpErr); err != nil {
			return errors.Trace(err)
		}
	}
	return nil
}
// restoreEngine encodes and writes all chunks of one data engine, writing
// index KVs into the shared indexEngine, and returns the closed data engine
// ready for import. Chunk checkpoints are only persisted once both their data
// and index writes have been flushed, so a crash never records un-flushed
// progress.
func (tr *TableRestore) restoreEngine(
	pCtx context.Context,
	rc *Controller,
	indexEngine *backend.OpenedEngine,
	engineID int32,
	cp *checkpoints.EngineCheckpoint,
) (*backend.ClosedEngine, error) {
	ctx, cancel := context.WithCancel(pCtx)
	defer cancel()
	// all data has finished written, we can close the engine directly.
	if cp.Status >= checkpoints.CheckpointStatusAllWritten {
		engineCfg := &backend.EngineConfig{
			TableInfo: tr.tableInfo,
		}
		closedEngine, err := rc.backend.UnsafeCloseEngine(ctx, engineCfg, tr.tableName, engineID)
		// If any error occurred, recycle worker immediately
		if err != nil {
			return closedEngine, errors.Trace(err)
		}
		return closedEngine, nil
	}

	// if the key are ordered, LocalWrite can optimize the writing.
	// table has auto-incremented _tidb_rowid must satisfy following restrictions:
	// - clustered index disable and primary key is not number
	// - no auto random bits (auto random or shard row id)
	// - no partition table
	// - no explicit _tidb_rowid field (At this time we can't determine if the source file contains _tidb_rowid field,
	//   so we will do this check in LocalWriter when the first row is received.)
	hasAutoIncrementAutoID := common.TableHasAutoRowID(tr.tableInfo.Core) &&
		tr.tableInfo.Core.AutoRandomBits == 0 && tr.tableInfo.Core.ShardRowIDBits == 0 &&
		tr.tableInfo.Core.Partition == nil
	dataWriterCfg := &backend.LocalWriterConfig{
		IsKVSorted: hasAutoIncrementAutoID,
	}

	logTask := tr.logger.With(zap.Int32("engineNumber", engineID)).Begin(zap.InfoLevel, "encode kv data and write")
	dataEngineCfg := &backend.EngineConfig{
		TableInfo: tr.tableInfo,
		Local:     &backend.LocalEngineConfig{},
	}
	// Rows that are not in key order benefit from compaction while writing.
	if !tr.tableMeta.IsRowOrdered {
		dataEngineCfg.Local.Compact = true
		dataEngineCfg.Local.CompactConcurrency = 4
		dataEngineCfg.Local.CompactThreshold = compactionUpperThreshold
	}
	dataEngine, err := rc.backend.OpenEngine(ctx, dataEngineCfg, tr.tableName, engineID)
	if err != nil {
		return nil, errors.Trace(err)
	}

	var wg sync.WaitGroup
	var chunkErr common.OnceError

	type chunkFlushStatus struct {
		dataStatus  backend.ChunkFlushStatus
		indexStatus backend.ChunkFlushStatus
		chunkCp     *checkpoints.ChunkCheckpoint
	}

	// chunks that are finished writing, but checkpoints are not finished due to flush not finished.
	var checkFlushLock sync.Mutex
	flushPendingChunks := make([]chunkFlushStatus, 0, 16)

	// Dedicated goroutine that persists chunk checkpoints as they become
	// fully flushed; exits when the channel closes or the context ends.
	chunkCpChan := make(chan *checkpoints.ChunkCheckpoint, 16)
	go func() {
		for {
			select {
			case cp, ok := <-chunkCpChan:
				if !ok {
					return
				}
				saveCheckpoint(rc, tr, engineID, cp)
			case <-ctx.Done():
				return
			}
		}
	}()

	// Restore table data
	for chunkIndex, chunk := range cp.Chunks {
		// Skip chunks already fully consumed per their checkpoint offsets.
		if chunk.Chunk.Offset >= chunk.Chunk.EndOffset {
			continue
		}

		// Drain the prefix of pending chunks whose data and index writes
		// have both been flushed; their checkpoints can now be saved.
		checkFlushLock.Lock()
		finished := 0
		for _, c := range flushPendingChunks {
			if c.indexStatus.Flushed() && c.dataStatus.Flushed() {
				chunkCpChan <- c.chunkCp
				finished++
			} else {
				break
			}
		}
		if finished > 0 {
			flushPendingChunks = flushPendingChunks[finished:]
		}
		checkFlushLock.Unlock()

		select {
		case <-pCtx.Done():
			return nil, pCtx.Err()
		default:
		}

		if chunkErr.Get() != nil {
			break
		}

		// Flows :
		//  1. read mydump file
		//  2. sql -> kvs
		//  3. load kvs data (into kv deliver server)
		//  4. flush kvs data (into tikv node)
		cr, err := newChunkRestore(ctx, chunkIndex, rc.cfg, chunk, rc.ioWorkers, rc.store, tr.tableInfo)
		if err != nil {
			return nil, errors.Trace(err)
		}
		var remainChunkCnt float64
		if chunk.Chunk.Offset < chunk.Chunk.EndOffset {
			remainChunkCnt = float64(chunk.Chunk.EndOffset-chunk.Chunk.Offset) / float64(chunk.Chunk.EndOffset-chunk.Key.Offset)
			metric.ChunkCounter.WithLabelValues(metric.ChunkStatePending).Add(remainChunkCnt)
		}

		restoreWorker := rc.regionWorkers.Apply()
		wg.Add(1)

		dataWriter, err := dataEngine.LocalWriter(ctx, dataWriterCfg)
		if err != nil {
			return nil, errors.Trace(err)
		}

		indexWriter, err := indexEngine.LocalWriter(ctx, &backend.LocalWriterConfig{})
		if err != nil {
			return nil, errors.Trace(err)
		}

		go func(w *worker.Worker, cr *chunkRestore) {
			// Restore a chunk.
			defer func() {
				cr.close()
				wg.Done()
				rc.regionWorkers.Recycle(w)
			}()
			metric.ChunkCounter.WithLabelValues(metric.ChunkStateRunning).Add(remainChunkCnt)
			err := cr.restore(ctx, tr, engineID, dataWriter, indexWriter, rc)
			var dataFlushStatus, indexFlushStaus backend.ChunkFlushStatus
			if err == nil {
				dataFlushStatus, err = dataWriter.Close(ctx)
			}
			if err == nil {
				indexFlushStaus, err = indexWriter.Close(ctx)
			}
			if err == nil {
				metric.ChunkCounter.WithLabelValues(metric.ChunkStateFinished).Add(remainChunkCnt)
				metric.BytesCounter.WithLabelValues(metric.TableStateWritten).Add(float64(cr.chunk.Checksum.SumSize()))
				if dataFlushStatus != nil && indexFlushStaus != nil {
					if dataFlushStatus.Flushed() && indexFlushStaus.Flushed() {
						// Both sides durable: safe to persist the checkpoint now.
						saveCheckpoint(rc, tr, engineID, cr.chunk)
					} else {
						// Defer checkpointing until the flush is observed done.
						checkFlushLock.Lock()
						flushPendingChunks = append(flushPendingChunks, chunkFlushStatus{
							dataStatus:  dataFlushStatus,
							indexStatus: indexFlushStaus,
							chunkCp:     cr.chunk,
						})
						checkFlushLock.Unlock()
					}
				}
			} else {
				metric.ChunkCounter.WithLabelValues(metric.ChunkStateFailed).Add(remainChunkCnt)
				chunkErr.Set(err)
				cancel()
			}
		}(restoreWorker, cr)
	}

	wg.Wait()

	// Report some statistics into the log for debugging.
	totalKVSize := uint64(0)
	totalSQLSize := int64(0)
	for _, chunk := range cp.Chunks {
		totalKVSize += chunk.Checksum.SumSize()
		totalSQLSize += chunk.Chunk.EndOffset - chunk.Chunk.Offset
	}

	err = chunkErr.Get()
	logTask.End(zap.ErrorLevel, err,
		zap.Int64("read", totalSQLSize),
		zap.Uint64("written", totalKVSize),
	)

	// Persist any pending chunk checkpoints whose flushes have completed.
	trySavePendingChunks := func(flushCtx context.Context) error {
		checkFlushLock.Lock()
		cnt := 0
		for _, chunk := range flushPendingChunks {
			if chunk.dataStatus.Flushed() && chunk.indexStatus.Flushed() {
				saveCheckpoint(rc, tr, engineID, chunk.chunkCp)
				cnt++
			} else {
				break
			}
		}
		flushPendingChunks = flushPendingChunks[cnt:]
		checkFlushLock.Unlock()
		return nil
	}

	// in local mode, this check-point make no sense, because we don't do flush now,
	// so there may be data lose if exit at here. So we don't write this checkpoint
	// here like other mode.
	if !rc.isLocalBackend() {
		if saveCpErr := rc.saveStatusCheckpoint(ctx, tr.tableName, engineID, err, checkpoints.CheckpointStatusAllWritten); saveCpErr != nil {
			return nil, errors.Trace(firstErr(err, saveCpErr))
		}
	}
	if err != nil {
		// if process is canceled, we should flush all chunk checkpoints for local backend
		if rc.isLocalBackend() && common.IsContextCanceledError(err) {
			// ctx is canceled, so to avoid Close engine failed, we use `context.Background()` here
			if _, err2 := dataEngine.Close(context.Background(), dataEngineCfg); err2 != nil {
				log.L().Warn("flush all chunk checkpoints failed before manually exits", zap.Error(err2))
				return nil, errors.Trace(err)
			}
			if err2 := trySavePendingChunks(context.Background()); err2 != nil {
				log.L().Warn("flush all chunk checkpoints failed before manually exits", zap.Error(err2))
			}
		}
		return nil, errors.Trace(err)
	}

	closedDataEngine, err := dataEngine.Close(ctx, dataEngineCfg)
	// For local backend, if checkpoint is enabled, we must flush index engine to avoid data loss.
	// this flush action impact up to 10% of the performance, so we only do it if necessary.
	if err == nil && rc.cfg.Checkpoint.Enable && rc.isLocalBackend() {
		if err = indexEngine.Flush(ctx); err != nil {
			return nil, errors.Trace(err)
		}
		if err = trySavePendingChunks(ctx); err != nil {
			return nil, errors.Trace(err)
		}
	}
	saveCpErr := rc.saveStatusCheckpoint(ctx, tr.tableName, engineID, err, checkpoints.CheckpointStatusClosed)
	if err = firstErr(err, saveCpErr); err != nil {
		// If any error occurred, recycle worker immediately
		return nil, errors.Trace(err)
	}
	return closedDataEngine, nil
}
// importEngine imports one closed engine into TiKV, skipping work the
// checkpoint already records as done, and optionally kicks off a background
// level-1 compaction afterwards.
func (tr *TableRestore) importEngine(
	ctx context.Context,
	closedEngine *backend.ClosedEngine,
	rc *Controller,
	engineID int32,
	cp *checkpoints.EngineCheckpoint,
) error {
	// Already imported in a previous run: nothing to do.
	if cp.Status >= checkpoints.CheckpointStatusImported {
		return nil
	}

	// 1. calling import
	if err := tr.importKV(ctx, closedEngine, rc, engineID); err != nil {
		return errors.Trace(err)
	}

	// 2. perform a level-1 compact if idling.
	// The CAS ensures at most one compaction runs at a time across tables.
	if rc.cfg.PostRestore.Level1Compact && rc.compactState.CAS(compactStateIdle, compactStateDoing) {
		go func() {
			// we ignore level-1 compact failure since it is not fatal.
			// no need log the error, it is done in (*Importer).Compact already.
			_ = rc.doCompact(ctx, Level1Compact)
			rc.compactState.Store(compactStateIdle)
		}()
	}

	return nil
}
// postProcess execute rebase-auto-id/checksum/analyze according to the task config.
//
// if the parameter forcePostProcess to true, postProcess force run checksum and analyze even if the
// post-process-at-last config is true. And if this two phases are skipped, the first return value will be true.
func (tr *TableRestore) postProcess(
	ctx context.Context,
	rc *Controller,
	cp *checkpoints.TableCheckpoint,
	forcePostProcess bool,
	metaMgr tableMetaMgr,
) (bool, error) {
	// there are no data in this table, no need to do post process
	// this is important for tables that are just the dump table of views
	// because at this stage, the table was already deleted and replaced by the related view
	// NOTE(review): len(cp.Engines) == 1 presumably means only the index engine remains
	// (no data engines) -- confirm against the engine-allocation logic.
	if !rc.backend.ShouldPostProcess() || len(cp.Engines) == 1 {
		return false, nil
	}
	// 3. alter table set auto_increment
	// Advance the table's auto_increment/auto_random base past the highest ID that
	// was imported, so subsequent inserts through TiDB do not collide.
	if cp.Status < checkpoints.CheckpointStatusAlteredAutoInc {
		// alterTableLock serializes DDL against other tables being post-processed concurrently.
		rc.alterTableLock.Lock()
		tblInfo := tr.tableInfo.Core
		var err error
		if tblInfo.PKIsHandle && tblInfo.ContainsAutoRandomBits() {
			err = AlterAutoRandom(ctx, rc.tidbGlue.GetSQLExecutor(), tr.tableName, tr.alloc.Get(autoid.AutoRandomType).Base()+1)
		} else if common.TableHasAutoRowID(tblInfo) || tblInfo.GetAutoIncrementColInfo() != nil {
			// only alter auto increment id iff table contains auto-increment column or generated handle
			err = AlterAutoIncrement(ctx, rc.tidbGlue.GetSQLExecutor(), tr.tableName, tr.alloc.Get(autoid.RowIDAllocType).Base()+1)
		}
		rc.alterTableLock.Unlock()
		// Persist the outcome (success or failure) before deciding whether to continue.
		saveCpErr := rc.saveStatusCheckpoint(ctx, tr.tableName, checkpoints.WholeTableEngineID, err, checkpoints.CheckpointStatusAlteredAutoInc)
		if err = firstErr(err, saveCpErr); err != nil {
			return false, err
		}
		cp.Status = checkpoints.CheckpointStatusAlteredAutoInc
	}
	// tidb backend don't need checksum & analyze
	// NOTE(review): this branch looks unreachable -- ShouldPostProcess() already
	// returned true at the top of the function; confirm whether its result can
	// change between calls before removing.
	if !rc.backend.ShouldPostProcess() {
		tr.logger.Debug("skip checksum & analyze, not supported by this backend")
		err := rc.saveStatusCheckpoint(ctx, tr.tableName, checkpoints.WholeTableEngineID, nil, checkpoints.CheckpointStatusAnalyzeSkipped)
		return false, errors.Trace(err)
	}
	// When checksum/analyze are deferred to the end of the whole task, report
	// "not finished yet" so the caller re-invokes us later with forcePostProcess.
	if !forcePostProcess && rc.cfg.PostRestore.PostProcessAtLast {
		return true, nil
	}
	// Throttle concurrent checksum operations via the shared worker pool.
	w := rc.checksumWorks.Apply()
	defer rc.checksumWorks.Recycle(w)
	shouldSkipAnalyze := false
	if cp.Status < checkpoints.CheckpointStatusChecksumSkipped {
		// 4. do table checksum
		// Accumulate the per-chunk checksums recorded during import into one
		// table-level local checksum.
		var localChecksum verify.KVChecksum
		for _, engine := range cp.Engines {
			for _, chunk := range engine.Chunks {
				localChecksum.Add(&chunk.Checksum)
			}
		}
		tr.logger.Info("local checksum", zap.Object("checksum", &localChecksum))
		// 4.5. do duplicate detection.
		hasDupe := false
		if rc.cfg.TikvImporter.DuplicateResolution != config.DupeResAlgNone {
			opts := &kv.SessionOptions{
				SQLMode: mysql.ModeStrictAllTables,
				SysVars: rc.sysVars,
			}
			var err error
			hasLocalDupe, err := rc.backend.CollectLocalDuplicateRows(ctx, tr.encTable, tr.tableName, opts)
			if err != nil {
				tr.logger.Error("collect local duplicate keys failed", log.ShortError(err))
				return false, err
			} else {
				hasDupe = hasLocalDupe
			}
		}
		// Register the local checksum with the (possibly shared) table meta manager;
		// it tells us whether this lightning instance is the one responsible for the
		// final checksum and remote duplicate detection.
		needChecksum, needRemoteDupe, baseTotalChecksum, err := metaMgr.CheckAndUpdateLocalChecksum(ctx, &localChecksum, hasDupe)
		if err != nil {
			return false, err
		}
		if needRemoteDupe && rc.cfg.TikvImporter.DuplicateResolution != config.DupeResAlgNone {
			opts := &kv.SessionOptions{
				SQLMode: mysql.ModeStrictAllTables,
				SysVars: rc.sysVars,
			}
			hasRemoteDupe, e := rc.backend.CollectRemoteDuplicateRows(ctx, tr.encTable, tr.tableName, opts)
			if e != nil {
				tr.logger.Error("collect remote duplicate keys failed", log.ShortError(e))
				return false, e
			} else {
				hasDupe = hasDupe || hasRemoteDupe
			}
			if err = rc.backend.ResolveDuplicateRows(ctx, tr.encTable, tr.tableName, rc.cfg.TikvImporter.DuplicateResolution); err != nil {
				tr.logger.Error("resolve remote duplicate keys failed", log.ShortError(err))
				return false, err
			}
		}
		nextStage := checkpoints.CheckpointStatusChecksummed
		// Only compare checksums when enabled, no duplicates were found (duplicates
		// make the local checksum meaningless), and this instance owns the checksum.
		if rc.cfg.PostRestore.Checksum != config.OpLevelOff && !hasDupe && needChecksum {
			if cp.Checksum.SumKVS() > 0 || baseTotalChecksum.SumKVS() > 0 {
				// Fold in checksums carried over from previous runs / other instances.
				localChecksum.Add(&cp.Checksum)
				localChecksum.Add(baseTotalChecksum)
				tr.logger.Info("merged local checksum", zap.Object("checksum", &localChecksum))
			}
			var remoteChecksum *RemoteChecksum
			remoteChecksum, err = DoChecksum(ctx, tr.tableInfo)
			if err != nil {
				return false, err
			}
			err = tr.compareChecksum(remoteChecksum, localChecksum)
			// with post restore level 'optional', we will skip checksum error
			if rc.cfg.PostRestore.Checksum == config.OpLevelOptional {
				if err != nil {
					tr.logger.Warn("compare checksum failed, will skip this error and go on", log.ShortError(err))
					err = nil
				}
			}
		} else {
			switch {
			case rc.cfg.PostRestore.Checksum == config.OpLevelOff:
				tr.logger.Info("skip checksum because the checksum option is off")
			case hasDupe:
				tr.logger.Info("skip checksum&analyze because duplicates were detected")
				shouldSkipAnalyze = true
			case !needChecksum:
				tr.logger.Info("skip checksum&analyze because other lightning instance will do this")
				shouldSkipAnalyze = true
			}
			err = nil
			nextStage = checkpoints.CheckpointStatusChecksumSkipped
		}
		// Don't call FinishTable when other lightning will calculate checksum.
		if err == nil && !hasDupe && needChecksum {
			err = metaMgr.FinishTable(ctx)
		}
		saveCpErr := rc.saveStatusCheckpoint(ctx, tr.tableName, checkpoints.WholeTableEngineID, err, nextStage)
		if err = firstErr(err, saveCpErr); err != nil {
			return false, errors.Trace(err)
		}
		cp.Status = nextStage
	}
	// 5. do table analyze
	if cp.Status < checkpoints.CheckpointStatusAnalyzeSkipped {
		switch {
		case shouldSkipAnalyze || rc.cfg.PostRestore.Analyze == config.OpLevelOff:
			tr.logger.Info("skip analyze")
			if err := rc.saveStatusCheckpoint(ctx, tr.tableName, checkpoints.WholeTableEngineID, nil, checkpoints.CheckpointStatusAnalyzeSkipped); err != nil {
				return false, errors.Trace(err)
			}
			cp.Status = checkpoints.CheckpointStatusAnalyzeSkipped
		case forcePostProcess || !rc.cfg.PostRestore.PostProcessAtLast:
			err := tr.analyzeTable(ctx, rc.tidbGlue.GetSQLExecutor())
			// witch post restore level 'optional', we will skip analyze error
			if rc.cfg.PostRestore.Analyze == config.OpLevelOptional {
				if err != nil {
					tr.logger.Warn("analyze table failed, will skip this error and go on", log.ShortError(err))
					err = nil
				}
			}
			saveCpErr := rc.saveStatusCheckpoint(ctx, tr.tableName, checkpoints.WholeTableEngineID, err, checkpoints.CheckpointStatusAnalyzed)
			if err = firstErr(err, saveCpErr); err != nil {
				return false, errors.Trace(err)
			}
			cp.Status = checkpoints.CheckpointStatusAnalyzed
		default:
			// Analyze deferred to the final post-process phase; report "done for now".
			return true, nil
		}
	}
	return true, nil
}
// parseColumnPermutations maps each column of the target table to the index of the
// matching column in the data file header (`columns`), producing one entry per table
// column in table-column order. Columns absent from the header, or listed in
// ignoreColumns, map to -1 (filled with defaults later). A trailing entry is appended
// for the _tidb_rowid extra handle when the header provides it or the table has an
// auto row ID. Header columns that match neither the table nor the extra handle and
// are not ignored cause an error.
func parseColumnPermutations(tableInfo *model.TableInfo, columns []string, ignoreColumns []string) ([]int, error) {
	perm := make([]int, 0, len(tableInfo.Columns)+1)

	// Index of each header column name -> its position in the data file.
	headerIdx := make(map[string]int)
	for pos, name := range columns {
		headerIdx[name] = pos
	}

	// Ignored columns that actually appear in the header.
	ignoredIdx := make(map[string]int)
	for _, name := range ignoreColumns {
		if pos, found := headerIdx[name]; found {
			ignoredIdx[name] = pos
		}
	}

	// Index of each table column (lowercased name) -> its ordinal in the schema.
	tableIdx := make(map[string]int)
	for ord, column := range tableInfo.Columns {
		tableIdx[column.Name.L] = ord
	}

	// Reject header columns that are neither a table column, the extra handle,
	// nor explicitly ignored.
	var unknown []string
	for _, name := range columns {
		if _, found := tableIdx[name]; found || name == model.ExtraHandleName.L {
			continue
		}
		if _, skipped := ignoredIdx[name]; !skipped {
			unknown = append(unknown, name)
		}
	}
	if len(unknown) > 0 {
		return perm, errors.Errorf("unknown columns in header %s", unknown)
	}

	// Build the permutation in table-column order.
	for _, column := range tableInfo.Columns {
		pos, inHeader := headerIdx[column.Name.L]
		if !inHeader {
			// Generated columns are expected to be absent; only warn for the rest.
			if len(column.GeneratedExprString) == 0 {
				log.L().Warn("column missing from data file, going to fill with default value",
					zap.Stringer("table", tableInfo.Name),
					zap.String("colName", column.Name.O),
					zap.Stringer("colType", &column.FieldType),
				)
			}
			perm = append(perm, -1)
			continue
		}
		if _, skipped := ignoredIdx[column.Name.L]; skipped {
			log.L().Debug("column ignored by user requirements",
				zap.Stringer("table", tableInfo.Name),
				zap.String("colName", column.Name.O),
				zap.Stringer("colType", &column.FieldType),
			)
			perm = append(perm, -1)
			continue
		}
		perm = append(perm, pos)
	}

	// Append the extra-handle slot when applicable.
	if pos, found := headerIdx[model.ExtraHandleName.L]; found {
		perm = append(perm, pos)
	} else if common.TableHasAutoRowID(tableInfo) {
		perm = append(perm, -1)
	}
	return perm, nil
}
// importKV ingests one closed engine's KV data into the target cluster, records the
// result in the checkpoint, and cleans up the engine's local files on success.
func (tr *TableRestore) importKV(
	ctx context.Context,
	closedEngine *backend.ClosedEngine,
	rc *Controller,
	engineID int32,
) error {
	task := closedEngine.Logger().Begin(zap.InfoLevel, "import and cleanup engine")
	regionSplitSize := int64(rc.cfg.TikvImporter.RegionSplitSize)
	if regionSplitSize == 0 && rc.taskMgr != nil {
		// Not configured explicitly: start from the default and scale it up by the
		// number of concurrently running lightning tasks (capped by the ratio limit).
		regionSplitSize = int64(config.SplitRegionSize)
		// NOTE(review): the error returned by CheckTasksExclusively is discarded here,
		// leaving the default split size in place on failure -- confirm intentional.
		rc.taskMgr.CheckTasksExclusively(ctx, func(tasks []taskMeta) ([]taskMeta, error) {
			if len(tasks) > 0 {
				regionSplitSize = int64(config.SplitRegionSize) * int64(utils.MinInt(len(tasks), config.MaxSplitRegionSizeRatio))
			}
			return nil, nil
		})
	}
	err := closedEngine.Import(ctx, regionSplitSize)
	saveCpErr := rc.saveStatusCheckpoint(ctx, tr.tableName, engineID, err, checkpoints.CheckpointStatusImported)
	// Don't clean up when save checkpoint failed, because we will verifyLocalFile and import engine again after restart.
	if err == nil && saveCpErr == nil {
		err = multierr.Append(err, closedEngine.Cleanup(ctx))
	}
	err = firstErr(err, saveCpErr)
	dur := task.End(zap.ErrorLevel, err)
	if err != nil {
		return errors.Trace(err)
	}
	metric.ImportSecondsHistogram.Observe(dur.Seconds())
	// Test hook: lets failpoint-enabled builds artificially slow down the import step.
	failpoint.Inject("SlowDownImport", func() {})
	return nil
}
// compareChecksum verifies that the remote (TiDB/TiKV) checksum of the table agrees
// with the checksum accumulated locally during import. All three components -- the
// checksum value, the KV count, and the total byte size -- must match; otherwise a
// descriptive mismatch error is returned.
func (tr *TableRestore) compareChecksum(remoteChecksum *RemoteChecksum, localChecksum verify.KVChecksum) error {
	matched := remoteChecksum.Checksum == localChecksum.Sum() &&
		remoteChecksum.TotalKVs == localChecksum.SumKVS() &&
		remoteChecksum.TotalBytes == localChecksum.SumSize()
	if !matched {
		return errors.Errorf("checksum mismatched remote vs local => (checksum: %d vs %d) (total_kvs: %d vs %d) (total_bytes:%d vs %d)",
			remoteChecksum.Checksum, localChecksum.Sum(),
			remoteChecksum.TotalKVs, localChecksum.SumKVS(),
			remoteChecksum.TotalBytes, localChecksum.SumSize(),
		)
	}
	tr.logger.Info("checksum pass", zap.Object("local", &localChecksum))
	return nil
}
// analyzeTable runs `ANALYZE TABLE` on the restored table to refresh the optimizer
// statistics, logging the duration and outcome. The caller decides whether an
// analyze failure is fatal.
func (tr *TableRestore) analyzeTable(ctx context.Context, g glue.SQLExecutor) error {
	task := tr.logger.Begin(zap.InfoLevel, "analyze")
	err := g.ExecuteWithLog(ctx, "ANALYZE TABLE "+tr.tableName, "analyze table", tr.logger)
	task.End(zap.ErrorLevel, err)
	return err
}
// estimateCompactionThreshold estimates the SST-file compaction threshold from the
// total raw size of the table's source files. A higher threshold means more time
// compressing but fewer files to iterate; the aim is to keep the total SST file
// count around 512. The result is rounded up to a power of two, zeroed when it
// falls below the lower bound (compaction disabled), and capped at the upper bound
// because compacting very large SSTs takes too long. factor scales the raw size
// (1 for a data engine; the non-clustered index count for an index engine).
func estimateCompactionThreshold(cp *checkpoints.TableCheckpoint, factor int64) int64 {
	var rawSize int64
	prevPath := ""
	for _, engine := range cp.Engines {
		for _, chunk := range engine.Chunks {
			// A single file may be split into several consecutive chunks;
			// count each file's size only once.
			if chunk.FileMeta.Path == prevPath {
				continue
			}
			prevPath = chunk.FileMeta.Path
			fileSize := chunk.FileMeta.FileSize
			if chunk.FileMeta.Type == mydump.SourceTypeParquet {
				// parquet is stored compressed; estimate the expanded size at 2x
				fileSize *= 2
			}
			rawSize += fileSize
		}
	}
	rawSize *= factor

	// try restrict the total file number within 512
	threshold := utils.NextPowerOfTwo(rawSize / 512)
	switch {
	case threshold < compactionLowerThreshold:
		// disable compaction entirely when the estimate is below the lower bound
		return 0
	case threshold > compactionUpperThreshold:
		return compactionUpperThreshold
	default:
		return threshold
	}
}
| c4pt0r/tidb | br/pkg/lightning/restore/table_restore.go | GO | apache-2.0 | 34,111 |
/**
* Copyright (c) 2013-2019 Contributors to the Eclipse Foundation
*
* <p> See the NOTICE file distributed with this work for additional information regarding copyright
* ownership. All rights reserved. This program and the accompanying materials are made available
* under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
* available at http://www.apache.org/licenses/LICENSE-2.0.txt
*/
package org.locationtech.geowave.adapter.raster.adapter.merge;
import java.awt.image.SampleModel;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.locationtech.geowave.adapter.raster.adapter.RasterTile;
import org.locationtech.geowave.adapter.raster.util.SampleModelPersistenceUtils;
import org.locationtech.geowave.core.index.ByteArrayUtils;
import org.locationtech.geowave.core.index.Mergeable;
import org.locationtech.geowave.core.index.VarintUtils;
import org.locationtech.geowave.core.index.persist.Persistable;
import org.locationtech.geowave.core.index.persist.PersistenceUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
/**
 * A server-side merge strategy that can serve multiple adapters at once. Sample models and child
 * merge strategies are stored in keyed maps (rather than per-adapter lists) so that several
 * adapters can share a single sample model or merge strategy instance.
 */
public class MultiAdapterServerMergeStrategy<T extends Persistable> implements
    ServerMergeStrategy,
    Mergeable {
  private static final Logger LOGGER =
      LoggerFactory.getLogger(MultiAdapterServerMergeStrategy.class);
  // the purpose for these maps instead of a list of samplemodel and adapter
  // ID pairs is to allow for multiple adapters to share the same sample model
  protected Map<Integer, SampleModel> sampleModels = new HashMap<>();
  public Map<Short, Integer> adapterIdToSampleModelKey = new HashMap<>();
  public Map<Integer, RasterTileMergeStrategy<T>> childMergeStrategies = new HashMap<>();
  public Map<Short, Integer> adapterIdToChildMergeStrategyKey = new HashMap<>();

  /** No-arg constructor required for persistence. */
  public MultiAdapterServerMergeStrategy() {}

  /**
   * Wraps a single-adapter strategy, registering its sample model and merge strategy under key 0.
   */
  public MultiAdapterServerMergeStrategy(
      final SingleAdapterServerMergeStrategy singleAdapterMergeStrategy) {
    sampleModels.put(0, singleAdapterMergeStrategy.sampleModel);
    adapterIdToSampleModelKey.put(singleAdapterMergeStrategy.internalAdapterId, 0);
    childMergeStrategies.put(0, singleAdapterMergeStrategy.mergeStrategy);
    adapterIdToChildMergeStrategyKey.put(singleAdapterMergeStrategy.internalAdapterId, 0);
  }

  /** Returns the sample model registered for the given adapter, or null if none. */
  public SampleModel getSampleModel(final short internalAdapterId) {
    synchronized (this) {
      final Integer sampleModelId = adapterIdToSampleModelKey.get(internalAdapterId);
      if (sampleModelId != null) {
        return sampleModels.get(sampleModelId);
      }
      return null;
    }
  }

  /** Returns the child merge strategy registered for the given adapter, or null if none. */
  public RasterTileMergeStrategy<T> getChildMergeStrategy(final short internalAdapterId) {
    synchronized (this) {
      final Integer childMergeStrategyId = adapterIdToChildMergeStrategyKey.get(internalAdapterId);
      if (childMergeStrategyId != null) {
        return childMergeStrategies.get(childMergeStrategyId);
      }
      return null;
    }
  }

  /**
   * Merges another multi-adapter strategy into this one, unioning both the sample-model maps and
   * the merge-strategy maps. Non-matching or null inputs are ignored.
   */
  @Override
  public void merge(final Mergeable merge) {
    synchronized (this) {
      if ((merge != null) && (merge instanceof MultiAdapterServerMergeStrategy)) {
        final MultiAdapterServerMergeStrategy<T> other = (MultiAdapterServerMergeStrategy) merge;
        mergeMaps(
            sampleModels,
            adapterIdToSampleModelKey,
            other.sampleModels,
            other.adapterIdToSampleModelKey);
        mergeMaps(
            childMergeStrategies,
            adapterIdToChildMergeStrategyKey,
            other.childMergeStrategies,
            other.adapterIdToChildMergeStrategyKey);
      }
    }
  }

  /**
   * Unions 'other' into 'this' for a value map plus its adapter-to-key index: first copies over
   * any values missing from 'this' (re-keyed as needed), then points any adapter IDs known only
   * to 'other' at the matching value in 'this'.
   */
  private static <T> void mergeMaps(
      final Map<Integer, T> thisValues,
      final Map<Short, Integer> thisAdapterIdToValueKeys,
      final Map<Integer, T> otherValues,
      final Map<Short, Integer> otherAdapterIdToValueKeys) {
    // this was generalized to apply to both sample models and merge
    // strategies, comments refer to sample models but in general it is also
    // applied to merge strategies
    // first check for sample models that exist in 'other' that do
    // not exist in 'this'
    for (final Entry<Integer, T> sampleModelEntry : otherValues.entrySet()) {
      if (!thisValues.containsValue(sampleModelEntry.getValue())) {
        // we need to add this sample model
        final List<Short> adapterIds = new ArrayList<>();
        // find all adapter IDs associated with this sample
        // model
        for (final Entry<Short, Integer> adapterIdEntry : otherAdapterIdToValueKeys.entrySet()) {
          if (adapterIdEntry.getValue().equals(sampleModelEntry.getKey())) {
            adapterIds.add(adapterIdEntry.getKey());
          }
        }
        if (!adapterIds.isEmpty()) {
          addValue(adapterIds, sampleModelEntry.getValue(), thisValues, thisAdapterIdToValueKeys);
        }
      }
    }
    // next check for adapter IDs that exist in 'other' that do not
    // exist in 'this'
    for (final Entry<Short, Integer> adapterIdEntry : otherAdapterIdToValueKeys.entrySet()) {
      if (!thisAdapterIdToValueKeys.containsKey(adapterIdEntry.getKey())) {
        // find the sample model associated with the adapter ID
        // in 'other' and find what Integer it is with in 'this'
        final T sampleModel = otherValues.get(adapterIdEntry.getValue());
        if (sampleModel != null) {
          // because the previous step added any missing
          // sample models, it should be a fair assumption
          // that the sample model exists in 'this'
          for (final Entry<Integer, T> sampleModelEntry : thisValues.entrySet()) {
            if (sampleModel.equals(sampleModelEntry.getValue())) {
              // add the sample model key to the
              // adapterIdToSampleModelKey map
              thisAdapterIdToValueKeys.put(adapterIdEntry.getKey(), sampleModelEntry.getKey());
              break;
            }
          }
        }
      }
    }
  }

  /**
   * Registers a value under the smallest unused positive key and points each of the given adapter
   * IDs at that key.
   */
  private static synchronized <T> void addValue(
      final List<Short> adapterIds,
      final T sampleModel,
      final Map<Integer, T> values,
      final Map<Short, Integer> adapterIdToValueKeys) {
    int nextId = 1;
    boolean idAvailable = false;
    while (!idAvailable) {
      boolean idMatched = false;
      for (final Integer id : values.keySet()) {
        if (nextId == id.intValue()) {
          idMatched = true;
          break;
        }
      }
      if (idMatched) {
        // try the next incremental ID
        nextId++;
      } else {
        // its not matched so we can use it
        idAvailable = true;
      }
    }
    values.put(nextId, sampleModel);
    for (final Short adapterId : adapterIds) {
      adapterIdToValueKeys.put(adapterId, nextId);
    }
  }

  /**
   * Serializes the strategy as four varint-delimited sections: sample models, the adapter-to-
   * sample-model index, merge strategies, and the adapter-to-merge-strategy index. Entries whose
   * sample model fails to serialize are dropped along with their adapter index entries, so the
   * written counts always match the written entries.
   */
  @SuppressFBWarnings(
      value = {"DLS_DEAD_LOCAL_STORE"},
      justification = "Incorrect warning, sampleModelBinary used")
  @Override
  public byte[] toBinary() {
    int byteCount = 0;
    final List<byte[]> sampleModelBinaries = new ArrayList<>();
    final List<Integer> sampleModelKeys = new ArrayList<>();
    int successfullySerializedModels = 0;
    int successfullySerializedModelAdapters = 0;
    final Set<Integer> successfullySerializedModelIds = new HashSet<>();
    for (final Entry<Integer, SampleModel> entry : sampleModels.entrySet()) {
      final SampleModel sampleModel = entry.getValue();
      try {
        final byte[] sampleModelBinary =
            SampleModelPersistenceUtils.getSampleModelBinary(sampleModel);
        byteCount += sampleModelBinary.length;
        byteCount += VarintUtils.unsignedIntByteLength(sampleModelBinary.length);
        byteCount += VarintUtils.unsignedIntByteLength(entry.getKey());
        sampleModelBinaries.add(sampleModelBinary);
        sampleModelKeys.add(entry.getKey());
        successfullySerializedModels++;
        successfullySerializedModelIds.add(entry.getKey());
      } catch (final Exception e) {
        LOGGER.warn("Unable to serialize sample model", e);
      }
    }
    byteCount += VarintUtils.unsignedIntByteLength(successfullySerializedModelIds.size());
    for (final Entry<Short, Integer> entry : adapterIdToSampleModelKey.entrySet()) {
      if (successfullySerializedModelIds.contains(entry.getValue())) {
        byteCount += VarintUtils.unsignedShortByteLength(entry.getKey());
        byteCount += VarintUtils.unsignedIntByteLength(entry.getValue());
        successfullySerializedModelAdapters++;
      }
    }
    byteCount += VarintUtils.unsignedIntByteLength(successfullySerializedModelAdapters);
    final List<byte[]> mergeStrategyBinaries = new ArrayList<>();
    final List<Integer> mergeStrategyKeys = new ArrayList<>();
    int successfullySerializedMergeStrategies = 0;
    int successfullySerializedMergeAdapters = 0;
    final Set<Integer> successfullySerializedMergeIds = new HashSet<>();
    for (final Entry<Integer, RasterTileMergeStrategy<T>> entry : childMergeStrategies.entrySet()) {
      final RasterTileMergeStrategy<T> mergeStrategy = entry.getValue();
      final byte[] mergeStrategyBinary = PersistenceUtils.toBinary(mergeStrategy);
      byteCount += mergeStrategyBinary.length;
      byteCount += VarintUtils.unsignedIntByteLength(mergeStrategyBinary.length);
      byteCount += VarintUtils.unsignedIntByteLength(entry.getKey());
      mergeStrategyBinaries.add(mergeStrategyBinary);
      mergeStrategyKeys.add(entry.getKey());
      successfullySerializedMergeStrategies++;
      successfullySerializedMergeIds.add(entry.getKey());
    }
    byteCount += VarintUtils.unsignedIntByteLength(successfullySerializedMergeStrategies);
    for (final Entry<Short, Integer> entry : adapterIdToChildMergeStrategyKey.entrySet()) {
      if (successfullySerializedMergeIds.contains(entry.getValue())) {
        byteCount += VarintUtils.unsignedShortByteLength(entry.getKey());
        byteCount += VarintUtils.unsignedIntByteLength(entry.getValue());
        successfullySerializedMergeAdapters++;
      }
    }
    byteCount += VarintUtils.unsignedIntByteLength(successfullySerializedMergeAdapters);
    final ByteBuffer buf = ByteBuffer.allocate(byteCount);
    VarintUtils.writeUnsignedInt(successfullySerializedModels, buf);
    for (int i = 0; i < successfullySerializedModels; i++) {
      final byte[] sampleModelBinary = sampleModelBinaries.get(i);
      VarintUtils.writeUnsignedInt(sampleModelBinary.length, buf);
      buf.put(sampleModelBinary);
      VarintUtils.writeUnsignedInt(sampleModelKeys.get(i), buf);
    }
    VarintUtils.writeUnsignedInt(successfullySerializedModelAdapters, buf);
    for (final Entry<Short, Integer> entry : adapterIdToSampleModelKey.entrySet()) {
      if (successfullySerializedModelIds.contains(entry.getValue())) {
        VarintUtils.writeUnsignedShort(entry.getKey(), buf);
        VarintUtils.writeUnsignedInt(entry.getValue(), buf);
      }
    }
    VarintUtils.writeUnsignedInt(successfullySerializedMergeStrategies, buf);
    for (int i = 0; i < successfullySerializedMergeStrategies; i++) {
      final byte[] mergeStrategyBinary = mergeStrategyBinaries.get(i);
      VarintUtils.writeUnsignedInt(mergeStrategyBinary.length, buf);
      buf.put(mergeStrategyBinary);
      VarintUtils.writeUnsignedInt(mergeStrategyKeys.get(i), buf);
    }
    VarintUtils.writeUnsignedInt(successfullySerializedMergeAdapters, buf);
    for (final Entry<Short, Integer> entry : adapterIdToChildMergeStrategyKey.entrySet()) {
      // BUGFIX: previously filtered on successfullySerializedModelIds (a copy-paste of the
      // sample-model loop). The size accounting and the written adapter count above both use
      // successfullySerializedMergeIds, so filtering on the wrong set could write a different
      // number of entries than declared and corrupt the serialized form.
      if (successfullySerializedMergeIds.contains(entry.getValue())) {
        VarintUtils.writeUnsignedShort(entry.getKey(), buf);
        VarintUtils.writeUnsignedInt(entry.getValue(), buf);
      }
    }
    return buf.array();
  }

  /**
   * Reconstitutes the four sections written by {@link #toBinary()}. Entries that fail to
   * deserialize are skipped with a warning.
   */
  @Override
  public void fromBinary(final byte[] bytes) {
    final ByteBuffer buf = ByteBuffer.wrap(bytes);
    final int sampleModelSize = VarintUtils.readUnsignedInt(buf);
    sampleModels = new HashMap<>(sampleModelSize);
    for (int i = 0; i < sampleModelSize; i++) {
      final byte[] sampleModelBinary =
          ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));
      if (sampleModelBinary.length > 0) {
        try {
          final int sampleModelKey = VarintUtils.readUnsignedInt(buf);
          final SampleModel sampleModel =
              SampleModelPersistenceUtils.getSampleModel(sampleModelBinary);
          sampleModels.put(sampleModelKey, sampleModel);
        } catch (final Exception e) {
          LOGGER.warn("Unable to deserialize sample model", e);
        }
      } else {
        // NOTE(review): skipping here without consuming the key varint would desynchronize the
        // stream; toBinary never writes an empty binary, so this should only occur on truncated
        // input -- confirm.
        LOGGER.warn("Sample model binary is empty, unable to deserialize");
      }
    }
    final int sampleModelAdapterIdSize = VarintUtils.readUnsignedInt(buf);
    adapterIdToSampleModelKey = new HashMap<>(sampleModelAdapterIdSize);
    for (int i = 0; i < sampleModelAdapterIdSize; i++) {
      adapterIdToSampleModelKey.put(
          VarintUtils.readUnsignedShort(buf),
          VarintUtils.readUnsignedInt(buf));
    }
    final int mergeStrategySize = VarintUtils.readUnsignedInt(buf);
    childMergeStrategies = new HashMap<>(mergeStrategySize);
    for (int i = 0; i < mergeStrategySize; i++) {
      final byte[] mergeStrategyBinary =
          ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));
      if (mergeStrategyBinary.length > 0) {
        try {
          final RasterTileMergeStrategy mergeStrategy =
              (RasterTileMergeStrategy) PersistenceUtils.fromBinary(mergeStrategyBinary);
          final int mergeStrategyKey = VarintUtils.readUnsignedInt(buf);
          if (mergeStrategy != null) {
            childMergeStrategies.put(mergeStrategyKey, mergeStrategy);
          }
        } catch (final Exception e) {
          LOGGER.warn("Unable to deserialize merge strategy", e);
        }
      } else {
        LOGGER.warn("Merge strategy binary is empty, unable to deserialize");
      }
    }
    final int mergeStrategyAdapterIdSize = VarintUtils.readUnsignedInt(buf);
    adapterIdToChildMergeStrategyKey = new HashMap<>(mergeStrategyAdapterIdSize);
    for (int i = 0; i < mergeStrategyAdapterIdSize; i++) {
      adapterIdToChildMergeStrategyKey.put(
          VarintUtils.readUnsignedShort(buf),
          VarintUtils.readUnsignedInt(buf));
    }
  }

  // public T getMetadata(
  // final GridCoverage tileGridCoverage,
  // final Map originalCoverageProperties,
  // final RasterDataAdapter dataAdapter ) {
  // final RasterTileMergeStrategy<T> childMergeStrategy =
  // getChildMergeStrategy(dataAdapter.getAdapterId());
  // if (childMergeStrategy != null) {
  // return childMergeStrategy.getMetadata(
  // tileGridCoverage,
  // dataAdapter);
  // }
  // return null;
  // }

  /**
   * Merges nextTile into thisTile using the child strategy registered for the adapter; a no-op
   * when no strategy is registered.
   */
  @Override
  public void merge(
      final RasterTile thisTile,
      final RasterTile nextTile,
      final short internalAdapterId) {
    final RasterTileMergeStrategy<T> childMergeStrategy = getChildMergeStrategy(internalAdapterId);
    if (childMergeStrategy != null) {
      childMergeStrategy.merge(thisTile, nextTile, getSampleModel(internalAdapterId));
    }
  }
}
| spohnan/geowave | extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/merge/MultiAdapterServerMergeStrategy.java | Java | apache-2.0 | 15,171 |
package com.ssis.village.model;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.OneToMany;
import javax.persistence.Table;
@SuppressWarnings("serial")
@Entity
@Table(name = "postYourAd")
/**
 * JPA entity for a user-posted advertisement. Uses property (getter) access for mapping; owns
 * its attachments, comments, and likes with full cascade and orphan removal, so removing an
 * element from a collection (or deleting the ad) deletes the child rows.
 */
public class PostYourAd implements Serializable {
	// Auto-generated primary key.
	private Integer postYourAdId;
	// Email address of the posting user.
	private String senderEmail;
	// Display name of the posting user.
	private String userName;
	// NOTE(review): presumably a stored filename/path for the poster's profile image -- confirm.
	private String userProfileModifiedName;
	// When the ad was posted.
	// NOTE(review): java.util.Date without @Temporal; the JPA spec requires @Temporal on
	// Date-typed properties -- confirm the provider maps this as intended before changing.
	private Date postYourAdDate;
	// Title of the advertisement.
	private String title;
	// Child collections; initialized eagerly so they are never null.
	private List<PostYourAdAttachments> postYourAdAttachments = new ArrayList<PostYourAdAttachments>();
	private List<PostYourAdComments> postYourAdComments = new ArrayList<PostYourAdComments>();
	private List<PostAdUserLikes> postAdUserLikes = new ArrayList<PostAdUserLikes>();

	/** No-arg constructor required by JPA. */
	public PostYourAd() {
	}

	/** Convenience constructor for building a reference by primary key. */
	public PostYourAd(Integer postYourAdId) {
		super();
		this.postYourAdId = postYourAdId;
	}

	@Id
	@GeneratedValue(strategy = GenerationType.IDENTITY)
	@Column(name = "postYourAdId")
	public Integer getPostYourAdId() {
		return postYourAdId;
	}

	public void setPostYourAdId(Integer postYourAdId) {
		this.postYourAdId = postYourAdId;
	}

	@Column(name = "senderEmail")
	public String getSenderEmail() {
		return senderEmail;
	}

	public void setSenderEmail(String senderEmail) {
		this.senderEmail = senderEmail;
	}

	@Column(name = "postYourAdDate")
	public Date getPostYourAdDate() {
		return postYourAdDate;
	}

	public void setPostYourAdDate(Date postYourAdDate) {
		this.postYourAdDate = postYourAdDate;
	}

	@Column(name = "userProfileModifiedName")
	public String getUserProfileModifiedName() {
		return userProfileModifiedName;
	}

	public void setUserProfileModifiedName(String userProfileModifiedName) {
		this.userProfileModifiedName = userProfileModifiedName;
	}

	// Attachments owned by this ad; the child side maps back via its "postYourAd" property.
	@OneToMany(fetch = FetchType.LAZY, mappedBy = "postYourAd", cascade = CascadeType.ALL, orphanRemoval = true)
	public List<PostYourAdAttachments> getPostYourAdAttachments() {
		return postYourAdAttachments;
	}

	public void setPostYourAdAttachments(
			List<PostYourAdAttachments> postYourAdAttachments) {
		this.postYourAdAttachments = postYourAdAttachments;
	}

	// Comments owned by this ad.
	@OneToMany(fetch = FetchType.LAZY, mappedBy = "postYourAd", cascade = CascadeType.ALL, orphanRemoval = true)
	public List<PostYourAdComments> getPostYourAdComments() {
		return postYourAdComments;
	}

	public void setPostYourAdComments(
			List<PostYourAdComments> postYourAdComments) {
		this.postYourAdComments = postYourAdComments;
	}

	@Column(name = "userName")
	public String getUserName() {
		return userName;
	}

	public void setUserName(String userName) {
		this.userName = userName;
	}

	@Column(name = "title")
	public String getTitle() {
		return title;
	}

	public void setTitle(String title) {
		this.title = title;
	}

	// Like records owned by this ad.
	@OneToMany(fetch = FetchType.LAZY, mappedBy = "postYourAd", cascade = CascadeType.ALL, orphanRemoval = true)
	public List<PostAdUserLikes> getPostAdUserLikes() {
		return postAdUserLikes;
	}

	public void setPostAdUserLikes(List<PostAdUserLikes> postAdUserLikes) {
		this.postAdUserLikes = postAdUserLikes;
	}
}
| guramma/myvillage | src/main/java/com/ssis/village/model/PostYourAd.java | Java | apache-2.0 | 3,481 |
# Generated by the protocol buffer compiler. DO NOT EDIT!
# Source: opentelemetry/proto/collector/trace/v1/trace_service.proto for package 'opentelemetry.proto.collector.trace.v1'
# Original file comments:
# Copyright 2019, OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'grpc'
require 'opentelemetry/proto/collector/trace/v1/trace_service_pb'
module Opentelemetry
  module Proto
    module Collector
      module Trace
        module V1
          module TraceService
            # Service that can be used to push spans between one Application instrumented with
            # OpenTelemetry and a collector, or between a collector and a central collector (in this
            # case spans are sent/received to/from multiple Applications).
            class Service
              include GRPC::GenericService

              self.marshal_class_method = :encode
              self.unmarshal_class_method = :decode
              self.service_name = 'opentelemetry.proto.collector.trace.v1.TraceService'

              # For performance reasons, it is recommended to keep this RPC
              # alive for the entire life of the application.
              rpc :Export, ::Opentelemetry::Proto::Collector::Trace::V1::ExportTraceServiceRequest, ::Opentelemetry::Proto::Collector::Trace::V1::ExportTraceServiceResponse
            end

            # gRPC client stub class for TraceService (generated by GRPC::GenericService).
            Stub = Service.rpc_stub_class
          end
        end
      end
    end
  end
end
| open-telemetry/opentelemetry-ruby | exporter/otlp/lib/opentelemetry/proto/collector/trace/v1/trace_service_services_pb.rb | Ruby | apache-2.0 | 1,959 |
cordova.define("nl.x-services.plugins.toast.tests", function(require, exports, module) { exports.defineAutoTests = function() {
var fail = function (done) {
expect(true).toBe(false);
done();
},
succeed = function (done) {
expect(true).toBe(true);
done();
};
describe('Plugin availability', function () {
it("window.plugins.toast should exist", function() {
expect(window.plugins.toast).toBeDefined();
});
});
describe('API functions', function () {
it("should define show", function() {
expect(window.plugins.toast.show).toBeDefined();
});
it("should define showShortTop", function() {
expect(window.plugins.toast.showShortTop).toBeDefined();
});
it("should define showShortCenter", function() {
expect(window.plugins.toast.showShortCenter).toBeDefined();
});
it("should define showShortBottom", function() {
expect(window.plugins.toast.showShortBottom).toBeDefined();
});
it("should define showLongTop", function() {
expect(window.plugins.toast.showLongTop).toBeDefined();
});
it("should define showLongCenter", function() {
expect(window.plugins.toast.showLongCenter).toBeDefined();
});
it("should define showLongBottom", function() {
expect(window.plugins.toast.showLongBottom).toBeDefined();
});
});
describe('Invalid usage', function () {
it("should fail due to an invalid position", function(done) {
window.plugins.toast.show('hi', 'short', 'nowhere', fail.bind(null, done), succeed.bind(null, done));
});
it("should fail due to an invalid duration", function(done) {
window.plugins.toast.show('hi', 'medium', 'top', fail.bind(null, done), succeed.bind(null, done));
});
});
};
});
| digitalunity/wbtest | platforms/windows/www/plugins/nl.x-services.plugins.toast/test/tests.js | JavaScript | apache-2.0 | 1,779 |