code stringlengths 3 1.05M | repo_name stringlengths 4 116 | path stringlengths 4 991 | language stringclasses 9 values | license stringclasses 15 values | size int32 3 1.05M |
|---|---|---|---|---|---|
/*
* Copyright 2002-2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.utn.dacs2017.compraventa.vendedor;
import java.util.Map;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
/**
* @author Grego Dadone
*/
@Controller
class VendedorController {

    private final VendedorRepository vendedores;

    @Autowired
    public VendedorController(VendedorRepository vendedorService) {
        this.vendedores = vendedorService;
    }

    /**
     * Populates the model with all known vendedores and renders the HTML list view.
     *
     * @param model Spring MVC model map the view reads from
     * @return logical view name of the vendedor list template
     */
    @RequestMapping(value = { "/vendedores.html" })
    public String showVendedorList(Map<String, Object> model) {
        // Renamed the local from "vendedores" to avoid shadowing the
        // repository field of the same name (the old code relied on
        // "this.vendedores" to disambiguate).
        Vendedores vendedorList = new Vendedores();
        vendedorList.getVendedorList().addAll(this.vendedores.findAll());
        model.put("vendedores", vendedorList);
        return "vendedores/vendedorList";
    }

    /**
     * Returns all vendedores as a serialized body (JSON or XML, chosen by
     * content negotiation on the requested extension).
     *
     * @return wrapper object holding every vendedor found in the repository
     */
    @RequestMapping(value = { "/vendedores.json", "/vendedores.xml" })
    public @ResponseBody Vendedores showResourcesVendedorList() {
        Vendedores vendedorList = new Vendedores();
        vendedorList.getVendedorList().addAll(this.vendedores.findAll());
        return vendedorList;
    }
}
| gregodadone/spring-compraventa | src/main/java/org/utn/dacs2017/compraventa/vendedor/VendedorController.java | Java | apache-2.0 | 1,812 |
package com.afollestad.breadcrumb;
import android.annotation.TargetApi;
import android.app.Activity;
import android.content.Context;
import android.graphics.RectF;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.support.annotation.NonNull;
import android.support.v4.view.ViewCompat;
import android.text.TextUtils;
import android.util.AttributeSet;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.widget.HorizontalScrollView;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
/**
* @author Aidan Follestad (afollestad)
*/
/**
 * Horizontally scrolling breadcrumb bar. Each crumb is an inflated child of an
 * internal {@link LinearLayout}; the view scrolls the active crumb into view on
 * layout and reports crumb taps through a {@link SelectionCallback}.
 */
public class LinearBreadcrumb extends HorizontalScrollView implements View.OnClickListener {

    /**
     * One entry of the breadcrumb trail. Serializable so the whole trail can be
     * kept in a {@link SavedStateWrapper} across configuration changes.
     */
    public static class Crumb implements Serializable {

        public Crumb(String path,String attachMsg) {
            mPath = path;
            mAttachMsg = attachMsg;
        }

        // Path segment this crumb represents.
        private final String mPath;
        // Optional display label; falls back to the path when empty.
        private final String mAttachMsg;
        // Scroll state remembered per crumb. NOTE(review): written via the
        // setters but never read inside this view — presumably consumed by
        // callers; confirm before removing.
        private int mScrollY;
        private int mScrollOffset;

        public int getScrollY() {
            return mScrollY;
        }

        public int getScrollOffset() {
            return mScrollOffset;
        }

        public void setScrollY(int scrollY) {
            this.mScrollY = scrollY;
        }

        public void setScrollOffset(int scrollOffset) {
            this.mScrollOffset = scrollOffset;
        }

        public String getPath() {
            return mPath;
        }

        // Label shown in the crumb view: the attach message when present,
        // otherwise the raw path.
        public String getTitle() {
            return (!TextUtils.isEmpty(mAttachMsg)) ? mAttachMsg : mPath;
        }

        public String getmAttachMsg() {
            return mAttachMsg;
        }

        // Equality is by path only. NOTE(review): equals is overridden without
        // hashCode, so Crumb must not be used as a hash key.
        @Override
        public boolean equals(Object o) {
            return (o instanceof Crumb) && ((Crumb) o).getPath().equals(getPath());
        }

        @Override
        public String toString() {
            return "Crumb{" +
                    "mAttachMsg='" + mAttachMsg + '\'' +
                    ", mPath='" + mPath + '\'' +
                    ", mScrollY=" + mScrollY +
                    ", mScrollOffset=" + mScrollOffset +
                    '}';
        }
    }

    /** Notified when the user taps a (non-active) crumb. */
    public interface SelectionCallback {
        void onCrumbSelection(Crumb crumb, String absolutePath, int count, int index);
    }

    public LinearBreadcrumb(Context context) {
        super(context);
        init();
    }

    public LinearBreadcrumb(Context context, AttributeSet attrs) {
        super(context, attrs);
        init();
    }

    public LinearBreadcrumb(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        init();
    }

    // Current crumbs, parallel to mChildFrame's children (same indices).
    private List<Crumb> mCrumbs;
    // Snapshot taken by clearCrumbs(). NOTE(review): assigned but never read
    // in this class — confirm external use before removing.
    private List<Crumb> mOldCrumbs;
    // Horizontal container holding one inflated child view per crumb.
    private LinearLayout mChildFrame;
    // Index of the active crumb in mCrumbs (-1 when none).
    private int mActive;
    private SelectionCallback mCallback;

    // Shared constructor body: sets minimum height and installs the child row.
    private void init() {
        setMinimumHeight((int) getResources().getDimension(R.dimen.breadcrumb_height));
        setClipToPadding(false);
        mCrumbs = new ArrayList<>();
        mChildFrame = new LinearLayout(getContext());
        addView(mChildFrame, new ViewGroup.LayoutParams(
                ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.MATCH_PARENT));
    }

    // NOTE(review): the fallback branch passes a 0-255 int to
    // ViewCompat.setAlpha, which expects a 0..1 float — values > 1 are clamped
    // to fully opaque. Confirm intended semantics before reusing this helper.
    @TargetApi(Build.VERSION_CODES.JELLY_BEAN)
    private void setAlpha(View view, int alpha) {
        if (view instanceof ImageView && Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
            ((ImageView) view).setImageAlpha(alpha);
        } else {
            ViewCompat.setAlpha(view, alpha);
        }
    }

    /**
     * Appends a crumb (view + model). When refreshLayout is true the new crumb
     * becomes active and a layout pass is requested; titles/separators are
     * refreshed either way.
     */
    public void addCrumb(@NonNull Crumb crumb, boolean refreshLayout) {
        LinearLayout view = (LinearLayout) LayoutInflater.from(getContext()).inflate(R.layout.bread_crumb, this, false);
        // The tag stores the crumb index, read back in onClick().
        view.setTag(mCrumbs.size());
        view.setClickable(true);
        view.setFocusable(true);
        view.setOnClickListener(this);
        mChildFrame.addView(view, new ViewGroup.LayoutParams(
                ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT));
        mCrumbs.add(crumb);
        if (refreshLayout) {
            mActive = mCrumbs.size() - 1;
            requestLayout();
        }
        invalidateActivatedAll();
    }

    @Override
    protected void onLayout(boolean changed, int l, int t, int r, int b) {
        super.onLayout(changed, l, t, r, b);
        //RTL works fine like this
        // Scroll so the active crumb's left edge is visible.
        View child = mChildFrame.getChildAt(mActive);
        if (child != null)
            smoothScrollTo(child.getLeft(), 0);
    }

    /** Returns the crumb whose path equals forDir, or null when absent. */
    public Crumb findCrumb(@NonNull String forDir) {
        for (int i = 0; i < mCrumbs.size(); i++) {
            if (mCrumbs.get(i).getPath().equals(forDir))
                return mCrumbs.get(i);
        }
        return null;
    }

    // Removes all crumbs and views, keeping a snapshot in mOldCrumbs.
    public void clearCrumbs() {
        try {
            mOldCrumbs = new ArrayList<>(mCrumbs);
            mCrumbs.clear();
            mChildFrame.removeAllViews();
        } catch (IllegalStateException e) {
            e.printStackTrace();
        }
    }

    public Crumb getCrumb(int index) {
        return mCrumbs.get(index);
    }

    public void setCallback(SelectionCallback callback) {
        mCallback = callback;
    }

    /**
     * Makes newActive the active crumb, trimming every crumb after it and
     * hiding the active crumb's separator (child index 1 of the crumb layout).
     *
     * NOTE(review): when newActive is not in the list, mActive is -1 and
     * getChildAt(-1) returns null, so the setVisibility call below would NPE
     * before the success check — confirm callers always pass a known crumb.
     */
    public boolean setActive(Crumb newActive) {
        mActive = mCrumbs.indexOf(newActive);
        // size() is re-evaluated each iteration on purpose: the list shrinks
        // as trailing crumbs are removed.
        for(int i = size()-1;size()>mActive+1;i--){
            removeCrumbAt(i);
        }
        ((LinearLayout)mChildFrame.getChildAt(mActive)).getChildAt(1).setVisibility(View.GONE);
        boolean success = mActive > -1;
        if (success)
            requestLayout();
        return success;
    }

    // Refreshes every crumb view's title and separator visibility.
    private void invalidateActivatedAll() {
        for (int i = 0; i < mCrumbs.size(); i++) {
            Crumb crumb = mCrumbs.get(i);
            invalidateActivated(mChildFrame.getChildAt(i), mActive == mCrumbs.indexOf(crumb),
                    i < mCrumbs.size() - 1).setText(crumb.getTitle());
        }
    }

    // Removes the crumb model and its view at the same index.
    public void removeCrumbAt(int index) {
        mCrumbs.remove(index);
        mChildFrame.removeViewAt(index);
    }

    // Re-syncs view tags with child positions. NOTE(review): private and never
    // called in this class — confirm before deleting.
    private void updateIndices() {
        for (int i = 0; i < mChildFrame.getChildCount(); i++)
            mChildFrame.getChildAt(i).setTag(i);
    }

    // NOTE(review): returns true only for a NULL path, which reads inverted
    // for a method named isValidPath; it is also never called from this class.
    // Verify intent before relying on it.
    private boolean isValidPath(String path) {
        return path == null;
    }

    public int size() {
        return mCrumbs.size();
    }

    // Shows the separator when requested and returns the crumb's TextView
    // (child 0). Note it only ever shows the separator — hiding is done in
    // setActive(). The isActive flag is currently unused here.
    private TextView invalidateActivated(View view, boolean isActive, boolean isShowSeparator) {
        LinearLayout child = (LinearLayout) view;
        if (isShowSeparator)
            child.getChildAt(1).setVisibility(View.VISIBLE);
        return (TextView) child.getChildAt(0);
    }

    public int getActiveIndex() {
        return mActive;
    }

    @Override
    public void onClick(View v) {
        if (mCallback != null) {
            int index = (Integer) v.getTag();
            // Only crumbs before the last one are selectable: tapping the
            // current (last) crumb is a no-op.
            if (index >= 0 && index < (size()-1)) {
                setActive(mCrumbs.get(index));
                mCallback.onCrumbSelection(mCrumbs.get(index),
                        getAbsolutePath(mCrumbs.get(index), "/"), mCrumbs.size(), index);
            }
        }
    }

    /** Serializable snapshot of the breadcrumb state for save/restore. */
    public static class SavedStateWrapper implements Serializable {

        public final int mActive;
        public final List<Crumb> mCrumbs;
        public final int mVisibility;

        public SavedStateWrapper(LinearBreadcrumb view) {
            mActive = view.mActive;
            mCrumbs = view.mCrumbs;
            mVisibility = view.getVisibility();
        }
    }

    public SavedStateWrapper getStateWrapper() {
        return new SavedStateWrapper(this);
    }

    // Rebuilds the trail from a saved snapshot (no-op on null).
    public void restoreFromStateWrapper(SavedStateWrapper mSavedState, Activity context) {
        if (mSavedState != null) {
            mActive = mSavedState.mActive;
            for (Crumb c : mSavedState.mCrumbs) {
                addCrumb(c, false);
            }
            requestLayout();
            setVisibility(mSavedState.mVisibility);
        }
    }

    /**
     * Joins crumb paths from index 1 (the root crumb is skipped) up to and
     * including the given crumb, separated by separator, without a trailing
     * separator. Returns null when the trail has a single crumb or the given
     * crumb is the root.
     */
    public String getAbsolutePath(Crumb crumb, @NonNull String separator) {
        StringBuilder builder = new StringBuilder();
        if (size() > 1 && !crumb.equals(mCrumbs.get(0))) {
            List<Crumb> crumbs = mCrumbs.subList(1, size());
            for (Crumb mCrumb : crumbs) {
                builder.append(mCrumb.getPath());
                builder.append(separator);
                if (mCrumb.equals(crumb)) {
                    break;
                }
            }
            String path = builder.toString();
            // Drop the trailing separator appended by the loop.
            return path.substring(0, path.length() -1);
        } else {
            return null;
        }
    }

    public String getCurAbsolutePath(@NonNull String separator){
        return getAbsolutePath(getCrumb(mActive),separator);
    }

    // Resets the trail to a single active root crumb.
    public void addRootCrumb() {
        clearCrumbs();
        addCrumb(new Crumb("/","root"), true);
    }

    /**
     * Rebuilds the trail from a separator-delimited path, tagging every crumb
     * with the same attach message (sha). Note path.split treats the separator
     * as a regex — fine for "/", but escape regex metacharacters if other
     * separators are ever used.
     */
    public void addPath(@NonNull String path,@NonNull String sha, @NonNull String separator) {
        clearCrumbs();
        addCrumb(new Crumb("",""), false);
        String[] paths = path.split(separator);
        Crumb lastCrumb = null;
        for (String splitPath : paths) {
            lastCrumb = new Crumb(splitPath,sha);
            addCrumb(lastCrumb, false);
        }
        if (lastCrumb != null) {
            setActive(lastCrumb);
        }
    }
}
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
const invariant = require('invariant');
/**
* @typedef {object} request
* @property {string} method
* @property {string} route - string like /abc/:abc
 * @property {object} params - route parameters keyed by name
* @property {object} [body]
* @property {object} headers
*/
/**
 * @typedef {object} response
 * @property {Error} error - an error which occurred during req or res
 * @property {object} body - content received from server (parsed)
 * @property {object} headers - headers received with the response
* @property {number} status - http status code; 0 on failure
*/
/**
* XHR wrapper for same-domain requests with Content-Type: application/json
*
* @param {request} request
* @return {Promise}
*/
/**
 * XHR wrapper for same-domain requests with Content-Type: application/json.
 *
 * Always resolves (never rejects) with `{request, response}`; transport-level
 * failures are reported through `response.error` with `response.status === 0`.
 *
 * @param {request} request
 * @return {Promise}
 */
export default function implore(request) {
    return new Promise(resolve => {
        const response = {
            error: null
        };

        invariant(
            request,
            'implore requires a `request` argument'
        );

        invariant(
            typeof request.route === 'string',
            'implore requires parameter `route` to be a string'
        );

        invariant(
            typeof request.method === 'string',
            'implore requires parameter `method` to be a string'
        );

        const xhr = new XMLHttpRequest();
        xhr.open(request.method, getURLFromRequest(request));

        // Only mutating verbs carry a JSON body.
        switch (request.method) {
            case 'POST':
            case 'PUT':
            case 'PATCH':
                xhr.setRequestHeader('Content-Type', 'application/json');
                break;
        }

        if (request.headers) {
            invariant(
                typeof request.headers === 'object',
                'implore requires parameter `headers` to be an object'
            );

            Object.keys(request.headers).forEach((header) => {
                xhr.setRequestHeader(header, request.headers[header]);
            });
        }

        xhr.onreadystatechange = function onreadystatechange() {
            let responseText;

            if (xhr.readyState === 4) {
                responseText = xhr.responseText;
                response.status = xhr.status;
                response.type = xhr.getResponseHeader('Content-Type');

                // FIX: servers commonly send "application/json; charset=utf-8";
                // the previous strict equality check skipped JSON parsing in
                // that case. Match on the media-type prefix and guard against
                // a missing Content-Type header (null).
                if (response.type && response.type.indexOf('application/json') === 0) {
                    try {
                        response.body = JSON.parse(responseText);
                    }
                    catch (err) {
                        err.message = err.message + ' while parsing `' +
                            responseText + '`';
                        response.body = {};
                        response.status = xhr.status || 0;
                        response.error = err;
                    }
                }
                else {
                    // Non-JSON payloads are passed through untouched.
                    response.body = responseText;
                }

                return resolve({
                    request,
                    response
                });
            }
        };

        try {
            if (request.body) {
                xhr.send(JSON.stringify(request.body));
            }
            else {
                xhr.send();
            }
        }
        catch (err) {
            // Synchronous send failures (e.g. network disabled) resolve with
            // a zero status and the thrown error attached.
            response.body = {};
            response.status = 0;
            response.error = err;

            return resolve({
                request,
                response
            });
        }
    });
}
// Convenience helper: issue a GET for the given options.
implore.get = function get(options) {
    options.method = 'GET';

    return implore(options);
};
// Convenience helper: issue a POST for the given options.
implore.post = function post(options) {
    return implore(Object.assign(options, {
        method: 'POST'
    }));
};
// Convenience helper: issue a PUT for the given options.
implore.put = function put(options) {
    return implore(Object.assign(options, {
        method: 'PUT'
    }));
};
// Convenience helper: issue a DELETE for the given options.
// ("delete" is reserved, hence the httpDelete function name.)
implore.delete = function httpDelete(options) {
    return implore(Object.assign(options, {
        method: 'DELETE'
    }));
};
/**
* Combine the route/params/query of a request into a complete URL
*
* @param {request} request
* @param {object|array} request.query
* @return {string} url
*/
/**
 * Combine the route/params/query of a request into a complete URL.
 *
 * Each `:name` placeholder in the route (followed by a slash or the end of
 * the string) is replaced by the matching value from `request.params`.
 *
 * @param {request} request
 * @param {object|array} request.query
 * @return {string} url
 */
function getURLFromRequest(request) {
    const queryString = makeQueryString(request.query || {});
    let url = request.route;

    for (const paramName in request.params) {
        if (request.params.hasOwnProperty(paramName)) {
            const placeholder = new RegExp(':' + paramName + '(?=(\\\/|$))');
            url = url.replace(placeholder, request.params[paramName]);
        }
    }

    return url + (queryString ? '?' + queryString : '');
}
/**
* Take a simple object and turn it into a queryString, recursively.
*
* @param {object} obj - query object
* @param {string} prefix - used in recursive calls to keep track of the parent
* @return {string} queryString without the '?''
*/
/**
 * Turn a simple object into a query string, recursing into nested objects
 * using bracket notation (`a[b]=2`).
 *
 * @param {object} obj - query object
 * @param {string} prefix - parent key, used during recursion
 * @return {string} queryString without the leading '?'
 */
function makeQueryString(obj, prefix='') {
    const parts = [];

    for (const prop in obj) {
        if (!obj.hasOwnProperty(prop)) {
            continue;
        }

        const key = prefix ? prefix + '[' + prop + ']' : prop;
        const value = obj[prop];

        if (typeof value === 'object') {
            parts.push(makeQueryString(value, key));
        } else {
            parts.push(encodeURIComponent(key) + '=' + encodeURIComponent(value));
        }
    }

    return parts.join('&');
}
| mdhgriffiths/fluxthis | lib/implore.es6.js | JavaScript | apache-2.0 | 4,808 |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.engine.impl.cmd;
import java.io.Serializable;
import org.flowable.engine.common.api.FlowableException;
import org.flowable.engine.common.api.FlowableIllegalArgumentException;
import org.flowable.engine.common.api.FlowableObjectNotFoundException;
import org.flowable.engine.common.impl.interceptor.Command;
import org.flowable.engine.common.impl.interceptor.CommandContext;
import org.flowable.engine.form.TaskFormData;
import org.flowable.engine.impl.form.FormEngine;
import org.flowable.engine.impl.form.TaskFormHandler;
import org.flowable.engine.impl.persistence.entity.TaskEntity;
import org.flowable.engine.impl.util.CommandContextUtil;
import org.flowable.engine.impl.util.FormHandlerUtil;
import org.flowable.engine.task.Task;
/**
* @author Tom Baeyens
* @author Joram Barrez
*/
public class GetRenderedTaskFormCmd implements Command<Object>, Serializable {
private static final long serialVersionUID = 1L;
protected String taskId;
protected String formEngineName;
public GetRenderedTaskFormCmd(String taskId, String formEngineName) {
this.taskId = taskId;
this.formEngineName = formEngineName;
}
public Object execute(CommandContext commandContext) {
if (taskId == null) {
throw new FlowableIllegalArgumentException("Task id should not be null");
}
TaskEntity task = CommandContextUtil.getTaskEntityManager(commandContext).findById(taskId);
if (task == null) {
throw new FlowableObjectNotFoundException("Task '" + taskId + "' not found", Task.class);
}
TaskFormHandler taskFormHandler = FormHandlerUtil.getTaskFormHandlder(task);
if (taskFormHandler != null) {
FormEngine formEngine = CommandContextUtil.getProcessEngineConfiguration(commandContext).getFormEngines().get(formEngineName);
if (formEngine == null) {
throw new FlowableException("No formEngine '" + formEngineName + "' defined process engine configuration");
}
TaskFormData taskForm = taskFormHandler.createTaskForm(task);
return formEngine.renderTaskForm(taskForm);
}
return null;
}
}
| stephraleigh/flowable-engine | modules/flowable-engine/src/main/java/org/flowable/engine/impl/cmd/GetRenderedTaskFormCmd.java | Java | apache-2.0 | 2,774 |
package com.stdnull.v2api.model;
import android.os.Bundle;
import android.os.Parcelable;
import java.util.ArrayList;
import java.util.List;
/**
* Created by chen on 2017/8/20.
*/
/**
 * Holds the list of {@link V2ExBean} items backing the main fragment and
 * supports saving/restoring that list through a {@link Bundle}.
 */
public class V2MainFragModel {

    private static final String KEY_V2EXBEAN = "KEY_V2EXBEAN";

    // Backing list; never null (restore() no longer assigns null to it).
    private List<V2ExBean> mContentListModel = new ArrayList<>();

    public List<V2ExBean> getContentListModel() {
        return mContentListModel;
    }

    /** Appends all items; silently ignores a null argument. */
    public void addContentListModel(List<V2ExBean> contentListModel) {
        if (contentListModel != null) {
            this.mContentListModel.addAll(contentListModel);
        }
    }

    public boolean isModelEmpty() {
        return mContentListModel.isEmpty();
    }

    public void clearModel() {
        mContentListModel.clear();
    }

    /** Writes the current list into the bundle under KEY_V2EXBEAN. */
    public void save(Bundle bundle) {
        // FIX: copy into a fresh ArrayList before storing. The previous raw
        // cast of the live List would throw ClassCastException if the field
        // ever held a non-ArrayList implementation, and shared the mutable
        // list with the bundle.
        bundle.putParcelableArrayList(KEY_V2EXBEAN, new ArrayList<>(mContentListModel));
    }

    /**
     * Restores the list from the bundle.
     *
     * @return true when a non-empty list was restored
     */
    public boolean restore(Bundle bundle) {
        if (bundle == null) {
            return false;
        }
        List<V2ExBean> restored = bundle.getParcelableArrayList(KEY_V2EXBEAN);
        if (restored == null) {
            // FIX: previously the field was assigned the null result before the
            // check, leaving the model null and making later isModelEmpty()/
            // clearModel() calls throw NullPointerException.
            return false;
        }
        mContentListModel = restored;
        return !mContentListModel.isEmpty();
    }
}
| stdnull/RunMap | v2api/src/main/java/com/stdnull/v2api/model/V2MainFragModel.java | Java | apache-2.0 | 1,194 |
/*
* Copyright 2016 Amadeus s.a.s.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
"use strict";
const co = require("co");
const path = require("path");
const assertFilesEqual = require("../helpers/assertFilesEqual");
const exec = require("../helpers/exec");
module.exports = function (results) {
const outDir = results.outDir;
const atDiffExecutable = require.resolve("../../bin/at-diff");
const filesToCompare = [
// The .json extension is automatically added
"version1.parse",
"version2.parse",
"user.parse",
"at.parse",
"version1to2.diff",
"filteredVersion1to2.diff",
"impactsOnUser.diff",
"filteredImpactsOnUser.diff"
];
filesToCompare.forEach((fileName) => {
const nonDeterministicFileName = `${fileName}.json`;
it(nonDeterministicFileName, co.wrap(function *() {
this.timeout(10000);
const transformCommand = /\.parse$/.test(fileName) ? "reformat" : "reserialize";
const deterministicFileName = `${fileName}.deterministic.json`;
yield exec(atDiffExecutable, [transformCommand, nonDeterministicFileName, "--json-output", deterministicFileName, "--deterministic-output", "--json-beautify"], {
cwd: outDir
});
yield assertFilesEqual(path.join(outDir, deterministicFileName), path.join(__dirname, "..", "expected-output", deterministicFileName));
}));
});
};
| ariatemplates/at-diff | test/checks/compareWithPreviousOutputs.check.js | JavaScript | apache-2.0 | 1,985 |
/*
* Solo - A small and beautiful blogging system written in Java.
* Copyright (c) 2010-present, b3log.org
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
package org.b3log.solo.model;
/**
* This class defines all link model relevant keys.
*
* @author <a href="http://88250.b3log.org">Liang Ding</a>
* @version 1.0.0.2, Oct 31, 2011
* @since 0.3.1
*/
/**
 * This class defines all link model relevant keys.
 *
 * @author <a href="http://88250.b3log.org">Liang Ding</a>
 * @version 1.0.0.2, Oct 31, 2011
 * @since 0.3.1
 */
public final class Link {

    /**
     * Link.
     */
    public static final String LINK = "link";

    /**
     * Links.
     */
    public static final String LINKS = "links";

    /**
     * Key of title.
     */
    public static final String LINK_TITLE = "linkTitle";

    /**
     * Key of address.
     */
    public static final String LINK_ADDRESS = "linkAddress";

    /**
     * Key of description.
     */
    public static final String LINK_DESCRIPTION = "linkDescription";

    /**
     * Key of order.
     */
    public static final String LINK_ORDER = "linkOrder";

    /**
     * Private constructor. Throws to guarantee non-instantiability even via
     * reflection (standard utility/constants-class idiom).
     */
    private Link() {
        throw new AssertionError("No " + Link.class.getName() + " instances for you");
    }
}
| b3log/b3log-solo | src/main/java/org/b3log/solo/model/Link.java | Java | apache-2.0 | 1,646 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.management;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import javax.management.MBeanServer;
import javax.management.ObjectName;
import javax.management.openmbean.TabularData;
import org.apache.camel.CamelContext;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.util.StringHelper;
import org.junit.Ignore;
/**
* @version
*/
public class ManagedCamelContextTest extends ManagementTestSupport {
@Override
protected CamelContext createCamelContext() throws Exception {
CamelContext context = super.createCamelContext();
// to force a different management name than the camel id
context.getManagementNameStrategy().setNamePattern("19-#name#");
return context;
}
public void testManagedCamelContext() throws Exception {
// JMX tests dont work well on AIX CI servers (hangs them)
if (isPlatform("aix")) {
return;
}
MBeanServer mbeanServer = getMBeanServer();
ObjectName on = ObjectName.getInstance("org.apache.camel:context=19-camel-1,type=context,name=\"camel-1\"");
assertTrue("Should be registered", mbeanServer.isRegistered(on));
String name = (String) mbeanServer.getAttribute(on, "CamelId");
assertEquals("camel-1", name);
String managementName = (String) mbeanServer.getAttribute(on, "ManagementName");
assertEquals("19-camel-1", managementName);
String uptime = (String) mbeanServer.getAttribute(on, "Uptime");
assertNotNull(uptime);
String status = (String) mbeanServer.getAttribute(on, "State");
assertEquals("Started", status);
Boolean messageHistory = (Boolean) mbeanServer.getAttribute(on, "MessageHistory");
assertEquals(Boolean.TRUE, messageHistory);
Integer total = (Integer) mbeanServer.getAttribute(on, "TotalRoutes");
assertEquals(2, total.intValue());
Integer started = (Integer) mbeanServer.getAttribute(on, "StartedRoutes");
assertEquals(2, started.intValue());
// invoke operations
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedBodiesReceived("Hello World");
mbeanServer.invoke(on, "sendBody", new Object[]{"direct:start", "Hello World"}, new String[]{"java.lang.String", "java.lang.Object"});
assertMockEndpointsSatisfied();
resetMocks();
mock.expectedBodiesReceived("Hello World");
mbeanServer.invoke(on, "sendStringBody", new Object[]{"direct:start", "Hello World"}, new String[]{"java.lang.String", "java.lang.String"});
assertMockEndpointsSatisfied();
Object reply = mbeanServer.invoke(on, "requestBody", new Object[]{"direct:foo", "Hello World"}, new String[]{"java.lang.String", "java.lang.Object"});
assertEquals("Bye World", reply);
reply = mbeanServer.invoke(on, "requestStringBody", new Object[]{"direct:foo", "Hello World"}, new String[]{"java.lang.String", "java.lang.String"});
assertEquals("Bye World", reply);
resetMocks();
mock = getMockEndpoint("mock:result");
mock.expectedBodiesReceived("Hello World");
mock.expectedHeaderReceived("foo", 123);
Map<String, Object> headers = new HashMap<String, Object>();
headers.put("foo", 123);
mbeanServer.invoke(on, "sendBodyAndHeaders", new Object[]{"direct:start", "Hello World", headers}, new String[]{"java.lang.String", "java.lang.Object", "java.util.Map"});
assertMockEndpointsSatisfied();
resetMocks();
mock = getMockEndpoint("mock:result");
mock.expectedBodiesReceived("Hello World");
mock.expectedHeaderReceived("foo", 123);
reply = mbeanServer.invoke(on, "requestBodyAndHeaders", new Object[]{"direct:start", "Hello World", headers}, new String[]{"java.lang.String", "java.lang.Object", "java.util.Map"});
assertEquals("Hello World", reply);
assertMockEndpointsSatisfied();
// test can send
Boolean can = (Boolean) mbeanServer.invoke(on, "canSendToEndpoint", new Object[]{"direct:start"}, new String[]{"java.lang.String"});
assertEquals(true, can.booleanValue());
can = (Boolean) mbeanServer.invoke(on, "canSendToEndpoint", new Object[]{"timer:foo"}, new String[]{"java.lang.String"});
assertEquals(false, can.booleanValue());
// stop Camel
mbeanServer.invoke(on, "stop", null, null);
}
public void testManagedCamelContextCreateEndpoint() throws Exception {
// JMX tests dont work well on AIX CI servers (hangs them)
if (isPlatform("aix")) {
return;
}
MBeanServer mbeanServer = getMBeanServer();
ObjectName on = ObjectName.getInstance("org.apache.camel:context=19-camel-1,type=context,name=\"camel-1\"");
assertNull(context.hasEndpoint("seda:bar"));
// create a new endpoint
Object reply = mbeanServer.invoke(on, "createEndpoint", new Object[]{"seda:bar"}, new String[]{"java.lang.String"});
assertEquals(Boolean.TRUE, reply);
assertNotNull(context.hasEndpoint("seda:bar"));
ObjectName seda = ObjectName.getInstance("org.apache.camel:context=19-camel-1,type=endpoints,name=\"seda://bar\"");
boolean registered = mbeanServer.isRegistered(seda);
assertTrue("Should be registered " + seda, registered);
// create it again
reply = mbeanServer.invoke(on, "createEndpoint", new Object[]{"seda:bar"}, new String[]{"java.lang.String"});
assertEquals(Boolean.FALSE, reply);
registered = mbeanServer.isRegistered(seda);
assertTrue("Should be registered " + seda, registered);
}
public void testManagedCamelContextRemoveEndpoint() throws Exception {
// JMX tests dont work well on AIX CI servers (hangs them)
if (isPlatform("aix")) {
return;
}
MBeanServer mbeanServer = getMBeanServer();
ObjectName on = ObjectName.getInstance("org.apache.camel:context=19-camel-1,type=context,name=\"camel-1\"");
assertNull(context.hasEndpoint("seda:bar"));
// create a new endpoint
Object reply = mbeanServer.invoke(on, "createEndpoint", new Object[]{"seda:bar"}, new String[]{"java.lang.String"});
assertEquals(Boolean.TRUE, reply);
assertNotNull(context.hasEndpoint("seda:bar"));
ObjectName seda = ObjectName.getInstance("org.apache.camel:context=19-camel-1,type=endpoints,name=\"seda://bar\"");
boolean registered = mbeanServer.isRegistered(seda);
assertTrue("Should be registered " + seda, registered);
// remove it
Object num = mbeanServer.invoke(on, "removeEndpoints", new Object[]{"seda:*"}, new String[]{"java.lang.String"});
assertEquals(1, num);
assertNull(context.hasEndpoint("seda:bar"));
registered = mbeanServer.isRegistered(seda);
assertFalse("Should not be registered " + seda, registered);
// remove it again
num = mbeanServer.invoke(on, "removeEndpoints", new Object[]{"seda:*"}, new String[]{"java.lang.String"});
assertEquals(0, num);
assertNull(context.hasEndpoint("seda:bar"));
registered = mbeanServer.isRegistered(seda);
assertFalse("Should not be registered " + seda, registered);
}
public void testFindComponentsInClasspath() throws Exception {
// JMX tests dont work well on AIX CI servers (hangs them)
if (isPlatform("aix")) {
return;
}
MBeanServer mbeanServer = getMBeanServer();
ObjectName on = ObjectName.getInstance("org.apache.camel:context=19-camel-1,type=context,name=\"camel-1\"");
assertTrue("Should be registered", mbeanServer.isRegistered(on));
@SuppressWarnings("unchecked")
Map<String, Properties> info = (Map<String, Properties>) mbeanServer.invoke(on, "findComponents", null, null);
assertNotNull(info);
assertTrue(info.size() > 20);
Properties prop = info.get("seda");
assertNotNull(prop);
assertEquals("seda", prop.get("name"));
assertEquals("org.apache.camel", prop.get("groupId"));
assertEquals("camel-core", prop.get("artifactId"));
}
public void testManagedCamelContextCreateRouteStaticEndpointJson() throws Exception {
// JMX tests dont work well on AIX CI servers (hangs them)
if (isPlatform("aix")) {
return;
}
MBeanServer mbeanServer = getMBeanServer();
ObjectName on = ObjectName.getInstance("org.apache.camel:context=19-camel-1,type=context,name=\"camel-1\"");
// get the json
String json = (String) mbeanServer.invoke(on, "createRouteStaticEndpointJson", null, null);
assertNotNull(json);
assertEquals(7, StringHelper.countChar(json, '{'));
assertEquals(7, StringHelper.countChar(json, '}'));
assertTrue(json.contains("{ \"uri\": \"direct://start\" }"));
assertTrue(json.contains("{ \"uri\": \"direct://foo\" }"));
}
public void testManagedCamelContextExplainEndpointUri() throws Exception {
// JMX tests dont work well on AIX CI servers (hangs them)
if (isPlatform("aix")) {
return;
}
MBeanServer mbeanServer = getMBeanServer();
ObjectName on = ObjectName.getInstance("org.apache.camel:context=19-camel-1,type=context,name=\"camel-1\"");
// get the json
String json = (String) mbeanServer.invoke(on, "explainEndpointJson", new Object[]{"log:foo?groupDelay=2000&groupSize=5", false},
new String[]{"java.lang.String", "boolean"});
assertNotNull(json);
assertEquals(5, StringHelper.countChar(json, '{'));
assertEquals(5, StringHelper.countChar(json, '}'));
assertTrue(json.contains("\"groupDelay\": { \"kind\": \"parameter\", \"type\": \"integer\", \"javaType\": \"java.lang.Long\", \"deprecated\": \"false\", \"value\": \"2000\","
+ " \"description\": \"Set the initial delay for stats (in millis)\" },"));
assertTrue(json.contains("\"groupSize\": { \"kind\": \"parameter\", \"type\": \"integer\", \"javaType\": \"java.lang.Integer\", \"deprecated\": \"false\", \"value\": \"5\","
+ " \"description\": \"An integer that specifies a group size for throughput logging.\" }"));
assertTrue(json.contains("\"loggerName\": { \"kind\": \"path\", \"type\": \"string\", \"javaType\": \"java.lang.String\", \"deprecated\": \"false\","
+ " \"value\": \"foo\", \"description\": \"The logger name to use\" }"));
}
    /**
     * Verifies that the ManagedCamelContext MBean's {@code explainEipJson}
     * operation returns JSON describing the "transform" EIP model.
     */
    public void testManagedCamelContextExplainEip() throws Exception {
        // JMX tests don't work well on AIX CI servers (hangs them)
        if (isPlatform("aix")) {
            return;
        }

        MBeanServer mbeanServer = getMBeanServer();

        ObjectName on = ObjectName.getInstance("org.apache.camel:context=19-camel-1,type=context,name=\"camel-1\"");

        // get the json description of the EIP model
        String json = (String) mbeanServer.invoke(on, "explainEipJson", new Object[]{"transform", false}, new String[]{"java.lang.String", "boolean"});
        assertNotNull(json);

        assertTrue(json.contains("\"label\": \"transformation\""));
        assertTrue(json.contains("\"expression\": { \"kind\": \"element\", \"required\": \"true\", \"type\": \"object\""));
    }
    /**
     * Routes used by the management tests in this class: a plain
     * direct-to-mock route plus a direct route with a constant transform.
     */
    @Override
    protected RouteBuilder createRouteBuilder() throws Exception {
        return new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                from("direct:start").to("mock:result");

                from("direct:foo").transform(constant("Bye World"));
            }
        };
    }
}
| logzio/camel | camel-core/src/test/java/org/apache/camel/management/ManagedCamelContextTest.java | Java | apache-2.0 | 12,715 |
/*
Licensed under the Apache License, Version 2.0 (the "License"); you may not
use this file except in compliance with the License. You may obtain a copy
of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations under
the License.
Initial implementation:
* http://www.mudynamics.com
* http://labs.mudynamics.com
* http://www.pcapr.net
*/
(function($) {
  // Base timestamp: each seed document gets a created_at within the half
  // hour following page load so the sample data has varied, recent times.
  var now = new Date().getTime();
  var millisInHHour = 1000*60*30;

  $.jscouch = $.jscouch || {};
  $.jscouch.documents = $.jscouch.documents || {};

  // Returns a randomized UTC timestamp within half an hour of page load.
  function randomCreatedAt() {
    return new Date(now + millisInHHour*Math.random()).toUTCString();
  }

  // Static seed documents; created_at is filled in per document at load
  // time, matching the original one-put-per-document behavior and order.
  var seedDocs = [
    { name: 'fish.jpg',   user: 'bob',   type: 'jpeg', camera: 'nikon',
      info: { width: 100, height: 200, size: 12345 }, tags: [ 'tuna', 'shark' ] },
    { name: 'trees.jpg',  user: 'john',  type: 'jpeg', camera: 'canon',
      info: { width: 30,  height: 250, size: 32091 }, tags: [ 'oak' ] },
    { name: 'snow.png',   user: 'john',  type: 'png',  camera: 'canon',
      info: { width: 64,  height: 64,  size: 1253 },  tags: [ 'tahoe', 'powder' ] },
    { name: 'hawaii.png', user: 'john',  type: 'png',  camera: 'nikon',
      info: { width: 128, height: 64,  size: 92834 }, tags: [ 'maui', 'tuna' ] },
    { name: 'hawaii.gif', user: 'bob',   type: 'gif',  camera: 'canon',
      info: { width: 320, height: 128, size: 49287 }, tags: [ 'maui' ] },
    { name: 'island.gif', user: 'zztop', type: 'gif',  camera: 'nikon',
      info: { width: 640, height: 480, size: 50398 }, tags: [ 'maui' ] }
  ];

  $.extend($.jscouch.documents, {
    // Populate the DB with the initial sample entries.
    load: function() {
      for (var i = 0; i < seedDocs.length; i++) {
        var doc = $.extend({}, seedDocs[i]);
        doc.created_at = randomCreatedAt();
        $.jscouch.couchdb.put(doc);
      }
    }
  });
})(jQuery);
| janl/jscouch | jscouch.documents.js | JavaScript | apache-2.0 | 3,006 |
/**
--| ADAPTIVE RUNTIME PLATFORM |----------------------------------------------------------------------------------------
(C) Copyright 2013-2015 Carlos Lozano Diez t/a Adaptive.me <http://adaptive.me>.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the
License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 . Unless required by appli-
-cable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
Original author:
* Carlos Lozano Diez
<http://github.com/carloslozano>
<http://twitter.com/adaptivecoder>
<mailto:carlos@adaptive.me>
Contributors:
* Ferran Vila Conesa
<http://github.com/fnva>
<http://twitter.com/ferran_vila>
<mailto:ferran.vila.conesa@gmail.com>
* See source code files for contributors.
Release:
* @version v2.2.0
-------------------------------------------| aut inveniam viam aut faciam |--------------------------------------------
*/
using System;
namespace Adaptive.Arp.Api
{
     /// <summary>
     /// Error conditions for network-status listening
     /// (reported to an INetworkStatusListener, per the type name).
     /// </summary>
     public enum INetworkStatusListenerError {
          /// <summary>Operation not permitted (missing permission).</summary>
          NoPermission,
          /// <summary>Network is unreachable.</summary>
          Unreachable,
          /// <summary>Failure cause could not be determined.</summary>
          Unknown
     }
}
| AdaptiveMe/adaptive-arp-windows | adaptive-arp-lib/adaptive-arp-lib/Sources/Adaptive.Arp.Api/INetworkStatusListenerError.cs | C# | apache-2.0 | 1,523 |
import {ShopOrderDetail} from './ShopOrderDetail';
/**
 * Client-side model of a shop order, including its detail lines.
 *
 * NOTE(review): the numeric code fields (is_vip, shopway, status) carry
 * meanings defined server-side — confirm against the backend definitions.
 */
export class ShopOrder {
    order_id: string;        // primary order identifier
    user_id: number;         // id of the purchasing user
    username: string;
    is_vip: number;          // numeric flag — semantics defined server-side
    payment: number;
    order_no: number;
    shopway: number;         // purchase-channel code — confirm values with backend
    status: number;          // order-status code — confirm values with backend
    employee_id: string;
    buytime: Date;           // time of purchase
    orderdealtime: Date;     // time the order was processed
    phone: string;
    address: string;
    soft_delete: string;
    subPrice:number;         // per-order price figure — exact semantics unverified
    shopOrderDetails: ShopOrderDetail[];  // line items belonging to this order
}
| TZClub/OMIPlatform | admin-web/src/app/entity/shop/ShopOrder.ts | TypeScript | apache-2.0 | 412 |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.metrics.stats;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.ValuesSourceParserHelper;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import org.elasticsearch.search.SearchContext;
import java.io.IOException;
import java.util.Map;
/**
 * Builder for the {@code stats} metrics aggregation over a numeric values
 * source. This is a leaf-only builder: it accepts no sub-aggregations.
 */
public class StatsAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOnly<ValuesSource.Numeric, StatsAggregationBuilder> {
    public static final String NAME = "stats";

    // Shared XContent parser; the numeric values-source fields are declared
    // once via the helper.
    private static final ObjectParser<StatsAggregationBuilder, Void> PARSER;
    static {
        PARSER = new ObjectParser<>(StatsAggregationBuilder.NAME);
        ValuesSourceParserHelper.declareNumericFields(PARSER, true, true, false);
    }

    /** Parses a {@code stats} aggregation from XContent. */
    public static AggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException {
        return PARSER.parse(parser, new StatsAggregationBuilder(aggregationName), null);
    }

    public StatsAggregationBuilder(String name) {
        super(name, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
    }

    /** Clone constructor backing {@link #shallowCopy}. */
    protected StatsAggregationBuilder(StatsAggregationBuilder clone,
            Builder factoriesBuilder, Map<String, Object> metaData) {
        super(clone, factoriesBuilder, metaData);
    }

    @Override
    public AggregationBuilder shallowCopy(Builder factoriesBuilder, Map<String, Object> metaData) {
        return new StatsAggregationBuilder(this, factoriesBuilder, metaData);
    }

    /**
     * Read from a stream.
     */
    public StatsAggregationBuilder(StreamInput in) throws IOException {
        super(in, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
    }

    @Override
    protected void innerWriteTo(StreamOutput out) {
        // Do nothing, no extra state to write to stream
    }

    @Override
    protected StatsAggregatorFactory innerBuild(SearchContext context, ValuesSourceConfig<Numeric> config,
            AggregatorFactory<?> parent, Builder subFactoriesBuilder) throws IOException {
        return new StatsAggregatorFactory(name, config, context, parent, subFactoriesBuilder, metaData);
    }

    @Override
    public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
        // no builder-specific fields beyond the common values-source ones
        return builder;
    }

    // This builder holds no state beyond the common values-source fields,
    // hence the constant hash and unconditional inner equality below.
    @Override
    protected int innerHashCode() {
        return 0;
    }

    @Override
    protected boolean innerEquals(Object obj) {
        return true;
    }

    @Override
    public String getType() {
        return NAME;
    }
}
| jprante/elasticsearch-server | server/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/StatsAggregationBuilder.java | Java | apache-2.0 | 4,154 |
package jenkins.plugins.hygieia;
/**
 * Test stub for {@link DefaultHygieiaService}: simply forwards the
 * constructor arguments so tests can instantiate the service with canned
 * connection settings. (Previously retained commented-out HTTP-client
 * override code has been removed as dead code.)
 */
public class DefaultHygieiaServiceStub extends DefaultHygieiaService {

    public DefaultHygieiaServiceStub(String host, String token, String name) {
        super(host, token, name);
    }
}
| tabladrum/hygieia-jenkins-plugin | src/test/java/jenkins/plugins/hygieia/DefaultHygieiaServiceStub.java | Java | apache-2.0 | 499 |
package cn.aezo.demo.rabbitmq.c05_model_topic;
import cn.aezo.demo.rabbitmq.util.RabbitmqU;
import com.rabbitmq.client.AMQP;
import com.rabbitmq.client.Channel;
import com.rabbitmq.client.Connection;
import com.rabbitmq.client.DefaultConsumer;
import com.rabbitmq.client.Envelope;
import java.io.IOException;
/**
 * Demonstrates the "topic" exchange messaging model.
 *
 * Sample output:
 * consumer1收到消息:[aezo.order.vip] 这是 0 条消息
 * consumer2收到消息:[smalle.vip] 这是 2 条消息
 * consumer1收到消息:[aezo.user] 这是 1 条消息
 * consumer1收到消息:[smalle.vip] 这是 2 条消息
 * consumer1收到消息:[aezo.order.vip] 这是 3 条消息
 * consumer1收到消息:[aezo.user] 这是 4 条消息
 *
 * @author smalle
 * @date 2020-08-29 16:31
 */
public class Consumer {
    private static final String EXCHANGE_NAME = "topic_logs";

    public static void main(String[] args) throws IOException {
        consumer1();
        consumer2();
    }

    /** Consumer bound with two patterns: "aezo.#" and "*.vip". */
    public static void consumer1() throws IOException {
        Connection connection = RabbitmqU.getConnection();
        Channel channel = connection.createChannel();
        channel.exchangeDeclare(EXCHANGE_NAME, "topic");

        // Declare a server-named temporary queue. In the management UI the
        // Queues Features column shows "AD" (autoDelete) and "Excl" (exclusive).
        String queueName = channel.queueDeclare().getQueue();

        // Bind the temporary queue to the exchange and subscribe to matching
        // routing keys: '*' matches exactly one word, '#' matches zero or more.
        channel.queueBind(queueName, EXCHANGE_NAME, "aezo.#");
        channel.queueBind(queueName, EXCHANGE_NAME, "*.vip");

        channel.basicConsume(queueName, true, new DefaultConsumer(channel) {
            @Override
            public void handleDelivery(String consumerTag, Envelope envelope, AMQP.BasicProperties properties, byte[] body) throws IOException {
                System.out.println("consumer1收到消息:" + new String(body, "UTF-8"));
            }
        });
    }

    /** Consumer bound only with the single-word pattern "*.vip". */
    public static void consumer2() throws IOException {
        Connection connection = RabbitmqU.getConnection();
        Channel channel = connection.createChannel();
        channel.exchangeDeclare(EXCHANGE_NAME, "topic");
        String queueName = channel.queueDeclare().getQueue();

        // '*' stands for a single word, so this binding cannot match keys
        // such as aezo.order.vip or aezo.vip.hello.
        channel.queueBind(queueName, EXCHANGE_NAME, "*.vip");
        channel.basicConsume(queueName, true, new DefaultConsumer(channel) {
            @Override
            public void handleDelivery(String consumerTag, Envelope envelope, AMQP.BasicProperties properties, byte[] body) throws IOException {
                System.out.println("consumer2收到消息:" + new String(body, "UTF-8"));
            }
        });
    }
}
| oldinaction/smjava | rabbitmq/src/main/java/cn/aezo/demo/rabbitmq/c05_model_topic/Consumer.java | Java | apache-2.0 | 2,828 |
<?php
include("config.php");

// Load the munched data file (path supplied by config.php) and decode it
// into an associative array.
$content = file_get_contents($data_munched);
$data = json_decode($content, true);

/**
 * Comparator: orders exchanges by their total amount, largest first.
 *
 * The previous implementation returned the raw subtraction of the two
 * totals; usort() truncates a float return value to int, so fractional
 * differences (e.g. 0.5) were treated as equality. Return an explicit
 * -1/0/1 instead.
 */
function sort_by_order ($a, $b)
{
    $ta = $a['amount']['total'];
    $tb = $b['amount']['total'];
    if ($tb == $ta) {
        return 0;
    }
    return ($tb < $ta) ? -1 : 1;
}

usort($data["exchanges"], 'sort_by_order');

print_r($data);
?>
class GetSharedLinkAccessRightsResult
  attr_accessor :access_rights

  # Maps rubyish snake_case attribute names to the wire (camelCase) keys.
  def self.attribute_map
    { :access_rights => :accessRights }
  end

  # Populates known attributes from a raw hash keyed by camelCase names.
  def initialize(attributes = {})
    return if attributes.to_s == ""
    unless GetSharedLinkAccessRightsResult.attribute_map[:access_rights].nil?
      value = attributes["accessRights"]
      send(:access_rights=, value) if respond_to?(:access_rights)
    end
  end

  # Serializes populated attributes back into a wire-format hash,
  # omitting attributes that are nil.
  def to_body
    GetSharedLinkAccessRightsResult.attribute_map.each_with_object({}) do |(attr, key), body|
      value = send(attr)
      body[key] = value unless value.nil?
    end
  end
end
| liosha2007/groupdocs-ruby | groupdocs/models/getsharedlinkaccessrightsresult.rb | Ruby | apache-2.0 | 743 |
///<reference path="Math.ts"/>
class Entity {
get X(): number {
return this.Position.x;
}
get Y(): number {
return this.Position.y;
}
get Z(): number {
return this.Position.z;
}
constructor(public Position: Vec3) {
}
} | piersh/GLander | src/Entity.ts | TypeScript | apache-2.0 | 233 |
#pragma once
#include "indexer/feature_decl.hpp"
#include "geometry/point2d.hpp"
#include "std/initializer_list.hpp"
#include "std/limits.hpp"
#include "std/string.hpp"
#include "std/vector.hpp"
#include "3party/osrm/osrm-backend/typedefs.h"
namespace routing
{
using TNodeId = uint32_t;
using TEdgeWeight = double;
/// \brief Unique identification for a road edge between two junctions (joints).
/// In case of OSRM it's NodeID and in case of RoadGraph (IndexGraph)
/// it's mwm id, feature id, segment id and direction.
struct UniNodeId
{
enum class Type
{
Osrm,
Mwm,
};
UniNodeId(Type type) : m_type(type) {}
UniNodeId(FeatureID const & featureId, uint32_t segId, bool forward)
: m_type(Type::Mwm), m_featureId(featureId), m_segId(segId), m_forward(forward)
{
}
UniNodeId(uint32_t nodeId) : m_type(Type::Osrm), m_nodeId(nodeId) {}
bool operator==(UniNodeId const & rh) const;
bool operator<(UniNodeId const & rh) const;
void Clear();
uint32_t GetNodeId() const;
FeatureID const & GetFeature() const;
uint32_t GetSegId() const;
bool IsForward() const;
private:
Type m_type;
/// \note In case of OSRM unique id is kept in |m_featureId.m_index|.
/// So |m_featureId.m_mwmId|, |m_segId| and |m_forward| have default values.
FeatureID m_featureId; // |m_featureId.m_index| is NodeID for OSRM.
uint32_t m_segId = 0; // Not valid for OSRM.
bool m_forward = true; // Segment direction in |m_featureId|.
NodeID m_nodeId = SPECIAL_NODEID;
};
string DebugPrint(UniNodeId::Type type);
namespace turns
{
/// @todo(vbykoianko) It's a good idea to gather all the turns information into one entity.
/// For the time being several separate entities reflect the turn information. Like Route::TTurns
double constexpr kFeaturesNearTurnMeters = 3.0;
/*!
* \warning The order of values below shall not be changed.
* TurnRight(TurnLeft) must have a minimal value and
* TurnSlightRight(TurnSlightLeft) must have a maximum value
* \warning The values of TurnDirection shall be synchronized with values of TurnDirection enum in
* java.
*/
enum class TurnDirection
{
NoTurn = 0,
GoStraight,
TurnRight,
TurnSharpRight,
TurnSlightRight,
TurnLeft,
TurnSharpLeft,
TurnSlightLeft,
UTurnLeft,
UTurnRight,
TakeTheExit,
EnterRoundAbout,
LeaveRoundAbout,
StayOnRoundAbout,
StartAtEndOfStreet,
ReachedYourDestination,
Count /**< This value is used for internals only. */
};
string DebugPrint(TurnDirection const l);
/*!
* \warning The values of PedestrianDirectionType shall be synchronized with values in java
*/
enum class PedestrianDirection
{
None = 0,
Upstairs,
Downstairs,
LiftGate,
Gate,
ReachedYourDestination,
Count /**< This value is used for internals only. */
};
string DebugPrint(PedestrianDirection const l);
/*!
* \warning The values of LaneWay shall be synchronized with values of LaneWay enum in java.
*/
enum class LaneWay
{
None = 0,
Reverse,
SharpLeft,
Left,
SlightLeft,
MergeToRight,
Through,
MergeToLeft,
SlightRight,
Right,
SharpRight,
Count /**< This value is used for internals only. */
};
string DebugPrint(LaneWay const l);
typedef vector<LaneWay> TSingleLane;
struct SingleLaneInfo
{
TSingleLane m_lane;
bool m_isRecommended = false;
SingleLaneInfo() = default;
SingleLaneInfo(initializer_list<LaneWay> const & l) : m_lane(l) {}
bool operator==(SingleLaneInfo const & other) const;
};
string DebugPrint(SingleLaneInfo const & singleLaneInfo);
struct TurnItem
{
TurnItem()
: m_index(numeric_limits<uint32_t>::max()),
m_turn(TurnDirection::NoTurn),
m_exitNum(0),
m_keepAnyway(false),
m_pedestrianTurn(PedestrianDirection::None)
{
}
TurnItem(uint32_t idx, TurnDirection t, uint32_t exitNum = 0)
: m_index(idx), m_turn(t), m_exitNum(exitNum), m_keepAnyway(false)
, m_pedestrianTurn(PedestrianDirection::None)
{
}
TurnItem(uint32_t idx, PedestrianDirection p)
: m_index(idx), m_turn(TurnDirection::NoTurn), m_exitNum(0), m_keepAnyway(false)
, m_pedestrianTurn(p)
{
}
bool operator==(TurnItem const & rhs) const
{
return m_index == rhs.m_index && m_turn == rhs.m_turn && m_lanes == rhs.m_lanes &&
m_exitNum == rhs.m_exitNum && m_sourceName == rhs.m_sourceName &&
m_targetName == rhs.m_targetName && m_keepAnyway == rhs.m_keepAnyway &&
m_pedestrianTurn == rhs.m_pedestrianTurn;
}
uint32_t m_index; /*!< Index of point on polyline (number of segment + 1). */
TurnDirection m_turn; /*!< The turn instruction of the TurnItem */
vector<SingleLaneInfo> m_lanes; /*!< Lane information on the edge before the turn. */
uint32_t m_exitNum; /*!< Number of exit on roundabout. */
string m_sourceName; /*!< Name of the street which the ingoing edge belongs to */
string m_targetName; /*!< Name of the street which the outgoing edge belongs to */
/*!
* \brief m_keepAnyway is true if the turn shall not be deleted
* and shall be demonstrated to an end user.
*/
bool m_keepAnyway;
/*!
* \brief m_pedestrianTurn is type of corresponding direction for a pedestrian, or None
* if there is no pedestrian specific direction
*/
PedestrianDirection m_pedestrianTurn;
};
string DebugPrint(TurnItem const & turnItem);
struct TurnItemDist
{
TurnItem m_turnItem;
double m_distMeters;
};
string DebugPrint(TurnItemDist const & turnItemDist);
string const GetTurnString(TurnDirection turn);
bool IsLeftTurn(TurnDirection t);
bool IsRightTurn(TurnDirection t);
bool IsLeftOrRightTurn(TurnDirection t);
bool IsStayOnRoad(TurnDirection t);
bool IsGoStraightOrSlightTurn(TurnDirection t);
/*!
* \param l A variant of going along a lane.
* \param t A turn direction.
* \return True if @l corresponds with @t exactly. For example it returns true
* when @l equals to LaneWay::Right and @t equals to TurnDirection::TurnRight.
* Otherwise it returns false.
*/
bool IsLaneWayConformedTurnDirection(LaneWay l, TurnDirection t);
/*!
* \param l A variant of going along a lane.
* \param t A turn direction.
* \return True if @l corresponds with @t approximately. For example it returns true
* when @l equals to LaneWay::Right and @t equals to TurnDirection::TurnSlightRight.
* Otherwise it returns false.
*/
bool IsLaneWayConformedTurnDirectionApproximately(LaneWay l, TurnDirection t);
/*!
* \brief Parse lane information which comes from @lanesString
* \param lanesString lane information. Example through|through|through|through;right
* \param lanes the result of parsing.
* \return true if @lanesString parsed successfully, false otherwise.
* Note 1: if @lanesString is empty returns false.
* Note 2: @laneString is passed by value on purpose. It'll be used(changed) in the method.
*/
bool ParseLanes(string lanesString, vector<SingleLaneInfo> & lanes);
void SplitLanes(string const & lanesString, char delimiter, vector<string> & lanes);
bool ParseSingleLane(string const & laneString, char delimiter, TSingleLane & lane);
/*!
* \returns pi minus angle from vector [junctionPoint, ingoingPoint]
* to vector [junctionPoint, outgoingPoint]. A counterclockwise rotation.
* Angle is in range [-pi, pi].
*/
double PiMinusTwoVectorsAngle(m2::PointD const & junctionPoint, m2::PointD const & ingoingPoint,
m2::PointD const & outgoingPoint);
} // namespace turns
} // namespace routing
| Zverik/omim | routing/turns.hpp | C++ | apache-2.0 | 7,503 |
define(function (require) {

    var SymbolDraw = require('../helper/SymbolDraw');
    var LargeSymbolDraw = require('../helper/LargeSymbolDraw');

    // Chart view for the scatter series type.
    require('../../echarts').extendChartView({

        type: 'scatter',

        init: function () {
            // Keep both renderers alive so render() can switch between them
            // without re-creating state.
            this._normalSymbolDraw = new SymbolDraw();
            this._largeSymbolDraw = new LargeSymbolDraw();
        },

        render: function (seriesModel, ecModel, api) {
            var data = seriesModel.getData();

            var largeSymbolDraw = this._largeSymbolDraw;
            var normalSymbolDraw = this._normalSymbolDraw;
            var group = this.group;

            // Use the batched "large" renderer only when the series opts in
            // via `large` and the point count exceeds `largeThreshold`.
            var symbolDraw = seriesModel.get('large') && data.count() > seriesModel.get('largeThreshold')
                ? largeSymbolDraw : normalSymbolDraw;

            this._symbolDraw = symbolDraw;
            symbolDraw.updateData(data);
            group.add(symbolDraw.group);

            // Detach whichever renderer is not currently in use.
            group.remove(
                symbolDraw === largeSymbolDraw
                    ? normalSymbolDraw.group : largeSymbolDraw.group
            );
        },

        updateLayout: function (seriesModel) {
            this._symbolDraw.updateLayout(seriesModel);
        },

        remove: function (ecModel, api) {
            // NOTE(review): the second argument presumably enables removal
            // animation — confirm against SymbolDraw.remove's signature.
            this._symbolDraw && this._symbolDraw.remove(api, true);
        }
    });
});
/**
* @author fanguozhu
*/
$(function()
{
    // Tab panel that hosts the three product grids.
    var tab = new TabPanel("tab",true);

    // Collapsible fieldset around the company tree.
    var f_tree = new Fieldset("f_tree","公司列表",{
        state: Fieldset.OPEN_STATE,
        topdown: false
    });

    // Company tree rooted at "-1".
    // NOTE(review): isDefaultClick:0 appears to suppress the automatic
    // initial click — confirm against PorTreeT's documentation.
    var mytree = new PorTreeT("tree", "-1", "手机公司",{isDefaultClick:0} );

    // Data wrapper shared by the tree and the tab panel.
    var dw = new DataWrapper();
    dw.service("PRtree");
    mytree.dataWrapper(dw);
    tab.dataWrapper(dw);

    // On tree-node click, retitle all three tabs after the selected
    // company, or as "all companies" when no label is present.
    tab.addChangeObserver(function(src, msg){
        var dw = this.dataWrapper();
        if (!dw) return;
        var label = msg.data.label;
        var name = msg.data.name;
        for (var i=1;i<=3;i++) {
            if (label) {
                this.setTitle(i,"["+name+"]公司产品["+i+"]");
            } else {
                this.setTitle(i,"全部公司产品["+i+"]");
            }
        }
    },PorMessage.MSG_TREE_ONCLICK);

    // One data wrapper per tab page; all backed by the PR02 service.
    var dw1 = new DataWrapper();
    dw1.service("PR02");
    var dw2 = new DataWrapper();
    dw2.service("PR02");
    var dw3 = new DataWrapper();
    dw3.service("PR02");

    // Read-only grids, one per tab page.
    var dg1 = new DataGrid("grid_1",{autoDraw:true,readonly:true});
    dg1.dataWrapper(dw1);
    var dg2 = new DataGrid("grid_2",{autoDraw:true,readonly:true});
    dg2.dataWrapper(dw2);
    var dg3 = new DataGrid("grid_3",{autoDraw:true,readonly:true});
    dg3.dataWrapper(dw3);

    // Master-detail field mapping: the tree node's "label" drives the
    // grids' "company" filter field.
    var mapping = {
        master:["label"],
        sub:["company"]
    };

    // Link the tree (master) to the three grids (details); each sub-wrapper
    // loads only while its configured tab page(s) of 'tab' are active.
    PorUtil.linkTreeAndGrid( dw,[{
        sub:dw1,
        mapping:mapping,
        tabs:{
            tab:[0,1] // loaded for the 1st tab page of 'tab' (indices 0,1)
        }
    },{
        sub:dw2,
        mapping:mapping,
        tabs:{
            tab:[2] // loaded for the 2nd tab page of 'tab'
        }
    },{
        sub:dw3,
        mapping:mapping,
        tabs:{
            tab:[3] // loaded for the 3rd tab page of 'tab'
        }
    }]);

    mytree.init();
});
/*
* Core Utils - Common Utilities.
* Copyright 2015-2016 GRyCAP (Universitat Politecnica de Valencia)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* This product combines work with different licenses. See the "NOTICE" text
* file for details on the various modules and licenses.
*
* The "NOTICE" text file is part of the distribution. Any derivative works
* that you distribute must include a readable copy of the "NOTICE" text file.
*/
package es.upv.grycap.coreutils.common;
import com.google.common.collect.Range;
/**
* Hard-coded configuration limits.
* @author Erik Torres
* @since 0.2.0
*/
public interface CoreutilsLimits {

	/** Number of processors available to the JVM when this class loaded. */
	public static final int NUM_AVAILABLE_PROCESSORS = Runtime.getRuntime().availableProcessors();

	/** Valid try-lock timeout range: closed [1, 2000] (values suggest milliseconds — confirm with consumers). */
	public static final Range<Long> TRY_LOCK_TIMEOUT_RANGE = Range.closed(1l, 2000l);

	/** Valid thread-pool maximum-size range, scaled to the machine: [min(2, cpus), max(128, cpus)]. */
	public static final Range<Integer> MAX_POOL_SIZE_RANGE = Range.closed(Math.min(2, NUM_AVAILABLE_PROCESSORS), Math.max(128, NUM_AVAILABLE_PROCESSORS));

	/** Valid keep-alive time range: [60000, 3600000] (values suggest 1 minute to 1 hour in milliseconds). */
	public static final Range<Long> KEEP_ALIVE_TIME_RANGE = Range.closed(60000l, 3600000l);

	/** Valid termination-wait timeout range: [1000, 60000] (values suggest 1 to 60 seconds in milliseconds). */
	public static final Range<Long> WAIT_TERMINATION_TIMEOUT_RANGE = Range.closed(1000l, 60000l);
}
/*
* Copyright (c) 2016 Ni YueMing<niyueming@163.com>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*
*/
package net.nym.napply.library.cookie.store;
/**
* Created by zhy on 16/3/10.
*/
/**
 * Implemented by components that expose the {@link CookieStore} they use.
 */
public interface HasCookieStore
{
    /** @return the cookie store held by this component */
    CookieStore getCookieStore();
}
| niyueming/NApply | library/src/main/java/net/nym/napply/library/cookie/store/HasCookieStore.java | Java | apache-2.0 | 754 |
/*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.compute.v1.stub;
import com.google.api.core.BetaApi;
import com.google.api.gax.httpjson.HttpJsonCallSettings;
import com.google.api.gax.httpjson.HttpJsonCallableFactory;
import com.google.api.gax.httpjson.HttpJsonOperationSnapshotCallable;
import com.google.api.gax.httpjson.HttpJsonStubCallableFactory;
import com.google.api.gax.rpc.BatchingCallSettings;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.OperationCallSettings;
import com.google.api.gax.rpc.OperationCallable;
import com.google.api.gax.rpc.PagedCallSettings;
import com.google.api.gax.rpc.ServerStreamingCallSettings;
import com.google.api.gax.rpc.ServerStreamingCallable;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.compute.v1.Operation;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
 * REST callable factory implementation for the Routes service API.
 *
 * <p>This class is for advanced usage.
 */
@Generated("by gapic-generator-java")
@BetaApi
public class HttpJsonRoutesCallableFactory
    implements HttpJsonStubCallableFactory<Operation, GlobalOperationsStub> {

  /** Creates a plain unary callable from the given HTTP/JSON settings. */
  @Override
  public <RequestT, ResponseT> UnaryCallable<RequestT, ResponseT> createUnaryCallable(
      HttpJsonCallSettings<RequestT, ResponseT> httpJsonCallSettings,
      UnaryCallSettings<RequestT, ResponseT> callSettings,
      ClientContext clientContext) {
    return HttpJsonCallableFactory.createUnaryCallable(
        httpJsonCallSettings, callSettings, clientContext);
  }

  /** Creates a unary callable whose list responses are exposed page by page. */
  @Override
  public <RequestT, ResponseT, PagedListResponseT>
      UnaryCallable<RequestT, PagedListResponseT> createPagedCallable(
          HttpJsonCallSettings<RequestT, ResponseT> httpJsonCallSettings,
          PagedCallSettings<RequestT, ResponseT, PagedListResponseT> callSettings,
          ClientContext clientContext) {
    return HttpJsonCallableFactory.createPagedCallable(
        httpJsonCallSettings, callSettings, clientContext);
  }

  /** Creates a unary callable with request batching configured. */
  @Override
  public <RequestT, ResponseT> UnaryCallable<RequestT, ResponseT> createBatchingCallable(
      HttpJsonCallSettings<RequestT, ResponseT> httpJsonCallSettings,
      BatchingCallSettings<RequestT, ResponseT> callSettings,
      ClientContext clientContext) {
    return HttpJsonCallableFactory.createBatchingCallable(
        httpJsonCallSettings, callSettings, clientContext);
  }

  /**
   * Creates a long-running-operation callable: the initial call yields an
   * {@link Operation} snapshot, which is then tracked through the
   * {@code GlobalOperationsStub}'s long-running client.
   */
  @BetaApi(
      "The surface for long-running operations is not stable yet and may change in the future.")
  @Override
  public <RequestT, ResponseT, MetadataT>
      OperationCallable<RequestT, ResponseT, MetadataT> createOperationCallable(
          HttpJsonCallSettings<RequestT, Operation> httpJsonCallSettings,
          OperationCallSettings<RequestT, ResponseT, MetadataT> callSettings,
          ClientContext clientContext,
          GlobalOperationsStub operationsStub) {
    UnaryCallable<RequestT, Operation> innerCallable =
        HttpJsonCallableFactory.createBaseUnaryCallable(
            httpJsonCallSettings, callSettings.getInitialCallSettings(), clientContext);
    HttpJsonOperationSnapshotCallable<RequestT, Operation> initialCallable =
        new HttpJsonOperationSnapshotCallable<RequestT, Operation>(
            innerCallable,
            httpJsonCallSettings.getMethodDescriptor().getOperationSnapshotFactory());
    return HttpJsonCallableFactory.createOperationCallable(
        callSettings, clientContext, operationsStub.longRunningClient(), initialCallable);
  }

  /** Creates a server-streaming callable from the given HTTP/JSON settings. */
  @Override
  public <RequestT, ResponseT>
      ServerStreamingCallable<RequestT, ResponseT> createServerStreamingCallable(
          HttpJsonCallSettings<RequestT, ResponseT> httpJsonCallSettings,
          ServerStreamingCallSettings<RequestT, ResponseT> callSettings,
          ClientContext clientContext) {
    return HttpJsonCallableFactory.createServerStreamingCallable(
        httpJsonCallSettings, callSettings, clientContext);
  }
}
| googleapis/java-compute | google-cloud-compute/src/main/java/com/google/cloud/compute/v1/stub/HttpJsonRoutesCallableFactory.java | Java | apache-2.0 | 4,575 |
/*
Copyright 2013 Semantic Discovery, Inc. (www.semanticdiscovery.com)
This file is part of the Semantic Discovery Toolkit.
The Semantic Discovery Toolkit is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
The Semantic Discovery Toolkit is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with The Semantic Discovery Toolkit. If not, see <http://www.gnu.org/licenses/>.
*/
package org.sd.token;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import org.sd.nlp.NormalizedString;
/**
* An nlp.NormalizedString implementation based on this package's tokenization.
* <p>
* @author Spence Koehler
*/
public class TokenizerNormalizedString implements NormalizedString {
private TokenizerBasedNormalizer normalizer;
private StandardTokenizer tokenizer;
private boolean lowerCaseFlag;
private boolean computed;
private Normalization _normalization;
  /**
   * Construct over the given string using the case-sensitive normalizer and
   * default tokenizer options, with the lower-case flag off.
   */
  public TokenizerNormalizedString(String string) {
    this(TokenizerBasedNormalizer.CASE_SENSITIVE_INSTANCE, new StandardTokenizer(string, TokenizerBasedNormalizer.DEFAULT_TOKENIZER_OPTIONS), false);
  }

  /**
   * Construct with the given settings.
   *
   * @param normalizer    normalizer used to compute the normalization
   * @param tokenizer     tokenizer over the original text
   * @param lowerCaseFlag per the field name, whether normalization lower-cases
   */
  public TokenizerNormalizedString(TokenizerBasedNormalizer normalizer, StandardTokenizer tokenizer, boolean lowerCaseFlag) {
    this.normalizer = normalizer;
    this.tokenizer = tokenizer;
    this.lowerCaseFlag = lowerCaseFlag;
    this.computed = false;
    reset();
  }
public StandardTokenizer getTokenizer() {
return tokenizer;
}
public void setLowerCaseFlag(boolean lowerCaseFlag) {
if (lowerCaseFlag != this.lowerCaseFlag) {
this.lowerCaseFlag = lowerCaseFlag;
reset();
}
}
public boolean getLowerCaseFlag() {
return lowerCaseFlag;
}
/**
* Set a flag indicating whether to split on camel-casing.
*/
public void setSplitOnCamelCase(boolean splitOnCamelCase) {
final Break curBreak = tokenizer.getOptions().getLowerUpperBreak();
final Break nextBreak = splitOnCamelCase ? Break.ZERO_WIDTH_SOFT_BREAK : Break.NO_BREAK;
if (curBreak != nextBreak) {
final StandardTokenizerOptions newOptions = new StandardTokenizerOptions(tokenizer.getOptions());
newOptions.setLowerUpperBreak(nextBreak);
this.tokenizer = new StandardTokenizer(tokenizer.getText(), newOptions);
reset();
}
}
private final void reset() {
this.computed = false;
}
/**
* Get the flag indicating whether to split on camel-casing.
*/
public boolean getSplitOnCamelCase() {
return tokenizer.getOptions().getLowerUpperBreak() != Break.NO_BREAK;
}
/**
* Get the length of the normalized string.
*/
public int getNormalizedLength() {
return getNormalized().length();
}
/**
* Get the normalized string.
* <p>
* Note that the normalized string may apply to only a portion of the full
* original string.
*/
public String getNormalized() {
return getNormalization().getNormalized();
}
public String toString() {
return getNormalized();
}
/**
* Get the normalized string from the start (inclusive) to end (exclusive).
* <p>
* Note that the normalized string may apply to only a portion of the full
* original string.
*/
public String getNormalized(int startPos, int endPos) {
return getNormalized().substring(startPos, endPos);
}
/**
* Get the original string that applies to the normalized string.
*/
public String getOriginal() {
return tokenizer.getText();
}
/**
* Get the original string that applies to the normalized string from the
* given index for the given number of normalized characters.
*/
public String getOriginal(int normalizedStartIndex, int normalizedLength) {
final int origStartIdx = getOriginalIndex(normalizedStartIndex);
final int origEndIdx = getOriginalIndex(normalizedStartIndex + normalizedLength);
return getOriginal().substring(origStartIdx, origEndIdx);
}
/**
* Get the index in the original string corresponding to the normalized index.
*/
public int getOriginalIndex(int normalizedIndex) {
final Integer result = getNormalization().getOriginalIndex(normalizedIndex);
return result == null ? -1 : result;
}
/**
* Get a new normalized string for the portion of this normalized string
* preceding the normalized start index (exclusive). Remove extra whitespace
* at the end of the returned string. Ensure that the returned string ends
* on an end token boundary.
*
* @return the preceding normalized string or null if empty (after skipping white).
*/
public NormalizedString getPreceding(int normalizedStartIndex) {
return getPreceding(normalizedStartIndex, true);
}
/**
* Get a new normalized string for the portion of this normalized string
* preceding the normalized start index (exclusive). Remove extra whitespace
* at the end of the returned string.
*
* @param normalizedStartIndex a token start position in the normalized string beyond the result
* @param checkEndBreak when true, skip back over non breaking chars to ensure result ends at a break.
*
* @return the preceding normalized string or null if empty (after skipping white).
*/
public NormalizedString getPreceding(int normalizedStartIndex, boolean checkEndBreak) {
NormalizedString result = null;
final int origIdx = getOriginalIndex(normalizedStartIndex);
if (origIdx >= 0) {
final Token token = tokenizer.getToken(origIdx);
if (token != null) {
final String priorText =
checkEndBreak ? tokenizer.getPriorText(token) :
tokenizer.getText().substring(0, token.getStartIndex()).trim();
if (!"".equals(priorText)) {
result = normalizer.normalize(priorText);
}
}
}
return result;
}
/**
* Find the (normalized) index of the nth token preceding the normalizedPos.
* <p>
* If normalizedPos is -1, start from the end of the string.
* If the beginning of the string is fewer than numTokens prior to normalizedPos,
* return the beginning of the string.
*/
public int getPrecedingIndex(int normalizedPos, int numTokens) {
int result = normalizedPos < 0 ? getNormalization().getNormalizedLength() : normalizedPos;
// skip back to the numTokens-th start break.
int numStarts = 0;
for (; result > 0 && numStarts < numTokens; result = findPrecedingTokenStart(result)) {
++numStarts;
}
return result;
}
/**
* Find the start of the token before the normalizedPos.
* <p>
* If normalizedPos is at a token start, the prior token (or -1 if there is
* no prior token) will be returned; otherwise, the start of the token of
* which normalizedPos is a part will be returned.
*/
public int findPrecedingTokenStart(int normalizedPos) {
final Integer priorStart = getNormalization().getBreaks().lower(normalizedPos);
return priorStart == null ? -1 : priorStart;
}
/**
* Get a new normalized string for the portion of this normalized string
* following the normalized start index (inclusive). Remove extra whitespace
* at the beginning of the returned string.
*
* @return the following normalized string or null if empty (after skipping white).
*/
public NormalizedString getRemaining(int normalizedStartIndex) {
NormalizedString result = null;
final int origIdx = getOriginalIndex(normalizedStartIndex);
if (origIdx >= 0) {
final String origText = tokenizer.getText();
final int origLen = origText.length();
if (origIdx < origLen) {
final String remainingText = origText.substring(origIdx).trim();
if (!"".equals(remainingText)) {
result = normalizer.normalize(remainingText);
}
}
}
return result;
}
/**
* Build a normalized string from this using the given normalized index range.
*/
public NormalizedString buildNormalizedString(int normalizedStartIndex, int normalizedEndIndex) {
NormalizedString result = null;
final int origStartIdx = getOriginalIndex(normalizedStartIndex);
if (origStartIdx >= 0) {
final int origEndIdx = getOriginalIndex(normalizedEndIndex);
if (origEndIdx > origStartIdx) {
final String origText = tokenizer.getText();
final int origLen = origText.length();
if (origStartIdx < origLen) {
final String string = origText.substring(origStartIdx, Math.min(origEndIdx, origLen));
result = normalizer.normalize(string);
}
}
}
return result;
}
/**
* Lowercase the normalized form in this instance.
*
* @return this instance.
*/
public NormalizedString toLowerCase() {
getNormalization().toLowerCase();
return this;
}
/**
* Get the normalized string's chars.
*/
public char[] getNormalizedChars() {
return getNormalization().getNormalizedChars();
}
/**
* Get the normalized char at the given (normalized) index.
* <p>
* NOTE: Bounds checking is left up to the caller.
*/
public char getNormalizedChar(int index) {
return getNormalization().getNormalizedChars()[index];
}
/**
* Get the original code point corresponding to the normalized char at the
* (normalized) index.
* <p>
* NOTE: Bounds checking is left up to the caller.
*/
public int getOriginalCodePoint(int nIndex) {
int result = 0;
final int origIdx = getOriginalIndex(nIndex);
if (origIdx >= 0) {
final String origText = tokenizer.getText();
final int origLen = origText.length();
if (origIdx < origLen) {
result = origText.codePointAt(origIdx);
}
}
return result;
}
/**
* Determine whether the original character corresponding to the normalized
* index is a letter or digit.
*/
public boolean isLetterOrDigit(int nIndex) {
return Character.isLetterOrDigit(getOriginalCodePoint(nIndex));
}
/**
* Get the ORIGINAL index of the first symbol (non-letter, digit, or white
* character) prior to the NORMALIZED index in the full original string.
*
* @return -1 if no symbol is found or the index of the found symbol in the
* original input string.
*/
public int findPreviousSymbolIndex(int nIndex) {
int result = -1;
final int origIdx = getOriginalIndex(nIndex);
if (origIdx >= 0) {
final String origText = tokenizer.getText();
final int origLen = origText.length();
for (result = Math.min(origIdx, origLen) - 1; result >= 0; --result) {
final int cp = origText.codePointAt(result);
if (cp != ' ' && !Character.isLetterOrDigit(cp)) break;
}
}
return result;
}
/**
* Determine whether the normalized string has a digit between the normalized
* start (inclusive) and end (exclusive).
*/
public boolean hasDigit(int nStartIndex, int nEndIndex) {
boolean result = false;
final char[] nchars = getNormalization().getNormalizedChars();
nEndIndex = Math.min(nEndIndex, nchars.length);
for (int idx = Math.max(nStartIndex, 0); idx < nEndIndex; ++idx) {
final char c = nchars[idx];
if (c <= '9' && c >= '0') {
result = true;
break;
}
}
return result;
}
/**
* Count the number of normalized words in the given range.
*/
public int numWords(int nStartIndex, int nEndIndex) {
int result = 0;
final Normalization normalization = getNormalization();
final TreeSet<Integer> breaks = normalization.getBreaks();
final int nLen = normalization.getNormalizedLength();
nEndIndex = Math.min(nEndIndex, nLen);
for (int idx = Math.max(nStartIndex, 0); idx < nEndIndex && idx >= 0; idx = breaks.higher(idx)) {
if (idx == nEndIndex - 1) break; // nEndIdex as at the beginning of a word -- doesn't count
++result;
}
return result;
}
/**
* Determine whether there is a break before the normalized startIndex.
*/
public boolean isStartBreak(int startIndex) {
return getNormalization().isBreak(startIndex - 1);
}
/**
* Determine whether there is a break after the normalized endIndex.
*/
public boolean isEndBreak(int endIndex) {
return getNormalization().isBreak(endIndex + 1);
}
/**
* Get (first) the normalized index that best corresponds to the original index.
*/
public int getNormalizedIndex(int originalIndex) {
return getNormalization().getNormalizedIndex(originalIndex);
}
/**
* Split into normalized token strings.
*/
public String[] split() {
return getNormalization().getNormalized().split("\\s+");
}
/**
* Split into normalized token strings, removing stopwords.
*/
public String[] split(Set<String> stopwords) {
final List<String> result = new ArrayList<String>();
for (NormalizedToken token = getToken(0, true); token != null; token = token.getNext(true)) {
final String ntoken = token.getNormalized();
if (stopwords == null || !stopwords.contains(ntoken)) {
result.add(ntoken);
}
}
return result.toArray(new String[result.size()]);
}
/**
* Split this normalized string into tokens.
*/
public NormalizedToken[] tokenize() {
final List<NormalizedToken> result = new ArrayList<NormalizedToken>();
for (NormalizedToken token = getToken(0, true); token != null; token = token.getNext(true)) {
result.add(token);
}
return result.toArray(new NormalizedToken[result.size()]);
}
/**
* Get the token starting from the start position, optionally skipping to a
* start break first.
*
* @return the token or null if there are no tokens to get.
*/
public NormalizedToken getToken(int startPos, boolean skipToBreak) {
NormalizedToken result = null;
startPos = getTokenStart(startPos, skipToBreak);
if (startPos < getNormalization().getNormalizedLength()) {
final int endPos = getTokenEnd(startPos);
result = new NormalizedToken(this, startPos, endPos);
}
return result;
}
/**
* Get the token after the given token, optionally skipping to a start
* break first.
*/
public NormalizedToken getNextToken(NormalizedToken curToken, boolean skipToBreak) {
NormalizedToken result = null;
if (curToken != null) {
final Normalization normalization = getNormalization();
final TreeSet<Integer> breaks = normalization.getBreaks();
final int nLen = normalization.getNormalizedLength();
int curEndPos = curToken.getEndPos();
if (skipToBreak && !normalization.isBreak(curEndPos)) {
final Integer nextBreak = breaks.higher(curEndPos);
curEndPos = (nextBreak == null) ? nLen : nextBreak;
}
if (curEndPos < nLen) {
final int startPos = getTokenStart(curEndPos + 1, true);
if (startPos < nLen) {
final int nextEndPos = getTokenEnd(startPos);
result = new NormalizedToken(this, startPos, nextEndPos);
}
}
}
return result;
}
/**
* Get the normalized token start pos at or after (normalized) startPos
* after optionally skipping to a token start position (if not already
* at one.)
*/
private final int getTokenStart(int startPos, boolean skipToBreak) {
final Normalization normalization = getNormalization();
final TreeSet<Integer> breaks = normalization.getBreaks();
final int nLen = normalization.getNormalizedLength();
if (skipToBreak && !isStartBreak(startPos)) {
final Integer nextBreak = breaks.ceiling(startPos);
startPos = nextBreak == null ? nLen : nextBreak + 1;
}
return startPos;
}
/**
* Get the normalized index just after the token starting at (normalized) startPos.
*/
private final int getTokenEnd(int startPos) {
final Normalization normalization = getNormalization();
final TreeSet<Integer> breaks = normalization.getBreaks();
final int nLen = normalization.getNormalizedLength();
final Integer endPos = breaks.higher(startPos);
return endPos == null ? nLen : endPos;
}
protected final Normalization getNormalization() {
if (!computed) {
computeNormalization();
}
return _normalization;
}
private final void computeNormalization() {
this._normalization = buildNewNormalization(tokenizer, lowerCaseFlag);
for (Token token = tokenizer.getToken(0); token != null; token = token.getNextToken()) {
_normalization.updateWithToken(token);
}
this.computed = true;
}
protected Normalization buildNewNormalization(StandardTokenizer tokenizer, boolean lowerCaseFlag) {
return new Normalization(tokenizer, lowerCaseFlag);
}
public static class Normalization {
private StandardTokenizer tokenizer;
private boolean lowerCaseFlag;
private StringBuilder normalized;
private Map<Integer, Integer> norm2orig;
private TreeSet<Integer> breaks;
private char[] _nchars;
public Normalization(StandardTokenizer tokenizer, boolean lowerCaseFlag) {
this.tokenizer = tokenizer;
this.lowerCaseFlag = lowerCaseFlag;
this.normalized = new StringBuilder();
this.norm2orig = new HashMap<Integer, Integer>();
this.breaks = new TreeSet<Integer>();
this._nchars = null;
}
public final StandardTokenizer getTokenizer() {
return tokenizer;
}
public final boolean getLowerCaseFlag() {
return lowerCaseFlag;
}
/** Get original input. */
public final String getInput() {
return tokenizer.getText();
}
/** Get the normalized string. */
public final String getNormalized() {
return normalized.toString();
}
public final char[] getNormalizedChars() {
if (_nchars == null) {
_nchars = normalized.toString().toCharArray();
}
return _nchars;
}
public final int getNormalizedLength() {
return normalized.length();
}
public final int getOriginalIndex(int normalizedIndex) {
final Integer result =
(normalizedIndex == normalized.length()) ?
tokenizer.getText().length() :
norm2orig.get(normalizedIndex);
return result == null ? -1 : result;
}
public final int getNormalizedIndex(int originalIndex) {
int result = -1;
for (Map.Entry<Integer, Integer> entry : norm2orig.entrySet()) {
final int normIdx = entry.getKey();
final int origIdx = entry.getValue();
if (originalIndex == origIdx) {
// maps to normalized char
result = normIdx;
break;
}
else if (originalIndex > origIdx) {
// maps back to normalized white (break)
result = normIdx - 1;
break;
}
}
return result;
}
/**
* Determine whether there is a break at the given index.
*/
public final boolean isBreak(int normalizedIndex) {
return !norm2orig.containsKey(normalizedIndex);
}
/** Get the normalized break positions (not including string start or end). */
public final TreeSet<Integer> getBreaks() {
return breaks;
}
/** Lowercase this instance's normalized chars. */
public final void toLowerCase() {
final String newNorm = normalized.toString().toLowerCase();
this.normalized.setLength(0);
this.normalized.append(newNorm);
this._nchars = null;
}
/**
* Build the next normalized chars from the given token using
* the "appendX" method calls.
*/
protected void updateWithToken(Token token) {
final String tokenText = lowerCaseFlag ? token.getText().toLowerCase() : token.getText();
appendNormalizedText(token.getStartIndex(), tokenText, true);
}
/**
* Append each normalized character originally starting at startIdx.
*/
protected final void appendNormalizedText(int startIdx, String normalizedTokenText) {
appendNormalizedText(startIdx, normalizedTokenText, true);
}
/**
* Append each normalized character originally starting at startIdx.
*/
protected final void appendNormalizedText(int startIdx, String normalizedTokenText, boolean addWhite) {
final int len = normalizedTokenText.length();
for (int i = 0; i < len; ++i) {
final char c = normalizedTokenText.charAt(i);
appendNormalizedChar(startIdx++, c, addWhite && i == 0);
}
}
/**
* Append the normalized character originally starting at origIdx.
*/
protected final void appendNormalizedChar(int origIdx, char c, boolean addWhite) {
int normIdx = normalized.length();
if (normIdx > 0 && addWhite) {
normalized.append(' ');
breaks.add(normIdx++);
}
norm2orig.put(normIdx, origIdx);
normalized.append(c);
_nchars = null;
}
/**
* Append the normalized characters all expanding from the originalIdx.
*/
protected final void appendExpandedText(int origIdx, String chars) {
appendExpandedText(origIdx, chars, true);
}
/**
* Append the normalized characters all expanding from the originalIdx.
*/
protected final void appendExpandedText(int origIdx, String chars, boolean addWhite) {
final int len = chars.length();
for (int i = 0; i < chars.length(); ++i) {
final char c = chars.charAt(i);
appendNormalizedChar(origIdx, c, addWhite && i == 0);
}
}
}
}
| KoehlerSB747/sd-tools | src/main/java/org/sd/token/TokenizerNormalizedString.java | Java | apache-2.0 | 22,049 |
package lodVader.spring.REST.models.degree;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import org.junit.Test;
import com.mongodb.BasicDBList;
import com.mongodb.BasicDBObject;
import com.mongodb.DBCollection;
import com.mongodb.DBCursor;
import com.mongodb.DBObject;
import lodVader.mongodb.DBSuperClass2;
import lodVader.mongodb.collections.DatasetDB;
import lodVader.mongodb.collections.DatasetLinksetDB;
import lodVader.mongodb.collections.LinksetDB;
import lodVader.mongodb.collections.ResourceDB;
/**
 * Computes the in-degree of each dataset (how many source datasets link into
 * it, and with how many links), split into "with vocabularies" and "without
 * vocabularies" passes, appending a human-readable report to {@link #result}.
 */
public class IndegreeDatasetModel {

  /** Accumulated textual report. */
  public StringBuilder result = new StringBuilder();

  /** Which target kind the current aggregation pass counts. */
  public boolean isVocabulary = true;

  /** When true, count dead links instead of live links. */
  public boolean isDeadLinks = false;

  /**
   * MapReduce functions for indegree linksets
   */
  public String mapindegreeWithVocabs;
  public String mapindegreeNoVocabs;
  public String reduceinDegree;

  /**
   * Per-target aggregation record. As a Comparator it orders by link count;
   * as a Comparable it orders by the number of distinct source datasets.
   */
  class Result implements Comparator<Result>, Comparable<Result> {
    int targetDataset;
    int links = 0;
    HashSet<Integer> sourceDatasetList = new HashSet<>();

    @Override
    public int compare(Result o1, Result o2) {
      // Integer.compare avoids the overflow risk of o1.links - o2.links.
      return Integer.compare(o1.links, o2.links);
    }

    @Override
    public int compareTo(Result o) {
      return Integer.compare(this.sourceDatasetList.size(), o.sourceDatasetList.size());
    }
  }

  HashMap<Integer, Result> tmpResults = new HashMap<Integer, Result>();
  ArrayList<Result> finalList = new ArrayList<Result>();

  @Test
  public void calc() {
    DBCollection collection = DBSuperClass2.getDBInstance().getCollection(DatasetLinksetDB.COLLECTION_NAME);

    result.append("===============================================================\n");
    result.append("Comparing with vocabularies\n");
    result.append("===============================================================\n\n");

    aggregate(queryLinksets(collection));

    result.append("\n===== Sorted by links=======");
    Collections.sort(finalList, new Result());
    printTableindegree();

    result.append("\n===== Sorted by number of datasets=======");
    Collections.sort(finalList);
    printTableindegree();

    result.append("\n\n\n\n===============================================================\n");
    result.append("Comparing without vocabularies\n");
    result.append("===============================================================\n\n");

    tmpResults = new HashMap<Integer, Result>();
    finalList = new ArrayList<Result>();
    isVocabulary = false;

    // Re-run the query: a MongoDB DBCursor is single-pass, so iterating the
    // first (exhausted) cursor again -- as the original code did -- would
    // process zero documents in this second pass.
    aggregate(queryLinksets(collection));

    result.append("\n===== Sorted by links=======");
    Collections.sort(finalList, new Result());
    printTableindegree();

    result.append("\n===== Sorted by number of datasets=======");
    Collections.sort(finalList);
    printTableindegree();
  }

  /**
   * Opens a fresh cursor over linksets with a positive (dead-)link count,
   * depending on {@link #isDeadLinks}.
   */
  private DBCursor queryLinksets(DBCollection collection) {
    if (!isDeadLinks) {
      return collection.find(new BasicDBObject(DatasetLinksetDB.LINKS, new BasicDBObject("$gt", 0)));
    }
    return collection.find(new BasicDBObject(DatasetLinksetDB.DEAD_LINKS, new BasicDBObject("$gt", 0)));
  }

  /**
   * Folds every linkset whose target matches {@link #isVocabulary} into
   * {@link #tmpResults}, then copies the aggregates into {@link #finalList}.
   */
  private void aggregate(DBCursor instances) {
    for (DBObject object : instances) {
      DatasetLinksetDB linkset = new DatasetLinksetDB(object);
      if (linkset.getDistributionTargetIsVocabulary() == isVocabulary) {
        Result aggregated = tmpResults.get(linkset.getDatasetTarget());
        if (aggregated == null) {
          aggregated = new Result();
        }
        aggregated.links += isDeadLinks ? linkset.getDeadLinks() : linkset.getLinks();
        aggregated.sourceDatasetList.add(linkset.getDatasetSource());
        aggregated.targetDataset = linkset.getDatasetTarget();
        tmpResults.put(linkset.getDatasetTarget(), aggregated);
      }
    }

    for (Integer key : tmpResults.keySet()) {
      finalList.add(tmpResults.get(key));
    }
  }

  /** Appends a "Name / indegree / Links" table for {@link #finalList}. */
  private void printTableindegree() {
    result.append("\n\nName\t indegree \t Links \n");
    DatasetDB tmpDataset;
    for (Result r : finalList) {
      tmpDataset = new DatasetDB(r.targetDataset);
      result.append(tmpDataset.getTitle());
      result.append("\t" + r.sourceDatasetList.size());
      result.append("\t" + r.links);
      result.append("\n");
    }
    result.append("\n\n\n");
  }
}
| AKSW/LODVader | src/main/java/lodVader/spring/REST/models/degree/IndegreeDatasetModel.java | Java | apache-2.0 | 5,358 |
package org.minimalj.example.petclinic.frontend;
import org.minimalj.backend.Backend;
import org.minimalj.example.petclinic.model.Vet;
import org.minimalj.frontend.Frontend;
import org.minimalj.frontend.editor.Editor.NewObjectEditor;
import org.minimalj.frontend.form.Form;
/**
 * Editor for creating a new {@link Vet}: collects first name, last name and
 * specialties, persists via the backend, then returns to the vet list page.
 */
public class AddVetEditor extends NewObjectEditor<Vet> {

	/** Builds the input form with the vet's name fields and specialties. */
	@Override
	protected Form<Vet> createForm() {
		Form<Vet> form = new Form<>();
		form.line(Vet.$.person.firstName);
		form.line(Vet.$.person.lastName);
		form.line(Vet.$.specialties);
		return form;
	}

	/**
	 * Persists the new vet.
	 * (Parameter renamed from "owner" -- a copy/paste leftover from the
	 * owner editor that misdescribed the value.)
	 */
	@Override
	protected Vet save(Vet vet) {
		return Backend.save(vet);
	}

	/** After a successful save, navigate to the vet table page. */
	@Override
	protected void finished(Vet newVet) {
		Frontend.show(new VetTablePage());
	}
}
| BrunoEberhard/minimal-j | example/007_PetClinic/src/org/minimalj/example/petclinic/frontend/AddVetEditor.java | Java | apache-2.0 | 710 |
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Cellar Release Notes documentation build configuration file, adapted from
# the standard sphinx-quickstart template.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))

# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'oslosphinx',
    'reno.sphinxext',
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.rst'

# The encoding of source files.
# source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'cellar Release Notes'
copyright = u'2016, OpenStack Foundation'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
# The full version, including alpha/beta/rc tags.
release = ''
# The short X.Y version.
version = ''

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []

# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []

# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False

# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []

# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None

# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None

# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
# html_extra_path = []

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}

# If false, no module index is generated.
# html_domain_indices = True

# If false, no index is generated.
# html_use_index = True

# If true, the index is split into individual pages for each letter.
# html_split_index = False

# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None

# Output file base name for HTML help builder.
# NOTE: this and the LaTeX/man/texinfo entries below previously still said
# "Glance" -- a leftover from the template this file was copied from.
htmlhelp_basename = 'CellarReleaseNotesdoc'


# -- Options for LaTeX output ---------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    # 'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    # 'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    # 'preamble': '',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    ('index', 'CellarReleaseNotes.tex', u'Cellar Release Notes Documentation',
     u'Cellar Developers', 'manual'),
]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False

# If true, show page references after internal links.
# latex_show_pagerefs = False

# If true, show URL addresses after external links.
# latex_show_urls = False

# Documents to append as an appendix to all manuals.
# latex_appendices = []

# If false, no module index is generated.
# latex_domain_indices = True

# -- Options for manual page output ---------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'cellarreleasenotes', u'Cellar Release Notes Documentation',
     [u'Cellar Developers'], 1)
]

# If true, show URL addresses after external links.
# man_show_urls = False

# -- Options for Texinfo output -------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    ('index', 'CellarReleaseNotes', u'Cellar Release Notes Documentation',
     u'Cellar Developers', 'CellarReleaseNotes',
     'One line description of project.',
     'Miscellaneous'),
]

# Documents to append as an appendix to all manuals.
# texinfo_appendices = []

# If false, no module index is generated.
# texinfo_domain_indices = True

# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'

# If true, do not generate a @detailmenu in the "Top" node's menu.
# texinfo_no_detailmenu = False
/*
* Copyright 2019 Frederic Thevenet
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.binjr.common.javafx.controls;
import javafx.scene.Node;
import javafx.scene.SnapshotParameters;
import javafx.scene.image.WritableImage;
import javafx.scene.transform.Transform;
import javafx.stage.Screen;
/**
 * Static helpers for taking snapshots of JavaFX {@link Node} instances,
 * optionally scaled for HiDPI (high output-scale) displays.
 *
 * <p>This is a non-instantiable utility class.
 */
public final class SnapshotUtils {

    private SnapshotUtils() {
        // Utility class: prevent instantiation.
    }

    /**
     * Takes a snapshot of the given node scaled by the primary screen's
     * output scale factors, so the image is crisp on HiDPI displays.
     *
     * @param node the node to render
     * @return the rendered image
     */
    public static WritableImage outputScaleAwareSnapshot(Node node) {
        // 0.0 means "use the primary screen's output scale" (see below).
        return scaledSnapshot(node, 0.0, 0.0);
    }

    /**
     * Takes a snapshot of the given node with the supplied scale factors.
     * A factor of {@code 0.0} is replaced by the primary screen's
     * corresponding output scale.
     *
     * @param node   the node to render
     * @param scaleX horizontal scale factor, or 0.0 for the screen default
     * @param scaleY vertical scale factor, or 0.0 for the screen default
     * @return the rendered image
     */
    public static WritableImage scaledSnapshot(Node node, double scaleX, double scaleY) {
        SnapshotParameters spa = new SnapshotParameters();
        spa.setTransform(Transform.scale(
                scaleX == 0.0 ? Screen.getPrimary().getOutputScaleX() : scaleX,
                scaleY == 0.0 ? Screen.getPrimary().getOutputScaleY() : scaleY));
        return node.snapshot(spa, null);
    }
}
| fthevenet/binjr | binjr-core/src/main/java/eu/binjr/common/javafx/controls/SnapshotUtils.java | Java | apache-2.0 | 1,413 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2010 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Test suite for XenAPI."""
import ast
import contextlib
import datetime
import functools
import os
import re
import mox
from nova.compute import aggregate_states
from nova.compute import instance_types
from nova.compute import power_state
from nova.compute import task_states
from nova.compute import utils as compute_utils
from nova.compute import vm_states
from nova import context
from nova import db
from nova import exception
from nova import flags
from nova import log as logging
from nova.openstack.common import importutils
from nova import test
from nova.tests.db import fakes as db_fakes
from nova.tests import fake_network
from nova.tests import fake_utils
from nova.tests.glance import stubs as glance_stubs
from nova.tests.xenapi import stubs
from nova.virt.xenapi import connection as xenapi_conn
from nova.virt.xenapi import fake as xenapi_fake
from nova.virt.xenapi import vm_utils
from nova.virt.xenapi import vmops
from nova.virt.xenapi import volume_utils
LOG = logging.getLogger(__name__)
FLAGS = flags.FLAGS
def stub_vm_utils_with_vdi_attached_here(function, should_return=True):
    """
    Decorator that stubs out vm_utils' VDI-attach helpers for one test.
    vm_utils.with_vdi_attached_here needs to be stubbed out because it
    calls down to the filesystem to attach a vdi. This provides a
    decorator to handle that.
    :param function: the test method to wrap.
    :param should_return: value the fake _is_vdi_pv stub will report.
    """
    @functools.wraps(function)
    def decorated_function(self, *args, **kwargs):
        @contextlib.contextmanager
        def fake_vdi_attached_here(*args, **kwargs):
            # Yield a dummy device name instead of really attaching a VDI.
            fake_dev = 'fakedev'
            yield fake_dev
        def fake_stream_disk(*args, **kwargs):
            # No-op: nothing is actually streamed to the fake device.
            pass
        def fake_is_vdi_pv(*args, **kwargs):
            return should_return
        # Save the real implementations so they can be restored afterwards.
        orig_vdi_attached_here = vm_utils.vdi_attached_here
        orig_stream_disk = vm_utils._stream_disk
        orig_is_vdi_pv = vm_utils._is_vdi_pv
        try:
            vm_utils.vdi_attached_here = fake_vdi_attached_here
            vm_utils._stream_disk = fake_stream_disk
            vm_utils._is_vdi_pv = fake_is_vdi_pv
            return function(self, *args, **kwargs)
        finally:
            # Restore the originals even if the wrapped test raises.
            vm_utils._is_vdi_pv = orig_is_vdi_pv
            vm_utils._stream_disk = orig_stream_disk
            vm_utils.vdi_attached_here = orig_vdi_attached_here
    return decorated_function
class XenAPIVolumeTestCase(test.TestCase):
"""Unit tests for Volume operations."""
def setUp(self):
super(XenAPIVolumeTestCase, self).setUp()
self.user_id = 'fake'
self.project_id = 'fake'
self.context = context.RequestContext(self.user_id, self.project_id)
self.flags(target_host='127.0.0.1',
xenapi_connection_url='test_url',
xenapi_connection_password='test_pass',
firewall_driver='nova.virt.xenapi.firewall.'
'Dom0IptablesFirewallDriver')
db_fakes.stub_out_db_instance_api(self.stubs)
xenapi_fake.reset()
self.instance_values = {'id': 1,
'project_id': self.user_id,
'user_id': 'fake',
'image_ref': 1,
'kernel_id': 2,
'ramdisk_id': 3,
'root_gb': 20,
'instance_type_id': '3', # m1.large
'os_type': 'linux',
'architecture': 'x86-64'}
def _create_volume(self, size='0'):
"""Create a volume object."""
vol = {}
vol['size'] = size
vol['user_id'] = 'fake'
vol['project_id'] = 'fake'
vol['host'] = 'localhost'
vol['availability_zone'] = FLAGS.storage_availability_zone
vol['status'] = "creating"
vol['attach_status'] = "detached"
return db.volume_create(self.context, vol)
@staticmethod
def _make_info():
return {
'driver_volume_type': 'iscsi',
'data': {
'volume_id': 1,
'target_iqn': 'iqn.2010-10.org.openstack:volume-00000001',
'target_portal': '127.0.0.1:3260,fake',
'target_lun': None,
'auth_method': 'CHAP',
'auth_method': 'fake',
'auth_method': 'fake',
}
}
def test_mountpoint_to_number(self):
cases = {
'sda': 0,
'sdp': 15,
'hda': 0,
'hdp': 15,
'vda': 0,
'xvda': 0,
'0': 0,
'10': 10,
'vdq': -1,
'sdq': -1,
'hdq': -1,
'xvdq': -1,
}
for (input, expected) in cases.iteritems():
func = volume_utils.VolumeHelper.mountpoint_to_number
actual = func(input)
self.assertEqual(actual, expected,
'%s yielded %s, not %s' % (input, actual, expected))
    def test_parse_volume_info_raise_exception(self):
        """parse_volume_info raises StorageError for a bad mountpoint.

        (Also demonstrates how to test helper classes' methods.)
        """
        stubs.stubout_session(self.stubs, stubs.FakeSessionForVolumeTests)
        session = xenapi_conn.XenAPISession('test_url', 'root', 'test_pass')
        helper = volume_utils.VolumeHelper
        helper.XenAPI = session.get_imported_xenapi()
        vol = self._create_volume()
        # oops, wrong mount point! 'dev/sd' is deliberately malformed so
        # parsing must fail with StorageError.
        self.assertRaises(volume_utils.StorageError,
                          helper.parse_volume_info,
                          self._make_info(),
                          'dev/sd'
                          )
        # Clean up the volume record created above.
        db.volume_destroy(context.get_admin_context(), vol['id'])
    def test_attach_volume(self):
        """attach_volume creates a VBD linking the volume to the VM.

        (Also demonstrates how to test Ops classes' methods.)
        """
        stubs.stubout_session(self.stubs, stubs.FakeSessionForVolumeTests)
        conn = xenapi_conn.get_connection(False)
        volume = self._create_volume()
        instance = db.instance_create(self.context, self.instance_values)
        vm = xenapi_fake.create_vm(instance.name, 'Running')
        result = conn.attach_volume(self._make_info(),
                                    instance.name, '/dev/sdc')
        # check that the VM has a VBD attached to it
        # Get XenAPI record for VBD
        vbds = xenapi_fake.get_all('VBD')
        vbd = xenapi_fake.get_record('VBD', vbds[0])
        vm_ref = vbd['VM']
        # The VBD's VM reference must point back at the VM we created.
        self.assertEqual(vm_ref, vm)
    def test_attach_volume_raise_exception(self):
        """attach_volume with an unknown driver type raises.

        (Also demonstrates how to test when exceptions are raised.)
        """
        stubs.stubout_session(self.stubs,
                              stubs.FakeSessionForVolumeFailedTests)
        conn = xenapi_conn.get_connection(False)
        volume = self._create_volume()
        instance = db.instance_create(self.context, self.instance_values)
        xenapi_fake.create_vm(instance.name, 'Running')
        # 'nonexist' is not a registered volume driver type.
        self.assertRaises(exception.VolumeDriverNotFound,
                          conn.attach_volume,
                          {'driver_volume_type': 'nonexist'},
                          instance.name,
                          '/dev/sdc')
class XenAPIVMTestCase(test.TestCase):
"""Unit tests for VM operations."""
def setUp(self):
super(XenAPIVMTestCase, self).setUp()
self.network = importutils.import_object(FLAGS.network_manager)
self.flags(xenapi_connection_url='test_url',
xenapi_connection_password='test_pass',
instance_name_template='%d',
firewall_driver='nova.virt.xenapi.firewall.'
'Dom0IptablesFirewallDriver')
xenapi_fake.reset()
xenapi_fake.create_local_srs()
xenapi_fake.create_local_pifs()
db_fakes.stub_out_db_instance_api(self.stubs)
xenapi_fake.create_network('fake', FLAGS.flat_network_bridge)
stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
stubs.stubout_get_this_vm_uuid(self.stubs)
stubs.stubout_stream_disk(self.stubs)
stubs.stubout_is_vdi_pv(self.stubs)
stubs.stub_out_vm_methods(self.stubs)
glance_stubs.stubout_glance_client(self.stubs)
fake_utils.stub_out_utils_execute(self.stubs)
self.user_id = 'fake'
self.project_id = 'fake'
self.context = context.RequestContext(self.user_id, self.project_id)
self.conn = xenapi_conn.get_connection(False)
def test_init_host(self):
session = xenapi_conn.XenAPISession('test_url', 'root', 'test_pass')
vm = vm_utils.get_this_vm_ref(session)
# Local root disk
vdi0 = xenapi_fake.create_vdi('compute', None)
vbd0 = xenapi_fake.create_vbd(vm, vdi0)
# Instance VDI
vdi1 = xenapi_fake.create_vdi('instance-aaaa', None,
other_config={'nova_instance_uuid': 'aaaa'})
vbd1 = xenapi_fake.create_vbd(vm, vdi1)
# Only looks like instance VDI
vdi2 = xenapi_fake.create_vdi('instance-bbbb', None)
vbd2 = xenapi_fake.create_vbd(vm, vdi2)
self.conn.init_host(None)
self.assertEquals(set(xenapi_fake.get_all('VBD')), set([vbd0, vbd2]))
def test_list_instances_0(self):
instances = self.conn.list_instances()
self.assertEquals(instances, [])
def test_get_rrd_server(self):
self.flags(xenapi_connection_url='myscheme://myaddress/')
server_info = vm_utils.get_rrd_server()
self.assertEqual(server_info[0], 'myscheme')
self.assertEqual(server_info[1], 'myaddress')
def test_get_diagnostics(self):
def fake_get_rrd(host, vm_uuid):
with open('xenapi/vm_rrd.xml') as f:
return re.sub(r'\s', '', f.read())
self.stubs.Set(vm_utils, 'get_rrd', fake_get_rrd)
fake_diagnostics = {
'vbd_xvdb_write': '0.0',
'memory_target': '10961792000.0000',
'memory_internal_free': '3612860.6020',
'memory': '10961792000.0000',
'vbd_xvda_write': '0.0',
'cpu0': '0.0110',
'vif_0_tx': '752.4007',
'vbd_xvda_read': '0.0',
'vif_0_rx': '4837.8805'
}
instance = self._create_instance()
expected = self.conn.get_diagnostics(instance)
self.assertDictMatch(fake_diagnostics, expected)
def test_instance_snapshot_fails_with_no_primary_vdi(self):
def create_bad_vbd(vm_ref, vdi_ref):
vbd_rec = {'VM': vm_ref,
'VDI': vdi_ref,
'userdevice': 'fake',
'currently_attached': False}
vbd_ref = xenapi_fake._create_object('VBD', vbd_rec)
xenapi_fake.after_VBD_create(vbd_ref, vbd_rec)
return vbd_ref
self.stubs.Set(xenapi_fake, 'create_vbd', create_bad_vbd)
stubs.stubout_instance_snapshot(self.stubs)
# Stubbing out firewall driver as previous stub sets alters
# xml rpc result parsing
stubs.stubout_firewall_driver(self.stubs, self.conn)
instance = self._create_instance()
name = "MySnapshot"
self.assertRaises(exception.NovaException, self.conn.snapshot,
self.context, instance, name)
def test_instance_snapshot(self):
stubs.stubout_instance_snapshot(self.stubs)
stubs.stubout_is_snapshot(self.stubs)
# Stubbing out firewall driver as previous stub sets alters
# xml rpc result parsing
stubs.stubout_firewall_driver(self.stubs, self.conn)
instance = self._create_instance()
name = "MySnapshot"
template_vm_ref = self.conn.snapshot(self.context, instance, name)
# Ensure VM was torn down
vm_labels = []
for vm_ref in xenapi_fake.get_all('VM'):
vm_rec = xenapi_fake.get_record('VM', vm_ref)
if not vm_rec["is_control_domain"]:
vm_labels.append(vm_rec["name_label"])
self.assertEquals(vm_labels, [instance.name])
# Ensure VBDs were torn down
vbd_labels = []
for vbd_ref in xenapi_fake.get_all('VBD'):
vbd_rec = xenapi_fake.get_record('VBD', vbd_ref)
vbd_labels.append(vbd_rec["vm_name_label"])
self.assertEquals(vbd_labels, [instance.name])
# Ensure VDIs were torn down
for vdi_ref in xenapi_fake.get_all('VDI'):
vdi_rec = xenapi_fake.get_record('VDI', vdi_ref)
name_label = vdi_rec["name_label"]
self.assert_(not name_label.endswith('snapshot'))
def create_vm_record(self, conn, os_type, name):
instances = conn.list_instances()
self.assertEquals(instances, [name])
# Get Nova record for VM
vm_info = conn.get_info({'name': name})
# Get XenAPI record for VM
vms = [rec for ref, rec
in xenapi_fake.get_all_records('VM').iteritems()
if not rec['is_control_domain']]
vm = vms[0]
self.vm_info = vm_info
self.vm = vm
    def check_vm_record(self, conn, check_injection=False):
        """Assert the Nova and XenAPI VM records match the m1.large type.

        NOTE(review): `conn` is passed to db.instance_type_get_by_name
        below where sibling tests pass a request context (see
        test_migrate_disk_and_power_off); the stubbed db layer appears
        to ignore the first argument -- confirm before reusing this
        pattern outside these stubs.
        """
        # Check that m1.large above turned into the right thing.
        instance_type = db.instance_type_get_by_name(conn, 'm1.large')
        mem_kib = long(instance_type['memory_mb']) << 10
        mem_bytes = str(mem_kib << 10)
        vcpus = instance_type['vcpus']
        self.assertEquals(self.vm_info['max_mem'], mem_kib)
        self.assertEquals(self.vm_info['mem'], mem_kib)
        self.assertEquals(self.vm['memory_static_max'], mem_bytes)
        self.assertEquals(self.vm['memory_dynamic_max'], mem_bytes)
        self.assertEquals(self.vm['memory_dynamic_min'], mem_bytes)
        self.assertEquals(self.vm['VCPUs_max'], str(vcpus))
        self.assertEquals(self.vm['VCPUs_at_startup'], str(vcpus))
        # Check that the VM is running according to Nova
        self.assertEquals(self.vm_info['state'], power_state.RUNNING)
        # Check that the VM is running according to XenAPI.
        self.assertEquals(self.vm['power_state'], 'Running')
        if check_injection:
            # Network config injected via xenstore must round-trip intact.
            xenstore_data = self.vm['xenstore_data']
            self.assertEquals(xenstore_data['vm-data/hostname'], 'test')
            key = 'vm-data/networking/DEADBEEF0000'
            xenstore_value = xenstore_data[key]
            tcpip_data = ast.literal_eval(xenstore_value)
            self.assertEquals(tcpip_data,
                              {'broadcast': '192.168.0.255',
                               'dns': ['192.168.0.1'],
                               'gateway': '192.168.0.1',
                               'gateway_v6': 'dead:beef::1',
                               'ip6s': [{'enabled': '1',
                                         'ip': 'dead:beef::dcad:beff:feef:0',
                                         'netmask': '64'}],
                               'ips': [{'enabled': '1',
                                        'ip': '192.168.0.100',
                                        'netmask': '255.255.255.0'}],
                               'dhcp_server': '192.168.0.1',
                               'label': 'fake',
                               'mac': 'DE:AD:BE:EF:00:00',
                               'rxtx_cap': 3})
def check_vm_params_for_windows(self):
self.assertEquals(self.vm['platform']['nx'], 'true')
self.assertEquals(self.vm['HVM_boot_params'], {'order': 'dc'})
self.assertEquals(self.vm['HVM_boot_policy'], 'BIOS order')
# check that these are not set
self.assertEquals(self.vm['PV_args'], '')
self.assertEquals(self.vm['PV_bootloader'], '')
self.assertEquals(self.vm['PV_kernel'], '')
self.assertEquals(self.vm['PV_ramdisk'], '')
def check_vm_params_for_linux(self):
self.assertEquals(self.vm['platform']['nx'], 'false')
self.assertEquals(self.vm['PV_args'], '')
self.assertEquals(self.vm['PV_bootloader'], 'pygrub')
# check that these are not set
self.assertEquals(self.vm['PV_kernel'], '')
self.assertEquals(self.vm['PV_ramdisk'], '')
self.assertEquals(self.vm['HVM_boot_params'], {})
self.assertEquals(self.vm['HVM_boot_policy'], '')
def check_vm_params_for_linux_with_external_kernel(self):
self.assertEquals(self.vm['platform']['nx'], 'false')
self.assertEquals(self.vm['PV_args'], 'root=/dev/xvda1')
self.assertNotEquals(self.vm['PV_kernel'], '')
self.assertNotEquals(self.vm['PV_ramdisk'], '')
# check that these are not set
self.assertEquals(self.vm['HVM_boot_params'], {})
self.assertEquals(self.vm['HVM_boot_policy'], '')
def _list_vdis(self):
url = FLAGS.xenapi_connection_url
username = FLAGS.xenapi_connection_username
password = FLAGS.xenapi_connection_password
session = xenapi_conn.XenAPISession(url, username, password)
return session.call_xenapi('VDI.get_all')
def _check_vdis(self, start_list, end_list):
for vdi_ref in end_list:
if not vdi_ref in start_list:
vdi_rec = xenapi_fake.get_record('VDI', vdi_ref)
# If the cache is turned on then the base disk will be
# there even after the cleanup
if 'other_config' in vdi_rec:
if vdi_rec['other_config']['image-id'] is None:
self.fail('Found unexpected VDI:%s' % vdi_ref)
else:
self.fail('Found unexpected VDI:%s' % vdi_ref)
def _test_spawn(self, image_ref, kernel_id, ramdisk_id,
instance_type_id="3", os_type="linux",
hostname="test", architecture="x86-64", instance_id=1,
check_injection=False,
create_record=True, empty_dns=False):
if create_record:
instance_values = {'id': instance_id,
'project_id': self.project_id,
'user_id': self.user_id,
'image_ref': image_ref,
'kernel_id': kernel_id,
'ramdisk_id': ramdisk_id,
'root_gb': 20,
'instance_type_id': instance_type_id,
'os_type': os_type,
'hostname': hostname,
'architecture': architecture}
instance = db.instance_create(self.context, instance_values)
else:
instance = db.instance_get(self.context, instance_id)
network_info = [({'bridge': 'fa0', 'id': 0,
'injected': True,
'cidr': '192.168.0.0/24',
'cidr_v6': 'dead:beef::1/120',
},
{'broadcast': '192.168.0.255',
'dns': ['192.168.0.1'],
'gateway': '192.168.0.1',
'gateway_v6': 'dead:beef::1',
'ip6s': [{'enabled': '1',
'ip': 'dead:beef::dcad:beff:feef:0',
'netmask': '64'}],
'ips': [{'enabled': '1',
'ip': '192.168.0.100',
'netmask': '255.255.255.0'}],
'dhcp_server': '192.168.0.1',
'label': 'fake',
'mac': 'DE:AD:BE:EF:00:00',
'rxtx_cap': 3})]
if empty_dns:
network_info[0][1]['dns'] = []
# admin_pass isn't part of the DB model, but it does get set as
# an attribute for spawn to use
instance.admin_pass = 'herp'
image_meta = {'id': glance_stubs.FakeGlance.IMAGE_VHD,
'disk_format': 'vhd'}
self.conn.spawn(self.context, instance, image_meta, network_info)
self.create_vm_record(self.conn, os_type, instance['name'])
self.check_vm_record(self.conn, check_injection)
self.assertTrue(instance.os_type)
self.assertTrue(instance.architecture)
def test_spawn_empty_dns(self):
"""Test spawning with an empty dns list"""
self._test_spawn(glance_stubs.FakeGlance.IMAGE_VHD, None, None,
os_type="linux", architecture="x86-64",
empty_dns=True)
self.check_vm_params_for_linux()
def test_spawn_not_enough_memory(self):
self.assertRaises(exception.InsufficientFreeMemory,
self._test_spawn,
1, 2, 3, "4") # m1.xlarge
def test_spawn_fail_cleanup_1(self):
"""Simulates an error while downloading an image.
Verifies that VDIs created are properly cleaned up.
"""
vdi_recs_start = self._list_vdis()
stubs.stubout_fetch_image_glance_disk(self.stubs, raise_failure=True)
self.assertRaises(xenapi_fake.Failure,
self._test_spawn, 1, 2, 3)
# No additional VDI should be found.
vdi_recs_end = self._list_vdis()
self._check_vdis(vdi_recs_start, vdi_recs_end)
def test_spawn_fail_cleanup_2(self):
"""Simulates an error while creating VM record.
It verifies that VDIs created are properly cleaned up.
"""
vdi_recs_start = self._list_vdis()
stubs.stubout_create_vm(self.stubs)
self.assertRaises(xenapi_fake.Failure,
self._test_spawn, 1, 2, 3)
# No additional VDI should be found.
vdi_recs_end = self._list_vdis()
self._check_vdis(vdi_recs_start, vdi_recs_end)
@stub_vm_utils_with_vdi_attached_here
def test_spawn_raw_glance(self):
self._test_spawn(glance_stubs.FakeGlance.IMAGE_RAW, None, None)
self.check_vm_params_for_linux()
def test_spawn_vhd_glance_linux(self):
self._test_spawn(glance_stubs.FakeGlance.IMAGE_VHD, None, None,
os_type="linux", architecture="x86-64")
self.check_vm_params_for_linux()
def test_spawn_vhd_glance_swapdisk(self):
# Change the default host_call_plugin to one that'll return
# a swap disk
orig_func = stubs.FakeSessionForVMTests.host_call_plugin
_host_call_plugin = stubs.FakeSessionForVMTests.host_call_plugin_swap
stubs.FakeSessionForVMTests.host_call_plugin = _host_call_plugin
# Stubbing out firewall driver as previous stub sets a particular
# stub for async plugin calls
stubs.stubout_firewall_driver(self.stubs, self.conn)
try:
# We'll steal the above glance linux test
self.test_spawn_vhd_glance_linux()
finally:
# Make sure to put this back
stubs.FakeSessionForVMTests.host_call_plugin = orig_func
# We should have 2 VBDs.
self.assertEqual(len(self.vm['VBDs']), 2)
# Now test that we have 1.
self.tearDown()
self.setUp()
self.test_spawn_vhd_glance_linux()
self.assertEqual(len(self.vm['VBDs']), 1)
def test_spawn_vhd_glance_windows(self):
self._test_spawn(glance_stubs.FakeGlance.IMAGE_VHD, None, None,
os_type="windows", architecture="i386")
self.check_vm_params_for_windows()
def test_spawn_iso_glance(self):
self._test_spawn(glance_stubs.FakeGlance.IMAGE_ISO, None, None,
os_type="windows", architecture="i386")
self.check_vm_params_for_windows()
def test_spawn_glance(self):
stubs.stubout_fetch_image_glance_disk(self.stubs)
self._test_spawn(glance_stubs.FakeGlance.IMAGE_MACHINE,
glance_stubs.FakeGlance.IMAGE_KERNEL,
glance_stubs.FakeGlance.IMAGE_RAMDISK)
self.check_vm_params_for_linux_with_external_kernel()
def test_spawn_netinject_file(self):
self.flags(flat_injected=True)
db_fakes.stub_out_db_instance_api(self.stubs, injected=True)
self._tee_executed = False
def _tee_handler(cmd, **kwargs):
input = kwargs.get('process_input', None)
self.assertNotEqual(input, None)
config = [line.strip() for line in input.split("\n")]
# Find the start of eth0 configuration and check it
index = config.index('auto eth0')
self.assertEquals(config[index + 1:index + 8], [
'iface eth0 inet static',
'address 192.168.0.100',
'netmask 255.255.255.0',
'broadcast 192.168.0.255',
'gateway 192.168.0.1',
'dns-nameservers 192.168.0.1',
''])
self._tee_executed = True
return '', ''
fake_utils.fake_execute_set_repliers([
# Capture the tee .../etc/network/interfaces command
(r'tee.*interfaces', _tee_handler),
])
self._test_spawn(glance_stubs.FakeGlance.IMAGE_MACHINE,
glance_stubs.FakeGlance.IMAGE_KERNEL,
glance_stubs.FakeGlance.IMAGE_RAMDISK,
check_injection=True)
self.assertTrue(self._tee_executed)
    def test_spawn_netinject_xenstore(self):
        """When a guest agent is present, config is NOT injected via tee."""
        db_fakes.stub_out_db_instance_api(self.stubs, injected=True)
        self._tee_executed = False
        def _mount_handler(cmd, *ignore_args, **ignore_kwargs):
            # When mounting, create real files under the mountpoint to
            # simulate files in the mounted filesystem.
            # The mount point will be the last item of the command list.
            self._tmpdir = cmd[len(cmd) - 1]
            LOG.debug(_('Creating files in %s to simulate guest agent'),
                      self._tmpdir)
            os.makedirs(os.path.join(self._tmpdir, 'usr', 'sbin'))
            # Touch the file using open
            open(os.path.join(self._tmpdir, 'usr', 'sbin',
                              'xe-update-networking'), 'w').close()
            return '', ''
        def _umount_handler(cmd, *ignore_args, **ignore_kwargs):
            # Umount would normally make files in the mounted filesystem
            # disappear, so do that here.
            LOG.debug(_('Removing simulated guest agent files in %s'),
                      self._tmpdir)
            os.remove(os.path.join(self._tmpdir, 'usr', 'sbin',
                                   'xe-update-networking'))
            os.rmdir(os.path.join(self._tmpdir, 'usr', 'sbin'))
            os.rmdir(os.path.join(self._tmpdir, 'usr'))
            return '', ''
        def _tee_handler(cmd, *ignore_args, **ignore_kwargs):
            # Record that file-based injection was attempted.
            self._tee_executed = True
            return '', ''
        fake_utils.fake_execute_set_repliers([
            (r'mount', _mount_handler),
            (r'umount', _umount_handler),
            (r'tee.*interfaces', _tee_handler)])
        self._test_spawn(1, 2, 3, check_injection=True)
        # tee must not run in this case, where an injection-capable
        # guest agent is detected
        self.assertFalse(self._tee_executed)
def test_spawn_vlanmanager(self):
self.flags(image_service='nova.image.glance.GlanceImageService',
network_manager='nova.network.manager.VlanManager',
vlan_interface='fake0')
def dummy(*args, **kwargs):
pass
self.stubs.Set(vmops.VMOps, '_create_vifs', dummy)
# Reset network table
xenapi_fake.reset_table('network')
# Instance id = 2 will use vlan network (see db/fakes.py)
ctxt = self.context.elevated()
instance = self._create_instance(2, False)
networks = self.network.db.network_get_all(ctxt)
for network in networks:
self.network.set_network_host(ctxt, network)
self.network.allocate_for_instance(ctxt,
instance_id=2,
instance_uuid="00000000-0000-0000-0000-000000000000",
host=FLAGS.host,
vpn=None,
rxtx_factor=3,
project_id=self.project_id)
self._test_spawn(glance_stubs.FakeGlance.IMAGE_MACHINE,
glance_stubs.FakeGlance.IMAGE_KERNEL,
glance_stubs.FakeGlance.IMAGE_RAMDISK,
instance_id=2,
create_record=False)
# TODO(salvatore-orlando): a complete test here would require
# a check for making sure the bridge for the VM's VIF is
# consistent with bridge specified in nova db
def test_spawn_with_network_qos(self):
self._create_instance()
for vif_ref in xenapi_fake.get_all('VIF'):
vif_rec = xenapi_fake.get_record('VIF', vif_ref)
self.assertEquals(vif_rec['qos_algorithm_type'], 'ratelimit')
self.assertEquals(vif_rec['qos_algorithm_params']['kbps'],
str(3 * 1024))
def test_rescue(self):
instance = self._create_instance()
session = xenapi_conn.XenAPISession('test_url', 'root', 'test_pass')
vm = vm_utils.VMHelper.lookup(session, instance.name)
vbd = xenapi_fake.create_vbd(vm, None)
conn = xenapi_conn.get_connection(False)
image_meta = {'id': glance_stubs.FakeGlance.IMAGE_VHD,
'disk_format': 'vhd'}
conn.rescue(self.context, instance, [], image_meta)
def test_unrescue(self):
instance = self._create_instance()
conn = xenapi_conn.get_connection(False)
# Unrescue expects the original instance to be powered off
conn.power_off(instance)
rescue_vm = xenapi_fake.create_vm(instance.name + '-rescue', 'Running')
conn.unrescue(instance, None)
def test_unrescue_not_in_rescue(self):
instance = self._create_instance()
conn = xenapi_conn.get_connection(False)
# Ensure that it will not unrescue a non-rescued instance.
self.assertRaises(exception.InstanceNotInRescueMode, conn.unrescue,
instance, None)
def test_finish_revert_migration(self):
instance = self._create_instance()
class VMOpsMock():
def __init__(self):
self.finish_revert_migration_called = False
def finish_revert_migration(self, instance):
self.finish_revert_migration_called = True
conn = xenapi_conn.get_connection(False)
conn._vmops = VMOpsMock()
conn.finish_revert_migration(instance, None)
self.assertTrue(conn._vmops.finish_revert_migration_called)
def _create_instance(self, instance_id=1, spawn=True):
"""Creates and spawns a test instance."""
instance_values = {
'id': instance_id,
'project_id': self.project_id,
'user_id': self.user_id,
'image_ref': 1,
'kernel_id': 2,
'ramdisk_id': 3,
'root_gb': 20,
'instance_type_id': '3', # m1.large
'os_type': 'linux',
'architecture': 'x86-64'}
instance = db.instance_create(self.context, instance_values)
network_info = [({'bridge': 'fa0', 'id': 0,
'injected': False,
'cidr': '192.168.0.0/24',
'cidr_v6': 'dead:beef::1/120',
},
{'broadcast': '192.168.0.255',
'dns': ['192.168.0.1'],
'gateway': '192.168.0.1',
'gateway_v6': 'dead:beef::1',
'ip6s': [{'enabled': '1',
'ip': 'dead:beef::dcad:beff:feef:0',
'netmask': '64'}],
'ips': [{'enabled': '1',
'ip': '192.168.0.100',
'netmask': '255.255.255.0'}],
'dhcp_server': '192.168.0.1',
'label': 'fake',
'mac': 'DE:AD:BE:EF:00:00',
'rxtx_cap': 3})]
image_meta = {'id': glance_stubs.FakeGlance.IMAGE_VHD,
'disk_format': 'vhd'}
if spawn:
instance.admin_pass = 'herp'
self.conn.spawn(self.context, instance, image_meta, network_info)
return instance
class XenAPIDiffieHellmanTestCase(test.TestCase):
    """Unit tests for the SimpleDH Diffie-Hellman key exchange code."""
    def setUp(self):
        super(XenAPIDiffieHellmanTestCase, self).setUp()
        # Two independent parties for the key exchange.
        self.alice = vmops.SimpleDH()
        self.bob = vmops.SimpleDH()
    def test_shared(self):
        # Both sides must derive the same shared secret from the other
        # side's public key.
        alice_pub = self.alice.get_public()
        bob_pub = self.bob.get_public()
        alice_shared = self.alice.compute_shared(bob_pub)
        bob_shared = self.bob.compute_shared(alice_pub)
        self.assertEquals(alice_shared, bob_shared)
    def _test_encryption(self, message):
        # Round-trip helper: Alice encrypts, Bob decrypts, plaintext
        # must survive unchanged (and ciphertext has no trailing newline).
        enc = self.alice.encrypt(message)
        self.assertFalse(enc.endswith('\n'))
        dec = self.bob.decrypt(enc)
        self.assertEquals(dec, message)
    def test_encrypt_simple_message(self):
        self._test_encryption('This is a simple message.')
    def test_encrypt_message_with_newlines_at_end(self):
        self._test_encryption('This message has a newline at the end.\n')
    def test_encrypt_many_newlines_at_end(self):
        self._test_encryption('Message with lotsa newlines.\n\n\n')
    def test_encrypt_newlines_inside_message(self):
        self._test_encryption('Message\nwith\ninterior\nnewlines.')
    def test_encrypt_with_leading_newlines(self):
        self._test_encryption('\n\nMessage with leading newlines.')
    def test_encrypt_really_long_message(self):
        # 4 KiB payload exercises any block/chunking behaviour.
        self._test_encryption(''.join(['abcd' for i in xrange(1024)]))
class XenAPIMigrateInstance(test.TestCase):
"""Unit test for verifying migration-related actions."""
def setUp(self):
super(XenAPIMigrateInstance, self).setUp()
self.flags(target_host='127.0.0.1',
xenapi_connection_url='test_url',
xenapi_connection_password='test_pass',
firewall_driver='nova.virt.xenapi.firewall.'
'Dom0IptablesFirewallDriver')
stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
db_fakes.stub_out_db_instance_api(self.stubs)
xenapi_fake.reset()
xenapi_fake.create_network('fake', FLAGS.flat_network_bridge)
self.user_id = 'fake'
self.project_id = 'fake'
self.context = context.RequestContext(self.user_id, self.project_id)
self.instance_values = {'id': 1,
'project_id': self.project_id,
'user_id': self.user_id,
'image_ref': 1,
'kernel_id': None,
'ramdisk_id': None,
'root_gb': 5,
'instance_type_id': '3', # m1.large
'os_type': 'linux',
'architecture': 'x86-64'}
migration_values = {
'source_compute': 'nova-compute',
'dest_compute': 'nova-compute',
'dest_host': '10.127.5.114',
'status': 'post-migrating',
'instance_uuid': '15f23e6a-cc6e-4d22-b651-d9bdaac316f7',
'old_instance_type_id': 5,
'new_instance_type_id': 1
}
self.migration = db.migration_create(
context.get_admin_context(), migration_values)
fake_utils.stub_out_utils_execute(self.stubs)
stubs.stub_out_migration_methods(self.stubs)
stubs.stubout_get_this_vm_uuid(self.stubs)
glance_stubs.stubout_glance_client(self.stubs)
def test_resize_xenserver_6(self):
instance = db.instance_create(self.context, self.instance_values)
called = {'resize': False}
def fake_vdi_resize(*args, **kwargs):
called['resize'] = True
self.stubs.Set(stubs.FakeSessionForVMTests,
"VDI_resize", fake_vdi_resize)
stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests,
product_version=(6, 0, 0))
conn = xenapi_conn.get_connection(False)
vdi_ref = xenapi_fake.create_vdi('hurr', 'fake')
vdi_uuid = xenapi_fake.get_record('VDI', vdi_ref)['uuid']
conn._vmops._resize_instance(instance, vdi_uuid)
self.assertEqual(called['resize'], True)
def test_migrate_disk_and_power_off(self):
instance = db.instance_create(self.context, self.instance_values)
xenapi_fake.create_vm(instance.name, 'Running')
instance_type = db.instance_type_get_by_name(self.context, 'm1.large')
conn = xenapi_conn.get_connection(False)
conn.migrate_disk_and_power_off(self.context, instance,
'127.0.0.1', instance_type, None)
def test_migrate_disk_and_power_off_passes_exceptions(self):
instance = db.instance_create(self.context, self.instance_values)
xenapi_fake.create_vm(instance.name, 'Running')
instance_type = db.instance_type_get_by_name(self.context, 'm1.large')
def fake_raise(*args, **kwargs):
raise exception.MigrationError(reason='test failure')
self.stubs.Set(vmops.VMOps, "_migrate_vhd", fake_raise)
conn = xenapi_conn.get_connection(False)
self.assertRaises(exception.MigrationError,
conn.migrate_disk_and_power_off,
self.context, instance,
'127.0.0.1', instance_type, None)
def test_revert_migrate(self):
instance = db.instance_create(self.context, self.instance_values)
self.called = False
self.fake_vm_start_called = False
self.fake_finish_revert_migration_called = False
def fake_vm_start(*args, **kwargs):
self.fake_vm_start_called = True
def fake_vdi_resize(*args, **kwargs):
self.called = True
def fake_finish_revert_migration(*args, **kwargs):
self.fake_finish_revert_migration_called = True
self.stubs.Set(stubs.FakeSessionForVMTests,
"VDI_resize_online", fake_vdi_resize)
self.stubs.Set(vmops.VMOps, '_start', fake_vm_start)
self.stubs.Set(vmops.VMOps, 'finish_revert_migration',
fake_finish_revert_migration)
conn = xenapi_conn.get_connection(False)
network_info = [({'bridge': 'fa0', 'id': 0, 'injected': False},
{'broadcast': '192.168.0.255',
'dns': ['192.168.0.1'],
'gateway': '192.168.0.1',
'gateway_v6': 'dead:beef::1',
'ip6s': [{'enabled': '1',
'ip': 'dead:beef::dcad:beff:feef:0',
'netmask': '64'}],
'ips': [{'enabled': '1',
'ip': '192.168.0.100',
'netmask': '255.255.255.0'}],
'label': 'fake',
'mac': 'DE:AD:BE:EF:00:00',
'rxtx_cap': 3})]
image_meta = {'id': instance.image_ref, 'disk_format': 'vhd'}
base = xenapi_fake.create_vdi('hurr', 'fake')
base_uuid = xenapi_fake.get_record('VDI', base)['uuid']
cow = xenapi_fake.create_vdi('durr', 'fake')
cow_uuid = xenapi_fake.get_record('VDI', cow)['uuid']
conn.finish_migration(self.context, self.migration, instance,
dict(base_copy=base_uuid, cow=cow_uuid),
network_info, image_meta, resize_instance=True)
self.assertEqual(self.called, True)
self.assertEqual(self.fake_vm_start_called, True)
conn.finish_revert_migration(instance, network_info)
self.assertEqual(self.fake_finish_revert_migration_called, True)
    def test_finish_migrate(self):
        # Happy path: finish_migration with resize_instance=True must resize
        # the VDI online and start the instance.
        instance = db.instance_create(self.context, self.instance_values)
        # Flags flipped by the stubs below so we can assert what actually ran.
        self.called = False
        self.fake_vm_start_called = False
        def fake_vm_start(*args, **kwargs):
            self.fake_vm_start_called = True
        def fake_vdi_resize(*args, **kwargs):
            self.called = True
        self.stubs.Set(vmops.VMOps, '_start', fake_vm_start)
        self.stubs.Set(stubs.FakeSessionForVMTests,
                "VDI_resize_online", fake_vdi_resize)
        conn = xenapi_conn.get_connection(False)
        # Legacy (network, info)-tuple style network model for a single NIC.
        network_info = [({'bridge': 'fa0', 'id': 0, 'injected': False},
                          {'broadcast': '192.168.0.255',
                           'dns': ['192.168.0.1'],
                           'gateway': '192.168.0.1',
                           'gateway_v6': 'dead:beef::1',
                           'ip6s': [{'enabled': '1',
                                     'ip': 'dead:beef::dcad:beff:feef:0',
                                     'netmask': '64'}],
                           'ips': [{'enabled': '1',
                                    'ip': '192.168.0.100',
                                    'netmask': '255.255.255.0'}],
                           'label': 'fake',
                           'mac': 'DE:AD:BE:EF:00:00',
                           'rxtx_cap': 3})]
        image_meta = {'id': instance.image_ref, 'disk_format': 'vhd'}
        conn.finish_migration(self.context, self.migration, instance,
                              dict(base_copy='hurr', cow='durr'),
                              network_info, image_meta, resize_instance=True)
        self.assertEqual(self.called, True)
        self.assertEqual(self.fake_vm_start_called, True)
    def test_finish_migrate_no_local_storage(self):
        # With a flavor that has no local root disk (root_gb=0),
        # finish_migration must not attempt an online VDI resize; the stub
        # below raises if it is ever reached.
        tiny_type = instance_types.get_instance_type_by_name('m1.tiny')
        tiny_type_id = tiny_type['id']
        self.instance_values.update({'instance_type_id': tiny_type_id,
                                     'root_gb': 0})
        instance = db.instance_create(self.context, self.instance_values)
        def fake_vdi_resize(*args, **kwargs):
            raise Exception("This shouldn't be called")
        self.stubs.Set(stubs.FakeSessionForVMTests,
                "VDI_resize_online", fake_vdi_resize)
        conn = xenapi_conn.get_connection(False)
        # Legacy (network, info)-tuple style network model for a single NIC.
        network_info = [({'bridge': 'fa0', 'id': 0, 'injected': False},
                          {'broadcast': '192.168.0.255',
                           'dns': ['192.168.0.1'],
                           'gateway': '192.168.0.1',
                           'gateway_v6': 'dead:beef::1',
                           'ip6s': [{'enabled': '1',
                                     'ip': 'dead:beef::dcad:beff:feef:0',
                                     'netmask': '64'}],
                           'ips': [{'enabled': '1',
                                    'ip': '192.168.0.100',
                                    'netmask': '255.255.255.0'}],
                           'label': 'fake',
                           'mac': 'DE:AD:BE:EF:00:00',
                           'rxtx_cap': 3})]
        image_meta = {'id': instance.image_ref, 'disk_format': 'vhd'}
        conn.finish_migration(self.context, self.migration, instance,
                              dict(base_copy='hurr', cow='durr'),
                              network_info, image_meta, resize_instance=True)
    def test_finish_migrate_no_resize_vdi(self):
        # When the compute layer decides no resize is needed
        # (resize_instance=False), the VDI must not be resized; the stub
        # below raises if it is ever reached.
        instance = db.instance_create(self.context, self.instance_values)
        def fake_vdi_resize(*args, **kwargs):
            raise Exception("This shouldn't be called")
        self.stubs.Set(stubs.FakeSessionForVMTests,
                "VDI_resize_online", fake_vdi_resize)
        conn = xenapi_conn.get_connection(False)
        # Legacy (network, info)-tuple style network model for a single NIC.
        network_info = [({'bridge': 'fa0', 'id': 0, 'injected': False},
                          {'broadcast': '192.168.0.255',
                           'dns': ['192.168.0.1'],
                           'gateway': '192.168.0.1',
                           'gateway_v6': 'dead:beef::1',
                           'ip6s': [{'enabled': '1',
                                     'ip': 'dead:beef::dcad:beff:feef:0',
                                     'netmask': '64'}],
                           'ips': [{'enabled': '1',
                                    'ip': '192.168.0.100',
                                    'netmask': '255.255.255.0'}],
                           'label': 'fake',
                           'mac': 'DE:AD:BE:EF:00:00',
                           'rxtx_cap': 3})]
        # Resize instance would be determined by the compute call
        image_meta = {'id': instance.image_ref, 'disk_format': 'vhd'}
        conn.finish_migration(self.context, self.migration, instance,
                              dict(base_copy='hurr', cow='durr'),
                              network_info, image_meta, resize_instance=False)
class XenAPIImageTypeTestCase(test.TestCase):
    """Exercise the ImageType id <-> string conversions."""
    def test_to_string(self):
        """A type id maps to its canonical string form."""
        image_type = vm_utils.ImageType
        converted = image_type.to_string(image_type.KERNEL)
        self.assertEquals(converted, image_type.KERNEL_STR)
    def test_from_string(self):
        """A canonical string maps back to its type id."""
        image_type = vm_utils.ImageType
        converted = image_type.from_string(image_type.KERNEL_STR)
        self.assertEquals(converted, image_type.KERNEL)
class XenAPIDetermineDiskImageTestCase(test.TestCase):
    """Unit tests for code that detects the ImageType."""
    def setUp(self):
        super(XenAPIDetermineDiskImageTestCase, self).setUp()
        glance_stubs.stubout_glance_client(self.stubs)
        class FakeInstance(object):
            pass
        fake = FakeInstance()
        fake.id = 42
        fake.os_type = 'linux'
        fake.architecture = 'x86-64'
        self.fake_instance = fake
    def assert_disk_type(self, image_meta, expected_disk_type):
        # Helper: run the detection code and compare against expectation.
        detected = vm_utils.VMHelper.determine_disk_image_type(image_meta)
        self.assertEqual(expected_disk_type, detected)
    def test_machine(self):
        # An 'ami' disk format is treated as a plain DISK image.
        self.assert_disk_type({'id': 'a', 'disk_format': 'ami'},
                              vm_utils.ImageType.DISK)
    def test_raw(self):
        self.assert_disk_type({'id': 'a', 'disk_format': 'raw'},
                              vm_utils.ImageType.DISK_RAW)
    def test_vhd(self):
        self.assert_disk_type({'id': 'a', 'disk_format': 'vhd'},
                              vm_utils.ImageType.DISK_VHD)
class CompareVersionTestCase(test.TestCase):
    """Verify the ordering semantics of vmops.cmp_version."""
    def test_less_than(self):
        """cmp_version reports a as less than b."""
        result = vmops.cmp_version('1.2.3.4', '1.2.3.5')
        self.assertTrue(result < 0)
    def test_greater_than(self):
        """cmp_version reports a as greater than b."""
        result = vmops.cmp_version('1.2.3.5', '1.2.3.4')
        self.assertTrue(result > 0)
    def test_equal(self):
        """cmp_version reports a as equal to b."""
        result = vmops.cmp_version('1.2.3.4', '1.2.3.4')
        self.assertTrue(result == 0)
    def test_non_lexical(self):
        """Version components compare numerically, not lexically."""
        result = vmops.cmp_version('1.2.3.10', '1.2.3.4')
        self.assertTrue(result > 0)
    def test_length(self):
        """Shorter version strings sort first when components match."""
        result = vmops.cmp_version('1.2.3', '1.2.3.4')
        self.assertTrue(result < 0)
class XenAPIHostTestCase(test.TestCase):
    """Tests HostState, which holds metrics from XenServer that get
    reported back to the Schedulers."""
    def setUp(self):
        super(XenAPIHostTestCase, self).setUp()
        self.flags(xenapi_connection_url='test_url',
                   xenapi_connection_password='test_pass')
        stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
        xenapi_fake.reset()
        xenapi_fake.create_local_srs()
        self.conn = xenapi_conn.get_connection(False)
    def test_host_state(self):
        # Expected values come straight from the fake session's host metrics.
        stats = self.conn.get_host_stats()
        self.assertEquals(stats['disk_total'], 10000)
        self.assertEquals(stats['disk_used'], 20000)
        self.assertEquals(stats['host_memory_total'], 10)
        self.assertEquals(stats['host_memory_overhead'], 20)
        self.assertEquals(stats['host_memory_free'], 30)
        self.assertEquals(stats['host_memory_free_computed'], 40)
    def _test_host_action(self, method, action, expected=None):
        # Invoke a host-level driver call and check its returned value;
        # by default the fake session echoes the action name back.
        result = method('host', action)
        if not expected:
            expected = action
        self.assertEqual(result, expected)
    def test_host_reboot(self):
        self._test_host_action(self.conn.host_power_action, 'reboot')
    def test_host_shutdown(self):
        self._test_host_action(self.conn.host_power_action, 'shutdown')
    def test_host_startup(self):
        # Powering a host back on is not supported through this driver.
        self.assertRaises(NotImplementedError,
                          self.conn.host_power_action, 'host', 'startup')
    def test_host_maintenance_on(self):
        self._test_host_action(self.conn.host_maintenance_mode,
                               True, 'on_maintenance')
    def test_host_maintenance_off(self):
        self._test_host_action(self.conn.host_maintenance_mode,
                               False, 'off_maintenance')
    def test_set_enable_host_enable(self):
        self._test_host_action(self.conn.set_host_enabled, True, 'enabled')
    def test_set_enable_host_disable(self):
        self._test_host_action(self.conn.set_host_enabled, False, 'disabled')
class XenAPIAutoDiskConfigTestCase(test.TestCase):
    """Tests the auto_disk_config flag handling in _attach_disks."""
    def setUp(self):
        super(XenAPIAutoDiskConfigTestCase, self).setUp()
        self.flags(target_host='127.0.0.1',
                   xenapi_connection_url='test_url',
                   xenapi_connection_password='test_pass',
                   firewall_driver='nova.virt.xenapi.firewall.'
                                   'Dom0IptablesFirewallDriver')
        stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
        xenapi_fake.reset()
        self.conn = xenapi_conn.get_connection(False)
        self.user_id = 'fake'
        self.project_id = 'fake'
        self.instance_values = {'id': 1,
                  'project_id': self.project_id,
                  'user_id': self.user_id,
                  'image_ref': 1,
                  'kernel_id': 2,
                  'ramdisk_id': 3,
                  'root_gb': 20,
                  'instance_type_id': '3',  # m1.large
                  'os_type': 'linux',
                  'architecture': 'x86-64'}
        self.context = context.RequestContext(self.user_id, self.project_id)
        # Stub out VBD creation; decorated as a classmethod to match the
        # signature of VMHelper.create_vbd that it replaces.
        @classmethod
        def fake_create_vbd(cls, session, vm_ref, vdi_ref, userdevice,
                vbd_type='disk', read_only=False, bootable=True):
            pass
        self.stubs.Set(vm_utils.VMHelper,
                       "create_vbd",
                       fake_create_vbd)
    def assertIsPartitionCalled(self, called):
        # Run _attach_disks and verify whether the partition/filesystem
        # resize helper was (or was not) invoked.
        marker = {"partition_called": False}
        def fake_resize_part_and_fs(dev, start, old, new):
            marker["partition_called"] = True
        self.stubs.Set(vm_utils, "_resize_part_and_fs",
                       fake_resize_part_and_fs)
        instance = db.instance_create(self.context, self.instance_values)
        disk_image_type = vm_utils.ImageType.DISK_VHD
        vm_ref = "blah"
        first_vdi_ref = "blah"
        vdis = ["blah"]
        self.conn._vmops._attach_disks(
            instance, disk_image_type, vm_ref, first_vdi_ref, vdis)
        self.assertEqual(marker["partition_called"], called)
    def test_instance_not_auto_disk_config(self):
        """Should not partition unless instance is marked as
        auto_disk_config.
        """
        self.instance_values['auto_disk_config'] = False
        self.assertIsPartitionCalled(False)
    @stub_vm_utils_with_vdi_attached_here
    def test_instance_auto_disk_config_doesnt_pass_fail_safes(self):
        """Should not partition unless fail safes pass"""
        self.instance_values['auto_disk_config'] = True
        # Two partitions on the disk trips the fail-safe: auto-resize only
        # happens on single-partition disks.
        def fake_get_partitions(dev):
            return [(1, 0, 100, 'ext4'), (2, 100, 200, 'ext4')]
        self.stubs.Set(vm_utils, "_get_partitions",
                       fake_get_partitions)
        self.assertIsPartitionCalled(False)
    @stub_vm_utils_with_vdi_attached_here
    def test_instance_auto_disk_config_passes_fail_safes(self):
        """Should partition if instance is marked as auto_disk_config=True and
        virt-layer specific fail-safe checks pass.
        """
        self.instance_values['auto_disk_config'] = True
        def fake_get_partitions(dev):
            return [(1, 0, 100, 'ext4')]
        self.stubs.Set(vm_utils, "_get_partitions",
                       fake_get_partitions)
        self.assertIsPartitionCalled(True)
class XenAPIGenerateLocal(test.TestCase):
    """Test generating of local disks, like swap and ephemeral"""
    def setUp(self):
        super(XenAPIGenerateLocal, self).setUp()
        self.flags(target_host='127.0.0.1',
                   xenapi_connection_url='test_url',
                   xenapi_connection_password='test_pass',
                   xenapi_generate_swap=True,
                   firewall_driver='nova.virt.xenapi.firewall.'
                                   'Dom0IptablesFirewallDriver')
        stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
        db_fakes.stub_out_db_instance_api(self.stubs)
        xenapi_fake.reset()
        self.conn = xenapi_conn.get_connection(False)
        self.user_id = 'fake'
        self.project_id = 'fake'
        self.instance_values = {'id': 1,
                  'project_id': self.project_id,
                  'user_id': self.user_id,
                  'image_ref': 1,
                  'kernel_id': 2,
                  'ramdisk_id': 3,
                  'root_gb': 20,
                  'instance_type_id': '3',  # m1.large
                  'os_type': 'linux',
                  'architecture': 'x86-64'}
        self.context = context.RequestContext(self.user_id, self.project_id)
        # Stub out VBD creation; decorated as a classmethod to match the
        # signature of VMHelper.create_vbd that it replaces.
        @classmethod
        def fake_create_vbd(cls, session, vm_ref, vdi_ref, userdevice,
                vbd_type='disk', read_only=False, bootable=True):
            pass
        self.stubs.Set(vm_utils.VMHelper,
                       "create_vbd",
                       fake_create_vbd)
    def assertCalled(self, instance):
        # Run _attach_disks and verify that the stubbed generator (swap or
        # ephemeral, set by the individual test) flipped self.called.
        disk_image_type = vm_utils.ImageType.DISK_VHD
        vm_ref = "blah"
        first_vdi_ref = "blah"
        vdis = ["blah"]
        self.called = False
        self.conn._vmops._attach_disks(instance, disk_image_type,
                                       vm_ref, first_vdi_ref, vdis)
        self.assertTrue(self.called)
    def test_generate_swap(self):
        """Test swap disk generation."""
        instance = db.instance_create(self.context, self.instance_values)
        # Flavor id 5 carries a swap disk in the fake DB.
        instance = db.instance_update(self.context, instance['id'],
                                      {'instance_type_id': 5})
        @classmethod
        def fake_generate_swap(cls, *args, **kwargs):
            self.called = True
        self.stubs.Set(vm_utils.VMHelper, 'generate_swap',
                       fake_generate_swap)
        self.assertCalled(instance)
    def test_generate_ephemeral(self):
        """Test ephemeral disk generation."""
        instance = db.instance_create(self.context, self.instance_values)
        # Flavor id 4 carries an ephemeral disk in the fake DB.
        instance = db.instance_update(self.context, instance['id'],
                                      {'instance_type_id': 4})
        @classmethod
        def fake_generate_ephemeral(cls, *args):
            self.called = True
        self.stubs.Set(vm_utils.VMHelper, 'generate_ephemeral',
                       fake_generate_ephemeral)
        self.assertCalled(instance)
class XenAPIBWUsageTestCase(test.TestCase):
    """Tests bandwidth-usage collection when metric compilation fails."""
    def setUp(self):
        super(XenAPIBWUsageTestCase, self).setUp()
        # Force every metrics compilation to raise CouldNotFetchMetrics.
        self.stubs.Set(vm_utils.VMHelper, "compile_metrics",
                XenAPIBWUsageTestCase._fake_compile_metrics)
        self.flags(target_host='127.0.0.1',
                   xenapi_connection_url='test_url',
                   xenapi_connection_password='test_pass',
                   firewall_driver='nova.virt.xenapi.firewall.'
                                   'Dom0IptablesFirewallDriver')
        stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
        xenapi_fake.reset()
        self.conn = xenapi_conn.get_connection(False)
    @classmethod
    def _fake_compile_metrics(cls, start_time, stop_time=None):
        raise exception.CouldNotFetchMetrics()
    def test_get_all_bw_usage_in_failure_case(self):
        """Test that get_all_bw_usage returns an empty list when metrics
        compilation failed. c.f. bug #910045.
        """
        class testinstance(object):
            def __init__(self):
                self.name = "instance-0001"
                self.uuid = "1-2-3-4-5"
        result = self.conn.get_all_bw_usage([testinstance()],
                                            datetime.datetime.utcnow())
        self.assertEqual(result, [])
# TODO(salvatore-orlando): this class and
# nova.tests.test_libvirt.IPTablesFirewallDriverTestCase share a lot of code.
# Consider abstracting common code in a base class for firewall driver testing.
class XenAPIDom0IptablesFirewallTestCase(test.TestCase):
    """Tests the dom0 iptables firewall driver used by the XenAPI backend."""
    # Canned iptables-save output fed to the fake session; the driver's
    # regenerated output is captured into self._out_rules by the stubs.
    _in_nat_rules = [
      '# Generated by iptables-save v1.4.10 on Sat Feb 19 00:03:19 2011',
      '*nat',
      ':PREROUTING ACCEPT [1170:189210]',
      ':INPUT ACCEPT [844:71028]',
      ':OUTPUT ACCEPT [5149:405186]',
      ':POSTROUTING ACCEPT [5063:386098]',
    ]
    _in_filter_rules = [
      '# Generated by iptables-save v1.4.4 on Mon Dec 6 11:54:13 2010',
      '*filter',
      ':INPUT ACCEPT [969615:281627771]',
      ':FORWARD ACCEPT [0:0]',
      ':OUTPUT ACCEPT [915599:63811649]',
      ':nova-block-ipv4 - [0:0]',
      '-A INPUT -i virbr0 -p tcp -m tcp --dport 67 -j ACCEPT ',
      '-A FORWARD -d 192.168.122.0/24 -o virbr0 -m state --state RELATED'
      ',ESTABLISHED -j ACCEPT ',
      '-A FORWARD -s 192.168.122.0/24 -i virbr0 -j ACCEPT ',
      '-A FORWARD -i virbr0 -o virbr0 -j ACCEPT ',
      '-A FORWARD -o virbr0 -j REJECT --reject-with icmp-port-unreachable ',
      '-A FORWARD -i virbr0 -j REJECT --reject-with icmp-port-unreachable ',
      'COMMIT',
      '# Completed on Mon Dec 6 11:54:13 2010',
    ]
    _in6_filter_rules = [
      '# Generated by ip6tables-save v1.4.4 on Tue Jan 18 23:47:56 2011',
      '*filter',
      ':INPUT ACCEPT [349155:75810423]',
      ':FORWARD ACCEPT [0:0]',
      ':OUTPUT ACCEPT [349256:75777230]',
      'COMMIT',
      '# Completed on Tue Jan 18 23:47:56 2011',
    ]
    def setUp(self):
        super(XenAPIDom0IptablesFirewallTestCase, self).setUp()
        self.flags(xenapi_connection_url='test_url',
                   xenapi_connection_password='test_pass',
                   instance_name_template='%d',
                   firewall_driver='nova.virt.xenapi.firewall.'
                                   'Dom0IptablesFirewallDriver')
        xenapi_fake.reset()
        xenapi_fake.create_local_srs()
        xenapi_fake.create_local_pifs()
        self.user_id = 'mappin'
        self.project_id = 'fake'
        # FakeSessionForFirewallTests records the driver's generated rules
        # on this test case (as self._out_rules).
        stubs.stubout_session(self.stubs, stubs.FakeSessionForFirewallTests,
                              test_case=self)
        self.context = context.RequestContext(self.user_id, self.project_id)
        self.network = importutils.import_object(FLAGS.network_manager)
        self.conn = xenapi_conn.get_connection(False)
        self.fw = self.conn._vmops.firewall_driver
    def _create_instance_ref(self):
        return db.instance_create(self.context,
                                  {'user_id': self.user_id,
                                   'project_id': self.project_id,
                                   'instance_type_id': 1})
    def _create_test_security_group(self):
        # A group allowing ICMP (any + echo-request) and TCP 80-81.
        admin_ctxt = context.get_admin_context()
        secgroup = db.security_group_create(admin_ctxt,
                                {'user_id': self.user_id,
                                 'project_id': self.project_id,
                                 'name': 'testgroup',
                                 'description': 'test group'})
        db.security_group_rule_create(admin_ctxt,
                                      {'parent_group_id': secgroup['id'],
                                       'protocol': 'icmp',
                                       'from_port': -1,
                                       'to_port': -1,
                                       'cidr': '192.168.11.0/24'})
        db.security_group_rule_create(admin_ctxt,
                                      {'parent_group_id': secgroup['id'],
                                       'protocol': 'icmp',
                                       'from_port': 8,
                                       'to_port': -1,
                                       'cidr': '192.168.11.0/24'})
        db.security_group_rule_create(admin_ctxt,
                                      {'parent_group_id': secgroup['id'],
                                       'protocol': 'tcp',
                                       'from_port': 80,
                                       'to_port': 81,
                                       'cidr': '192.168.10.0/24'})
        return secgroup
    def _validate_security_group(self):
        # Check that the driver's output preserves non-nova input rules and
        # contains the chains/accept rules for the test security group.
        in_rules = filter(lambda l: not l.startswith('#'),
                          self._in_filter_rules)
        for rule in in_rules:
            if not 'nova' in rule:
                self.assertTrue(rule in self._out_rules,
                                'Rule went missing: %s' % rule)
        instance_chain = None
        for rule in self._out_rules:
            # This is pretty crude, but it'll do for now
            # last two octets change
            if re.search('-d 192.168.[0-9]{1,3}.[0-9]{1,3} -j', rule):
                instance_chain = rule.split(' ')[-1]
                break
        self.assertTrue(instance_chain, "The instance chain wasn't added")
        security_group_chain = None
        for rule in self._out_rules:
            # This is pretty crude, but it'll do for now
            if '-A %s -j' % instance_chain in rule:
                security_group_chain = rule.split(' ')[-1]
                break
        self.assertTrue(security_group_chain,
                        "The security group chain wasn't added")
        regex = re.compile('-A .* -j ACCEPT -p icmp -s 192.168.11.0/24')
        self.assertTrue(len(filter(regex.match, self._out_rules)) > 0,
                        "ICMP acceptance rule wasn't added")
        regex = re.compile('-A .* -j ACCEPT -p icmp -m icmp --icmp-type 8'
                           ' -s 192.168.11.0/24')
        self.assertTrue(len(filter(regex.match, self._out_rules)) > 0,
                        "ICMP Echo Request acceptance rule wasn't added")
        regex = re.compile('-A .* -j ACCEPT -p tcp --dport 80:81'
                           ' -s 192.168.10.0/24')
        self.assertTrue(len(filter(regex.match, self._out_rules)) > 0,
                        "TCP port 80/81 acceptance rule wasn't added")
    def test_static_filters(self):
        instance_ref = self._create_instance_ref()
        src_instance_ref = self._create_instance_ref()
        admin_ctxt = context.get_admin_context()
        secgroup = self._create_test_security_group()
        # Source group: members of this group may reach TCP 80-81.
        src_secgroup = db.security_group_create(admin_ctxt,
                                        {'user_id': self.user_id,
                                         'project_id': self.project_id,
                                         'name': 'testsourcegroup',
                                         'description': 'src group'})
        db.security_group_rule_create(admin_ctxt,
                                      {'parent_group_id': secgroup['id'],
                                       'protocol': 'tcp',
                                       'from_port': 80,
                                       'to_port': 81,
                                       'group_id': src_secgroup['id']})
        db.instance_add_security_group(admin_ctxt, instance_ref['uuid'],
                                       secgroup['id'])
        db.instance_add_security_group(admin_ctxt, src_instance_ref['uuid'],
                                       src_secgroup['id'])
        instance_ref = db.instance_get(admin_ctxt, instance_ref['id'])
        src_instance_ref = db.instance_get(admin_ctxt, src_instance_ref['id'])
        network_model = fake_network.fake_get_instance_nw_info(self.stubs,
                                                        1, spectacular=True)
        fake_network.stub_out_nw_api_get_instance_nw_info(self.stubs,
                                      lambda *a, **kw: network_model)
        network_info = compute_utils.legacy_network_info(network_model)
        self.fw.prepare_instance_filter(instance_ref, network_info)
        self.fw.apply_instance_filter(instance_ref, network_info)
        self._validate_security_group()
        # Extra test for TCP acceptance rules
        for ip in network_model.fixed_ips():
            if ip['version'] != 4:
                continue
            regex = re.compile('-A .* -j ACCEPT -p tcp'
                               ' --dport 80:81 -s %s' % ip['address'])
            self.assertTrue(len(filter(regex.match, self._out_rules)) > 0,
                            "TCP port 80/81 acceptance rule wasn't added")
        db.instance_destroy(admin_ctxt, instance_ref['id'])
    def test_filters_for_instance_with_ip_v6(self):
        self.flags(use_ipv6=True)
        network_info = fake_network.fake_get_instance_nw_info(self.stubs, 1)
        rulesv4, rulesv6 = self.fw._filters_for_instance("fake", network_info)
        self.assertEquals(len(rulesv4), 2)
        self.assertEquals(len(rulesv6), 1)
    def test_filters_for_instance_without_ip_v6(self):
        self.flags(use_ipv6=False)
        network_info = fake_network.fake_get_instance_nw_info(self.stubs, 1)
        rulesv4, rulesv6 = self.fw._filters_for_instance("fake", network_info)
        self.assertEquals(len(rulesv4), 2)
        self.assertEquals(len(rulesv6), 0)
    def test_multinic_iptables(self):
        # Per-address rule counts expected from the fake network model.
        ipv4_rules_per_addr = 1
        ipv4_addr_per_network = 2
        ipv6_rules_per_addr = 1
        ipv6_addr_per_network = 1
        networks_count = 5
        instance_ref = self._create_instance_ref()
        _get_instance_nw_info = fake_network.fake_get_instance_nw_info
        network_info = _get_instance_nw_info(self.stubs,
                                             networks_count,
                                             ipv4_addr_per_network)
        ipv4_len = len(self.fw.iptables.ipv4['filter'].rules)
        ipv6_len = len(self.fw.iptables.ipv6['filter'].rules)
        inst_ipv4, inst_ipv6 = self.fw.instance_rules(instance_ref,
                                                      network_info)
        self.fw.prepare_instance_filter(instance_ref, network_info)
        ipv4 = self.fw.iptables.ipv4['filter'].rules
        ipv6 = self.fw.iptables.ipv6['filter'].rules
        # Rules added beyond the instance rules are per-network rules.
        ipv4_network_rules = len(ipv4) - len(inst_ipv4) - ipv4_len
        ipv6_network_rules = len(ipv6) - len(inst_ipv6) - ipv6_len
        self.assertEquals(ipv4_network_rules,
                  ipv4_rules_per_addr * ipv4_addr_per_network * networks_count)
        self.assertEquals(ipv6_network_rules,
                  ipv6_rules_per_addr * ipv6_addr_per_network * networks_count)
    def test_do_refresh_security_group_rules(self):
        admin_ctxt = context.get_admin_context()
        instance_ref = self._create_instance_ref()
        network_info = fake_network.fake_get_instance_nw_info(self.stubs, 1, 1)
        secgroup = self._create_test_security_group()
        db.instance_add_security_group(admin_ctxt, instance_ref['uuid'],
                                       secgroup['id'])
        self.fw.prepare_instance_filter(instance_ref, network_info)
        self.fw.instances[instance_ref['id']] = instance_ref
        self._validate_security_group()
        # add a rule to the security group
        db.security_group_rule_create(admin_ctxt,
                                      {'parent_group_id': secgroup['id'],
                                       'protocol': 'udp',
                                       'from_port': 200,
                                       'to_port': 299,
                                       'cidr': '192.168.99.0/24'})
        #validate the extra rule
        self.fw.refresh_security_group_rules(secgroup)
        regex = re.compile('-A .* -j ACCEPT -p udp --dport 200:299'
                           ' -s 192.168.99.0/24')
        self.assertTrue(len(filter(regex.match, self._out_rules)) > 0,
                        "Rules were not updated properly."
                        "The rule for UDP acceptance is missing")
    def test_provider_firewall_rules(self):
        # setup basic instance data
        instance_ref = self._create_instance_ref()
        # FRAGILE: as in libvirt tests
        # peeks at how the firewall names chains
        chain_name = 'inst-%s' % instance_ref['id']
        network_info = fake_network.fake_get_instance_nw_info(self.stubs, 1, 1)
        self.fw.prepare_instance_filter(instance_ref, network_info)
        self.assertTrue('provider' in self.fw.iptables.ipv4['filter'].chains)
        rules = [rule for rule in self.fw.iptables.ipv4['filter'].rules
                      if rule.chain == 'provider']
        self.assertEqual(0, len(rules))
        admin_ctxt = context.get_admin_context()
        # add a rule and send the update message, check for 1 rule
        provider_fw0 = db.provider_fw_rule_create(admin_ctxt,
                                                  {'protocol': 'tcp',
                                                   'cidr': '10.99.99.99/32',
                                                   'from_port': 1,
                                                   'to_port': 65535})
        self.fw.refresh_provider_fw_rules()
        rules = [rule for rule in self.fw.iptables.ipv4['filter'].rules
                      if rule.chain == 'provider']
        self.assertEqual(1, len(rules))
        # Add another, refresh, and make sure number of rules goes to two
        provider_fw1 = db.provider_fw_rule_create(admin_ctxt,
                                                  {'protocol': 'udp',
                                                   'cidr': '10.99.99.99/32',
                                                   'from_port': 1,
                                                   'to_port': 65535})
        self.fw.refresh_provider_fw_rules()
        rules = [rule for rule in self.fw.iptables.ipv4['filter'].rules
                      if rule.chain == 'provider']
        self.assertEqual(2, len(rules))
        # create the instance filter and make sure it has a jump rule
        self.fw.prepare_instance_filter(instance_ref, network_info)
        self.fw.apply_instance_filter(instance_ref, network_info)
        inst_rules = [rule for rule in self.fw.iptables.ipv4['filter'].rules
                           if rule.chain == chain_name]
        jump_rules = [rule for rule in inst_rules if '-j' in rule.rule]
        provjump_rules = []
        # IptablesTable doesn't make rules unique internally
        for rule in jump_rules:
            if 'provider' in rule.rule and rule not in provjump_rules:
                provjump_rules.append(rule)
        self.assertEqual(1, len(provjump_rules))
        # remove a rule from the db, cast to compute to refresh rule
        db.provider_fw_rule_destroy(admin_ctxt, provider_fw1['id'])
        self.fw.refresh_provider_fw_rules()
        rules = [rule for rule in self.fw.iptables.ipv4['filter'].rules
                      if rule.chain == 'provider']
        self.assertEqual(1, len(rules))
class XenAPISRSelectionTestCase(test.TestCase):
    """Unit tests for testing we find the right SR."""
    def setUp(self):
        super(XenAPISRSelectionTestCase, self).setUp()
        xenapi_fake.reset()
    def test_safe_find_sr_raise_exception(self):
        """Ensure StorageRepositoryNotFound is raise when wrong filter."""
        # A filter string that matches nothing must raise, not return None.
        self.flags(sr_matching_filter='yadayadayada')
        stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
        session = xenapi_conn.XenAPISession('test_url', 'root', 'test_pass')
        helper = vm_utils.VMHelper
        helper.XenAPI = session.get_imported_xenapi()
        self.assertRaises(exception.StorageRepositoryNotFound,
                          helper.safe_find_sr, session)
    def test_safe_find_sr_local_storage(self):
        """Ensure the default local-storage is found."""
        self.flags(sr_matching_filter='other-config:i18n-key=local-storage')
        stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
        session = xenapi_conn.XenAPISession('test_url', 'root', 'test_pass')
        helper = vm_utils.VMHelper
        helper.XenAPI = session.get_imported_xenapi()
        host_ref = xenapi_fake.get_all('host')[0]
        # Create an SR tagged with the i18n-key the filter looks for.
        local_sr = xenapi_fake.create_sr(
                              name_label='Fake Storage',
                              type='lvm',
                              other_config={'i18n-original-value-name_label':
                                            'Local storage',
                                            'i18n-key': 'local-storage'},
                              host_ref=host_ref)
        expected = helper.safe_find_sr(session)
        self.assertEqual(local_sr, expected)
    def test_safe_find_sr_by_other_criteria(self):
        """Ensure the SR is found when using a different filter."""
        self.flags(sr_matching_filter='other-config:my_fake_sr=true')
        stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
        session = xenapi_conn.XenAPISession('test_url', 'root', 'test_pass')
        helper = vm_utils.VMHelper
        helper.XenAPI = session.get_imported_xenapi()
        host_ref = xenapi_fake.get_all('host')[0]
        local_sr = xenapi_fake.create_sr(name_label='Fake Storage',
                                         type='lvm',
                                         other_config={'my_fake_sr': 'true'},
                                         host_ref=host_ref)
        expected = helper.safe_find_sr(session)
        self.assertEqual(local_sr, expected)
    def test_safe_find_sr_default(self):
        """Ensure the default SR is found regardless of other-config."""
        self.flags(sr_matching_filter='default-sr:true')
        stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
        session = xenapi_conn.XenAPISession('test_url', 'root', 'test_pass')
        helper = vm_utils.VMHelper
        pool_ref = xenapi_fake.create_pool('')
        helper.XenAPI = session.get_imported_xenapi()
        expected = helper.safe_find_sr(session)
        # The pool's default SR must be the one the helper selects.
        self.assertEqual(session.call_xenapi('pool.get_default_SR', pool_ref),
                         expected)
class XenAPIAggregateTestCase(test.TestCase):
    """Unit tests for aggregate operations."""
    def setUp(self):
        super(XenAPIAggregateTestCase, self).setUp()
        self.flags(xenapi_connection_url='http://test_url',
                   xenapi_connection_username='test_user',
                   xenapi_connection_password='test_pass',
                   instance_name_template='%d',
                   firewall_driver='nova.virt.xenapi.firewall.'
                                   'Dom0IptablesFirewallDriver',
                   host='host')
        xenapi_fake.reset()
        host_ref = xenapi_fake.get_all('host')[0]
        stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
        self.context = context.get_admin_context()
        self.conn = xenapi_conn.get_connection(False)
        # Metadata a one-host pool would carry after aggregate creation.
        self.fake_metadata = {'main_compute': 'host',
                              'host': xenapi_fake.get_record('host',
                                                             host_ref)['uuid']}
    def test_add_to_aggregate_called(self):
        # The driver entry point must delegate to the pool's method.
        def fake_add_to_aggregate(context, aggregate, host):
            fake_add_to_aggregate.called = True
        self.stubs.Set(self.conn._pool,
                       "add_to_aggregate",
                       fake_add_to_aggregate)
        self.conn.add_to_aggregate(None, None, None)
        self.assertTrue(fake_add_to_aggregate.called)
    def test_add_to_aggregate_for_first_host_sets_metadata(self):
        # Adding the first host initializes the pool and stores the pool
        # metadata on the aggregate, moving it to ACTIVE.
        def fake_init_pool(id, name):
            fake_init_pool.called = True
        self.stubs.Set(self.conn._pool, "_init_pool", fake_init_pool)
        aggregate = self._aggregate_setup()
        self.conn._pool.add_to_aggregate(self.context, aggregate, "host")
        result = db.aggregate_get(self.context, aggregate.id)
        self.assertTrue(fake_init_pool.called)
        self.assertDictMatch(self.fake_metadata, result.metadetails)
        self.assertEqual(aggregate_states.ACTIVE, result.operational_state)
    def test_join_subordinate(self):
        """Ensure join_subordinate gets called when the request gets to main."""
        def fake_join_subordinate(id, compute_uuid, host, url, user, password):
            fake_join_subordinate.called = True
        self.stubs.Set(self.conn._pool, "_join_subordinate", fake_join_subordinate)
        aggregate = self._aggregate_setup(hosts=['host', 'host2'],
                                          metadata=self.fake_metadata)
        self.conn._pool.add_to_aggregate(self.context, aggregate, "host2",
                                         compute_uuid='fake_uuid',
                                         url='fake_url',
                                         user='fake_user',
                                         passwd='fake_pass',
                                         xenhost_uuid='fake_uuid')
        self.assertTrue(fake_join_subordinate.called)
    def test_add_to_aggregate_first_host(self):
        # Creating the pool must rename it via pool_set_name_label.
        def fake_pool_set_name_label(self, session, pool_ref, name):
            fake_pool_set_name_label.called = True
        self.stubs.Set(xenapi_fake.SessionBase, "pool_set_name_label",
                       fake_pool_set_name_label)
        self.conn._session.call_xenapi("pool.create", {"name": "asdf"})
        values = {"name": 'fake_aggregate',
                  "availability_zone": 'fake_zone'}
        result = db.aggregate_create(self.context, values)
        db.aggregate_host_add(self.context, result.id, "host")
        aggregate = db.aggregate_get(self.context, result.id)
        self.assertEqual(["host"], aggregate.hosts)
        self.assertEqual({}, aggregate.metadetails)
        self.conn._pool.add_to_aggregate(self.context, aggregate, "host")
        self.assertTrue(fake_pool_set_name_label.called)
    def test_remove_from_aggregate_called(self):
        # The driver entry point must delegate to the pool's method.
        def fake_remove_from_aggregate(context, aggregate, host):
            fake_remove_from_aggregate.called = True
        self.stubs.Set(self.conn._pool,
                       "remove_from_aggregate",
                       fake_remove_from_aggregate)
        self.conn.remove_from_aggregate(None, None, None)
        self.assertTrue(fake_remove_from_aggregate.called)
    def test_remove_from_empty_aggregate(self):
        # Removing a host that is not in the aggregate must raise.
        values = {"name": 'fake_aggregate',
                  "availability_zone": 'fake_zone'}
        result = db.aggregate_create(self.context, values)
        self.assertRaises(exception.AggregateError,
                          self.conn._pool.remove_from_aggregate,
                          None, result, "test_host")
    def test_remove_subordinate(self):
        """Ensure eject subordinate gets called."""
        def fake_eject_subordinate(id, compute_uuid, host_uuid):
            fake_eject_subordinate.called = True
        self.stubs.Set(self.conn._pool, "_eject_subordinate", fake_eject_subordinate)
        self.fake_metadata['host2'] = 'fake_host2_uuid'
        aggregate = self._aggregate_setup(hosts=['host', 'host2'],
                                          metadata=self.fake_metadata)
        self.conn._pool.remove_from_aggregate(self.context, aggregate, "host2")
        self.assertTrue(fake_eject_subordinate.called)
    def test_remove_main_solo(self):
        """Ensure metadata are cleared after removal."""
        def fake_clear_pool(id):
            fake_clear_pool.called = True
        self.stubs.Set(self.conn._pool, "_clear_pool", fake_clear_pool)
        aggregate = self._aggregate_setup(aggr_state=aggregate_states.ACTIVE,
                                          metadata=self.fake_metadata)
        self.conn._pool.remove_from_aggregate(self.context, aggregate, "host")
        result = db.aggregate_get(self.context, aggregate.id)
        self.assertTrue(fake_clear_pool.called)
        self.assertDictMatch({}, result.metadetails)
        self.assertEqual(aggregate_states.ACTIVE, result.operational_state)
    def test_remote_main_non_empty_pool(self):
        """Ensure AggregateError is raised if removing the main."""
        aggregate = self._aggregate_setup(aggr_state=aggregate_states.ACTIVE,
                                          hosts=['host', 'host2'],
                                          metadata=self.fake_metadata)
        self.assertRaises(exception.InvalidAggregateAction,
                          self.conn._pool.remove_from_aggregate,
                          self.context, aggregate, "host")
    def _aggregate_setup(self, aggr_name='fake_aggregate',
                         aggr_zone='fake_zone',
                         aggr_state=aggregate_states.CREATED,
                         hosts=None, metadata=None):
        # Create an aggregate with the given hosts and metadata and return
        # the freshly-loaded DB record.
        # NOTE: `hosts` uses a None sentinel instead of a mutable-list
        # default argument to avoid sharing one list across calls.
        if hosts is None:
            hosts = ['host']
        values = {"name": aggr_name,
                  "availability_zone": aggr_zone,
                  "operational_state": aggr_state, }
        result = db.aggregate_create(self.context, values)
        for host in hosts:
            db.aggregate_host_add(self.context, result.id, host)
        if metadata:
            db.aggregate_metadata_add(self.context, result.id, metadata)
        return db.aggregate_get(self.context, result.id)
| usc-isi/extra-specs | nova/tests/test_xenapi.py | Python | apache-2.0 | 83,206 |
/**
* Copyright (C) 2012 Ness Computing, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.nesscomputing.jersey.types;
import static com.google.common.base.Preconditions.checkNotNull;
import java.util.Objects;
import java.util.regex.Pattern;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
/**
* Simple Jersey date parameter class. Accepts either milliseconds since epoch UTC or ISO formatted dates.
* Will convert everything into UTC regardless of input timezone.
*/
/**
 * Jersey date parameter wrapper. A value is accepted either as milliseconds
 * since the UTC epoch or as an ISO formatted date string; regardless of the
 * input's time zone the wrapped instant is normalized to UTC.
 */
public class DateParam
{
    /** Matches an (optionally negative) integer, i.e. a millisecond timestamp. */
    private static final Pattern MILLIS_PATTERN = Pattern.compile("-?\\d+");

    private final DateTime dateTime;

    DateParam(DateTime dateTime)
    {
        this.dateTime = checkNotNull(dateTime, "null datetime").withZone(DateTimeZone.UTC);
    }

    public static DateParam valueOf(DateTime dateTime)
    {
        return new DateParam(dateTime);
    }

    public static DateParam valueOf(String string)
    {
        if (string == null) {
            return null;
        }
        final DateTime parsed = MILLIS_PATTERN.matcher(string).matches()
                ? new DateTime(Long.parseLong(string), DateTimeZone.UTC)
                : new DateTime(string, DateTimeZone.UTC);
        return new DateParam(parsed);
    }

    /**
     * @return a DateTime if the parameter was provided, or null otherwise.
     */
    // This method is static so that you can handle optional parameters as null instances.
    public static DateTime getDateTime(DateParam param)
    {
        return param != null ? param.dateTime : null;
    }

    @Override
    public String toString()
    {
        return Objects.toString(dateTime);
    }
}
| NessComputing/components-ness-jersey | jersey/src/main/java/com/nesscomputing/jersey/types/DateParam.java | Java | apache-2.0 | 2,189 |
// Public entry point for `npm unbuild`: undoes the effects of `npm build`
// (bin/man links) and deletes the target folder.  The usage string is
// surfaced by npm's help machinery.
module.exports = unbuild
unbuild.usage = "npm unbuild <folder>\n(this is plumbing)"
var readJson = require("read-package-json")
, rm = require("./utils/gently-rm.js")
, gentlyRm = require("./utils/gently-rm.js")
, npm = require("./npm.js")
, path = require("path")
, fs = require("graceful-fs")
, lifecycle = require("./utils/lifecycle.js")
, asyncMap = require("slide").asyncMap
, chain = require("slide").chain
, log = require("npmlog")
, build = require("./build.js")
// args is a list of folders.
// remove any bins/etc, and then DELETE the folder.
// Unbuild every folder in `args`.  `silent` is optional: when the caller
// passes a function in its place, treat it as the callback.
function unbuild (args, silent, cb) {
  if (typeof silent === "function") {
    cb = silent
    silent = false
  }
  asyncMap(args, unbuild_(silent), cb)
}
// Returns a per-folder worker for asyncMap.  The chain ordering below is
// deliberate: uninstall lifecycle scripts must run while the package is
// still on disk, and the folder itself is deleted last.
function unbuild_ (silent) { return function (folder, cb_) {
  // Report the path relative to npm.root in the final callback.
  function cb (er) {
    cb_(er, path.relative(npm.root, folder))
  }
  folder = path.resolve(folder)
  // Forget the "already built" memo so a later rebuild starts clean.
  delete build._didBuild[folder]
  log.info(folder, "unbuild")
  readJson(path.resolve(folder, "package.json"), function (er, pkg) {
    // if no json, then just trash it, but no scripts or whatever.
    if (er) return rm(folder, cb)
    readJson.cache.del(folder)
    chain
      ( [ [lifecycle, pkg, "preuninstall", folder, false, true]
        , [lifecycle, pkg, "uninstall", folder, false, true]
        , !silent && function(cb) {
            console.log("unbuild " + pkg._id)
            cb()
          }
        , [rmStuff, pkg, folder]
        , [lifecycle, pkg, "postuninstall", folder, false, true]
        , [rm, folder] ]
      , cb )
  })
}}
// Remove the artifacts `npm build` created for this package (bin links and
// man pages), running both removers in parallel via asyncMap.
function rmStuff (pkg, folder, cb) {
  // if it's global, and folder is in {prefix}/node_modules,
  // then bins are in {prefix}/bin
  // otherwise, then bins are in folder/../.bin
  var parent = path.dirname(folder)
    , gnm = npm.dir
    , top = gnm === parent // true when installed directly under npm.dir
  // Drop the cached package.json so subsequent reads see fresh state.
  readJson.cache.del(path.resolve(folder, "package.json"))
  log.verbose([top, gnm, parent], "unbuild " + pkg._id)
  asyncMap([rmBins, rmMans], function (fn, cb) {
    fn(pkg, folder, parent, top, cb)
  }, cb)
}
// Delete the bin links declared in pkg.bin.  On Windows both the .cmd shim
// and the plain script are removed; elsewhere gentlyRm is passed the
// package folder (unless --force), which gates what it will delete.
function rmBins (pkg, folder, parent, top, cb) {
  if (!pkg.bin) return cb()
  var binRoot = top ? npm.bin : path.resolve(parent, ".bin")
  log.verbose([binRoot, pkg.bin], "binRoot")
  asyncMap(Object.keys(pkg.bin), function (b, cb) {
    if (process.platform === "win32") {
      chain([ [rm, path.resolve(binRoot, b) + ".cmd"]
            , [rm, path.resolve(binRoot, b) ] ], cb)
    } else {
      gentlyRm( path.resolve(binRoot, b)
              , !npm.config.get("force") && folder
              , cb )
    }
  }, cb)
}
// Remove installed man pages.  Only applies to global installs on
// non-Windows platforms; otherwise there is nothing to do.
function rmMans (pkg, folder, parent, top, cb) {
  if (!pkg.man
      || !top
      || process.platform === "win32"
      || !npm.config.get("global")) {
    return cb()
  }
  var manRoot = path.resolve(npm.config.get("prefix"), "share", "man")
  asyncMap(pkg.man, function (man, cb) {
    if (Array.isArray(man)) {
      man.forEach(rm)
    } else {
      rm(man)
    }

    // NOTE(review): this inner `rm` shadows the module-level gently-rm
    // import.  `parseMan` will be null for a filename without a numeric
    // section suffix -- presumably pkg.man entries are pre-validated;
    // TODO confirm.
    function rm(man) {
      var parseMan = man.match(/(.*)\.([0-9]+)(\.gz)?$/)
        , stem = parseMan[1]
        , sxn = parseMan[2]      // man section, e.g. "1"
        , gz = parseMan[3] || "" // preserve a .gz suffix if present
        , bn = path.basename(stem)
        , manDest = path.join( manRoot
                             , "man"+sxn
                             , (bn.indexOf(pkg.name) === 0 ? bn
                               : pkg.name + "-" + bn)
                               + "." + sxn + gz
                             )
      gentlyRm( manDest
              , !npm.config.get("force") && folder
              , cb )
    }
  }, cb)
}
| fabm/BabyHelp | node_modules/npm/lib/unbuild.js | JavaScript | apache-2.0 | 3,505 |
package hu.akarnokd.rxjava;
import java.util.concurrent.TimeUnit;
import rx.*;
import rx.plugins.RxJavaHooks;
import rx.schedulers.Schedulers;
/**
 * Demo: use the global {@code RxJavaHooks.setOnObservableStart} hook to wrap
 * an {@code OnSubscribe} and observe the signals flowing to the downstream
 * subscriber.  Only observables whose {@code OnSubscribe} class name contains
 * "map" are wrapped here, so the tracker attaches to the {@code map} stage.
 */
public class TrackSubscriber1 {
    @SuppressWarnings("unchecked")
    public static void main(String[] args) throws Exception {
        RxJavaHooks.setOnObservableStart((observable, onSubscribe) -> {
            // Pass through anything that is not a map-stage OnSubscribe.
            if (!onSubscribe.getClass().getName().toLowerCase().contains("map")) {
                return onSubscribe;
            }
            System.out.println("Started");
            // Wrap the original OnSubscribe so a tracking Subscriber sits
            // between it and the real downstream observer.
            return (Observable.OnSubscribe<Object>)observer -> {
                class SignalTracker extends Subscriber<Object> {
                    @Override public void onNext(Object o) {
                        // handle onNext before or after notifying the downstream
                        observer.onNext(o);
                    }
                    @Override public void onError(Throwable t) {
                        // handle onError
                        observer.onError(t);
                    }
                    @Override public void onCompleted() {
                        // handle onComplete
                        System.out.println("Completed");
                        observer.onCompleted();
                    }
                }
                SignalTracker t = new SignalTracker();
                onSubscribe.call(t);
            };
        });
        Observable<Integer> observable = Observable.range(1, 5)
                .subscribeOn(Schedulers.io())
                .observeOn(Schedulers.computation())
                .map(integer -> {
                    // Slow the pipeline down so the tracking output is visible.
                    try {
                        TimeUnit.SECONDS.sleep(1);
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    }
                    return integer * 3;
                });
        observable.subscribe(System.out::println);
        // Keep the JVM alive long enough for the async pipeline to finish.
        Thread.sleep(6000L);
    }
}
| akarnokd/akarnokd-misc | src/main/java/hu/akarnokd/rxjava/TrackSubscriber1.java | Java | apache-2.0 | 1,774 |
package tk.zielony.carbonsamples.feature;
import android.app.Activity;
import android.os.Bundle;
import tk.zielony.carbonsamples.R;
/**
* Created by Marcin on 2016-03-13.
*/
public class TextMarkerActivity extends Activity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Inflate the sample layout demonstrating the text-marker feature.
        setContentView(R.layout.activity_textmarker);
    }
}
| sevoan/Carbon | samples/src/main/java/tk/zielony/carbonsamples/feature/TextMarkerActivity.java | Java | apache-2.0 | 408 |
# Dummy model for the test app: a Stage is keyed by its +reference+ column
# instead of a numeric id, and gains tagging behaviour from Taggable.
class Stage < ActiveRecord::Base
  self.primary_key = 'reference'
  include Taggable
end
| uken/penman | spec/dummy/app/models/stage.rb | Ruby | apache-2.0 | 90 |
// Copyright (c) Microsoft Corporation
//
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER
// EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED WARRANTIES OR CONDITIONS OF TITLE,
// FITNESS FOR A PARTICULAR PURPOSE, MERCHANTABLITY OR NON-INFRINGEMENT.
//
// See the Apache Version 2.0 License for specific language governing permissions and limitations under the License.
namespace Microsoft.Spectrum.Storage.Table.Azure.DataAccessLayer
{
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using Microsoft.Spectrum.Common;
using Microsoft.Spectrum.Storage.DataContracts;
using Microsoft.Spectrum.Storage.Table.Azure.Helpers;
using Microsoft.WindowsAzure.Storage.Table;
using UserRoleType = Microsoft.Spectrum.Storage.Enums.UserRoles;
/// <summary>
/// -----------------------------------------------------------------
/// Namespace: Microsoft.Spectrum.Storage.Table.Azure
/// Class: UserManagementTableOperations
/// Description: class containing operations to deal with user related tables
/// -----------------------------------------------------------------
public class UserManagementTableOperations : IUserManagementTableOperations
{
    private readonly RetryAzureTableOperations<Users> userTableOperations;

    private readonly RetryAzureTableOperations<UserRoles> userRolesTableOperations;

    private readonly RetryAzureTableOperations<WebpagesOAuthMembership> oauthMembershipTableOperations;

    /// <summary>
    /// Initializes a new instance of the <see cref="UserManagementTableOperations"/> class
    /// </summary>
    /// <param name="dataContext">data context containing table references</param>
    public UserManagementTableOperations(AzureTableDbContext dataContext)
    {
        if (dataContext == null)
        {
            throw new ArgumentNullException("dataContext");
        }

        this.userTableOperations = dataContext.UserTableOperations;
        this.userTableOperations.GetTableReference(AzureTableHelper.UsersTable);

        this.userRolesTableOperations = dataContext.UserRoleTableOperations;
        this.userRolesTableOperations.GetTableReference(AzureTableHelper.UserRoleTable);

        this.oauthMembershipTableOperations = dataContext.OAuthMembershipTableOperations;
        this.oauthMembershipTableOperations.GetTableReference(AzureTableHelper.WebpagesOAuthMembershipTable);
    }

    /// <summary>
    /// Gets the user whose membership entry matches the given OAuth provider user id.
    /// </summary>
    /// <param name="providerUserId">provider user id</param>
    /// <returns>user, or null when no membership entry exists</returns>
    public User GetUserByProviderUserId(string providerUserId)
    {
        // Guard against a missing membership entry instead of dereferencing null.
        OAuthMembershipInfo membershipInfo = this.GetMembershipInfoByProviderUserId(providerUserId);
        return membershipInfo == null ? null : this.GetUserById(membershipInfo.UserId);
    }

    /// <summary>
    /// Get user details by account email.
    /// NOTE(review): the row key is built from the user name (see InsertOrUpdateUser),
    /// so this prefix match only works when the account email equals the
    /// normalized user name - confirm intended behaviour.
    /// </summary>
    /// <param name="accountEmail">account email</param>
    /// <returns>user, or null when not found</returns>
    public User GetUserByAccountEmail(string accountEmail)
    {
        if (string.IsNullOrWhiteSpace(accountEmail))
        {
            throw new ArgumentException("accountEmail should not be empty", "accountEmail");
        }

        string likeString = accountEmail.ToLower(CultureInfo.InvariantCulture).Replace(" ", string.Empty);
        Users user = this.userTableOperations.QueryEntities<Users>(x => x.RowKey.IndexOf(likeString, StringComparison.OrdinalIgnoreCase) == 0).FirstOrDefault();
        if (user != null)
        {
            return new User(user.Id, user.UserName, user.FirstName, user.LastName, user.Location, user.Region, user.TimeZoneId, user.PreferredEmail, user.AccountEmail, user.CreatedOn.ToString(), user.UpdatedTime.ToString(), user.Link, user.Gender, user.Address1, user.Address2, user.Phone, user.Country, user.ZipCode, user.PhoneCountryCode, user.SubscribeNotifications);
        }

        return null;
    }

    /// <summary>
    /// Inserts or updates a user. The row key is the normalized (lower-case,
    /// space-stripped) user name plus ":" plus the user id.
    /// </summary>
    /// <param name="user">user details</param>
    public void InsertOrUpdateUser(User user)
    {
        if (user == null)
        {
            throw new ArgumentException("user can not be null", "user");
        }

        Users userTableEntity = new Users
        {
            AccountEmail = user.AccountEmail,
            Address1 = user.Address1,
            Address2 = user.Address2,
            Country = user.Country,
            CreatedOn = Convert.ToDateTime(user.CreatedOn, CultureInfo.InvariantCulture),
            FirstName = user.FirstName,
            Gender = user.Gender,
            LastName = user.LastName,
            Link = user.Link,
            Location = user.Location,
            PartitionKey = Constants.DummyPartitionKey,
            RowKey = user.UserName.Replace(" ", string.Empty).ToLower(CultureInfo.InvariantCulture) + ":" + user.UserId.ToString(CultureInfo.InvariantCulture),
            Phone = user.Phone,
            PhoneCountryCode = user.PhoneCountryCode,
            PreferredEmail = user.PreferredEmail,
            Region = user.Region,
            TimeZone = user.TimeZone,
            TimeZoneId = user.TimeZoneId,
            UpdatedTime = Convert.ToDateTime(user.UpdatedTime, CultureInfo.InvariantCulture),
            UserName = user.UserName,
            ZipCode = user.ZipCode,
            SubscribeNotifications = user.SubscribeNotifications
        };

        this.userTableOperations.InsertOrReplaceEntity(userTableEntity, true);
    }

    /// <summary>
    /// Gets the role a user has for a given measurement station.
    /// </summary>
    /// <param name="userId">user id</param>
    /// <param name="measurementStationId">measurement station id</param>
    /// <returns>user role, or null when none is assigned</returns>
    public UserRole GetUserRole(int userId, Guid measurementStationId)
    {
        UserRoles userRoleEntity = this.userRolesTableOperations.GetByKeys<UserRoles>(measurementStationId.ToString(), userId.ToString(CultureInfo.InvariantCulture)).FirstOrDefault();
        if (userRoleEntity != null)
        {
            return new UserRole(userRoleEntity.UserId, userRoleEntity.Role, userRoleEntity.MeasurementStationId);
        }

        return null;
    }

    /// <summary>
    /// Removes the admin role entry of a user for a measurement station.
    /// NOTE(review): no null check here - deleting a non-existent role entry
    /// will pass null to DeleteEntity; confirm upstream callers guarantee
    /// the entry exists.
    /// </summary>
    /// <param name="userId">user id</param>
    /// <param name="measurementStationId">measurement station id</param>
    public void RemoveAdmin(int userId, Guid measurementStationId)
    {
        UserRoles userRoleEntity = this.userRolesTableOperations.GetByKeys<UserRoles>(measurementStationId.ToString(), userId.ToString(CultureInfo.InvariantCulture)).FirstOrDefault();
        this.userRolesTableOperations.DeleteEntity(userRoleEntity);
    }

    /// <summary>
    /// Get user roles by UserId
    /// </summary>
    /// <param name="userId">user Id</param>
    /// <returns>list of user roles</returns>
    public IEnumerable<UserRole> GetUserRoles(int userId)
    {
        string query = TableQuery.GenerateFilterCondition("RowKey", QueryComparisons.Equal, userId.ToString(CultureInfo.InvariantCulture));
        return this.userRolesTableOperations.ExecuteQueryWithContinuation<UserRoles>(query)
            .Select(x => new UserRole(x.UserId, x.Role, x.MeasurementStationId));
    }

    /// <summary>
    /// insert or update user role
    /// </summary>
    /// <param name="userRole">user role to persist</param>
    public void InsertOrUpdate(UserRole userRole)
    {
        if (userRole == null)
        {
            throw new ArgumentException("userRole can not be null", "userRole");
        }

        UserRoles userRoleEntity = new UserRoles(userRole.MeasurementStationId, userRole.UserId)
        {
            Role = userRole.Role
        };

        this.userRolesTableOperations.InsertOrReplaceEntity(userRoleEntity, true);
    }

    /// <summary>
    /// Get membership info by provider user id
    /// </summary>
    /// <param name="providerUserId">provider user id</param>
    /// <returns>membership info, or null when not found</returns>
    public OAuthMembershipInfo GetMembershipInfoByProviderUserId(string providerUserId)
    {
        // Removed a leftover Debug.WriteLine inside the query predicate.
        WebpagesOAuthMembership membershipEntity = this.oauthMembershipTableOperations.QueryEntities<WebpagesOAuthMembership>(
            x => x.RowKey.IndexOf(providerUserId, StringComparison.OrdinalIgnoreCase) == 0).SingleOrDefault();

        if (membershipEntity != null)
        {
            return new OAuthMembershipInfo(membershipEntity.ProviderUserId, membershipEntity.Provider, membershipEntity.UserId);
        }

        return null;
    }

    /// <summary>
    /// Gets membership info by user id
    /// </summary>
    /// <param name="userId">user id</param>
    /// <returns>membership info, or null when not found</returns>
    public OAuthMembershipInfo GetMembershipInfoByUserId(int userId)
    {
        WebpagesOAuthMembership membershipEntity = this.oauthMembershipTableOperations.QueryEntities<WebpagesOAuthMembership>(x => x.RowKey.IndexOf(userId.ToString(CultureInfo.InvariantCulture), StringComparison.OrdinalIgnoreCase) > 0).SingleOrDefault();

        // Previously dereferenced without a check and threw a NullReferenceException
        // when no entity matched; return null for consistency with the sibling lookup.
        if (membershipEntity == null)
        {
            return null;
        }

        return new OAuthMembershipInfo(membershipEntity.ProviderUserId, membershipEntity.Provider, membershipEntity.UserId);
    }

    /// <summary>
    /// Inserts or update membership info
    /// </summary>
    /// <param name="membershipInfo">membership info</param>
    public void InsertOrUpdateMembershipInfo(OAuthMembershipInfo membershipInfo)
    {
        if (membershipInfo == null)
        {
            throw new ArgumentException("membershipInfo can not be null", "membershipInfo");
        }

        WebpagesOAuthMembership membershipEntity = new WebpagesOAuthMembership(membershipInfo.ProviderUserId, membershipInfo.UserId.ToString(CultureInfo.InvariantCulture), membershipInfo.Provider);
        this.oauthMembershipTableOperations.InsertOrReplaceEntity(membershipEntity, true);
    }

    /// <summary>
    /// Get user details by user id. Matches the ":{id}" suffix of the row key.
    /// </summary>
    /// <param name="userId">user id</param>
    /// <returns>user, or null when not found</returns>
    public User GetUserById(int userId)
    {
        string likeString = ":" + userId.ToString(CultureInfo.InvariantCulture);
        Users user = this.userTableOperations.QueryEntities<Users>(x => x.RowKey.IndexOf(likeString, StringComparison.OrdinalIgnoreCase) > 0).SingleOrDefault();
        if (user != null)
        {
            return new User(user.Id, user.UserName, user.FirstName, user.LastName, user.Location, user.Region, user.TimeZoneId, user.PreferredEmail, user.AccountEmail, user.CreatedOn.ToString(), user.UpdatedTime.ToString(), user.Link, user.Gender, user.Address1, user.Address2, user.Phone, user.Country, user.ZipCode, user.PhoneCountryCode, user.SubscribeNotifications);
        }

        return null;
    }

    /// <summary>
    /// Gets all the Station administrators for the measurement station.
    /// </summary>
    /// <param name="measurementStationId">Measurement Station Id.</param>
    /// <returns>Collection Station administrators.</returns>
    public IEnumerable<User> GetAllStationAdmins(Guid measurementStationId)
    {
        if (measurementStationId == null)
        {
            throw new ArgumentNullException("measurementStationId", "Measurement station id can not be null");
        }

        string stationAdminRole = UserRoleType.StationAdmin.ToString();
        string partitionKey = measurementStationId.ToString();

        IEnumerable<User> stationAdmins = this.userRolesTableOperations.QueryEntities<UserRoles>(
            (user) =>
            {
                return (user.PartitionKey == partitionKey)
                && (string.Compare(user.Role, stationAdminRole, StringComparison.OrdinalIgnoreCase) == 0);
            })
            .Select(stationAdmin => this.GetUserById(stationAdmin.UserId));

        return stationAdmins;
    }

    /// <summary>
    /// Gets all the Site administrators.
    /// </summary>
    /// <returns>Collection site administrators.</returns>
    public IEnumerable<User> GetAllSiteAdmins()
    {
        // Renamed the local (was "stationAdminRole") - it holds the SiteAdmin role.
        string siteAdminRole = UserRoleType.SiteAdmin.ToString();

        IEnumerable<User> siteAdmins = this.userRolesTableOperations.QueryEntities<UserRoles>(
            (user) =>
            {
                // Idea behind having a different partition key for site Administrators instead of measurement station id is to avoid data redundancy that
                // can occur have a corresponding entry for each measurement station for a given user
                return (user.PartitionKey == Constants.SiteAdminsPartitionKey)
                && (string.Compare(user.Role, siteAdminRole, StringComparison.OrdinalIgnoreCase) == 0);
            })
            .Select(siteAdmin => this.GetUserById(siteAdmin.UserId));

        return siteAdmins;
    }

    /// <summary>
    /// Gets a user by exact account email (case-insensitive comparison over
    /// the whole partition).
    /// </summary>
    /// <param name="email">account email</param>
    /// <returns>user, or null when not found</returns>
    public User GetUserByEmail(string email)
    {
        Users user = this.userTableOperations.GetByKeys<Users>(Constants.DummyPartitionKey).Where(x => (string.Compare(x.AccountEmail, email, StringComparison.OrdinalIgnoreCase) == 0)).FirstOrDefault();

        if (user != null)
        {
            return new User(user.Id, user.UserName, user.FirstName, user.LastName, user.Location, user.Region, user.TimeZoneId, user.PreferredEmail, user.AccountEmail, user.CreatedOn.ToString(CultureInfo.InvariantCulture), user.UpdatedTime.ToString(CultureInfo.InvariantCulture), user.Link, user.Gender, user.Address1, user.Address2, user.Phone, user.Country, user.ZipCode, user.PhoneCountryCode, user.SubscribeNotifications);
        }

        return null;
    }
}
| cityscapesc/specobs | main/external/dev/Server/MS.Storage.Table.Azure/DataAccessLayer/UserManagementTableOperations.cs | C# | apache-2.0 | 14,425 |
/* */ package com.hundsun.network.gates.houchao.biz.services.pojo;
/* */
/* */ import org.springframework.context.annotation.Scope;
/* */ import org.springframework.stereotype.Service;
/* */
/**
 * Transfer flow for outgoing funds.
 *
 * <p>Reconstructed from decompiled output; behaviour is defined entirely by
 * the boolean flags this subclass reports to {@code InOutFundTrans}.
 */
@Service("outFundTrans")
@Scope("prototype")
public class OutFundTrans extends InOutFundTrans
{
    /** This flow is a transfer. */
    protected boolean isTrans()
    {
        return true;
    }

    /** Funds flow outward in this transfer. */
    protected boolean isOutFund()
    {
        return true;
    }

    /** Outgoing transfers do not record un-arrived ("uncome") funds. */
    protected boolean isNeedRecordUncomeFund()
    {
        return false;
    }

    /** This flow participates as an in/out transfer. */
    protected boolean isInOutTrans()
    {
        return true;
    }
}
/* Location: E:\__安装归档\linquan-20161112\deploy16\houchao\webroot\WEB-INF\classes\
* Qualified Name: com.hundsun.network.gates.houchao.biz.services.pojo.OutFundTrans
* JD-Core Version: 0.6.0
*/ | hnccfr/ccfrweb | fundcore/src/com/hundsun/network/gates/houchao/biz/services/pojo/OutFundTrans.java | Java | apache-2.0 | 1,022 |
import type { IProviderSettings } from '@spinnaker/core';
import { SETTINGS } from '@spinnaker/core';
// Shape of the `providers.appengine` section of the global settings:
// currently just an optional default account name.
export interface IAppengineProviderSettings extends IProviderSettings {
  defaults: {
    account?: string;
  };
}

// Fall back to an empty `defaults` object when the deployment has no
// appengine provider configured.
export const AppengineProviderSettings: IAppengineProviderSettings = (SETTINGS.providers
  .appengine as IAppengineProviderSettings) || { defaults: {} };

// Attach the hook that restores the provider's original settings
// (supplied by SETTINGS.resetProvider).
if (AppengineProviderSettings) {
  AppengineProviderSettings.resetToOriginal = SETTINGS.resetProvider('appengine');
}
| spinnaker/deck | packages/appengine/src/appengine.settings.ts | TypeScript | apache-2.0 | 491 |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
using System.Web.Optimization;
using System.Web.Routing;
namespace ExemploEFCrud
{
public class MvcApplication : System.Web.HttpApplication
{
    protected void Application_Start()
    {
        // Standard ASP.NET MVC startup: register areas, global filters,
        // routes and script/style bundles, in that order.
        AreaRegistration.RegisterAllAreas();
        FilterConfig.RegisterGlobalFilters(GlobalFilters.Filters);
        RouteConfig.RegisterRoutes(RouteTable.Routes);
        BundleConfig.RegisterBundles(BundleTable.Bundles);
    }
}
}
| carloscds/AspNetCoreOracle | ExemploEFCrud/ExemploEFCrud/Global.asax.cs | C# | apache-2.0 | 574 |
/*
* Copyright 2002-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.web.bind.support;
import org.springframework.beans.PropertyEditorRegistrar;
import org.springframework.core.convert.ConversionService;
import org.springframework.lang.Nullable;
import org.springframework.validation.BindingErrorProcessor;
import org.springframework.validation.MessageCodesResolver;
import org.springframework.validation.Validator;
import org.springframework.web.bind.WebDataBinder;
/**
* Convenient {@link WebBindingInitializer} for declarative configuration
* in a Spring application context. Allows for reusing pre-configured
* initializers with multiple controller/handlers.
*
* @author Juergen Hoeller
* @since 2.5
* @see #setDirectFieldAccess
* @see #setMessageCodesResolver
* @see #setBindingErrorProcessor
* @see #setValidator(Validator)
* @see #setConversionService(ConversionService)
* @see #setPropertyEditorRegistrar
*/
public class ConfigurableWebBindingInitializer implements WebBindingInitializer {

    // Settings applied to every WebDataBinder; all are optional except the
    // two boolean flags, which carry explicit defaults.
    private boolean autoGrowNestedPaths = true;

    private boolean directFieldAccess = false;

    @Nullable
    private MessageCodesResolver messageCodesResolver;

    @Nullable
    private BindingErrorProcessor bindingErrorProcessor;

    @Nullable
    private Validator validator;

    @Nullable
    private ConversionService conversionService;

    @Nullable
    private PropertyEditorRegistrar[] propertyEditorRegistrars;

    /**
     * Set whether a binder should attempt to "auto-grow" a nested path that contains a null value.
     * <p>If "true", a null path location will be populated with a default object value and traversed
     * instead of resulting in an exception. This flag also enables auto-growth of collection elements
     * when accessing an out-of-bounds index.
     * <p>Default is "true" on a standard DataBinder. Note that this feature is only supported
     * for bean property access (DataBinder's default mode), not for field access.
     * @see org.springframework.validation.DataBinder#initBeanPropertyAccess()
     * @see org.springframework.validation.DataBinder#setAutoGrowNestedPaths
     */
    public void setAutoGrowNestedPaths(boolean autoGrowNestedPaths) {
        this.autoGrowNestedPaths = autoGrowNestedPaths;
    }

    /**
     * Return whether a binder should attempt to "auto-grow" a nested path that contains a null value.
     */
    public boolean isAutoGrowNestedPaths() {
        return this.autoGrowNestedPaths;
    }

    /**
     * Set whether to use direct field access instead of bean property access.
     * <p>Default is {@code false}, using bean property access.
     * Switch this to {@code true} in order to enforce direct field access.
     * @see org.springframework.validation.DataBinder#initDirectFieldAccess()
     * @see org.springframework.validation.DataBinder#initBeanPropertyAccess()
     */
    public final void setDirectFieldAccess(boolean directFieldAccess) {
        this.directFieldAccess = directFieldAccess;
    }

    /**
     * Return whether to use direct field access instead of bean property access.
     */
    public boolean isDirectFieldAccess() {
        return this.directFieldAccess;
    }

    /**
     * Set the strategy to use for resolving errors into message codes.
     * Applies the given strategy to all data binders used by this controller.
     * <p>Default is {@code null}, i.e. using the default strategy of
     * the data binder.
     * @see org.springframework.validation.DataBinder#setMessageCodesResolver
     */
    public final void setMessageCodesResolver(@Nullable MessageCodesResolver messageCodesResolver) {
        this.messageCodesResolver = messageCodesResolver;
    }

    /**
     * Return the strategy to use for resolving errors into message codes.
     */
    @Nullable
    public final MessageCodesResolver getMessageCodesResolver() {
        return this.messageCodesResolver;
    }

    /**
     * Set the strategy to use for processing binding errors, that is,
     * required field errors and {@code PropertyAccessException}s.
     * <p>Default is {@code null}, that is, using the default strategy
     * of the data binder.
     * @see org.springframework.validation.DataBinder#setBindingErrorProcessor
     */
    public final void setBindingErrorProcessor(@Nullable BindingErrorProcessor bindingErrorProcessor) {
        this.bindingErrorProcessor = bindingErrorProcessor;
    }

    /**
     * Return the strategy to use for processing binding errors.
     */
    @Nullable
    public final BindingErrorProcessor getBindingErrorProcessor() {
        return this.bindingErrorProcessor;
    }

    /**
     * Set the Validator to apply after each binding step.
     */
    public final void setValidator(@Nullable Validator validator) {
        this.validator = validator;
    }

    /**
     * Return the Validator to apply after each binding step, if any.
     */
    @Nullable
    public final Validator getValidator() {
        return this.validator;
    }

    /**
     * Specify a ConversionService which will apply to every DataBinder.
     * @since 3.0
     */
    public final void setConversionService(@Nullable ConversionService conversionService) {
        this.conversionService = conversionService;
    }

    /**
     * Return the ConversionService which will apply to every DataBinder.
     */
    @Nullable
    public final ConversionService getConversionService() {
        return this.conversionService;
    }

    /**
     * Specify a single PropertyEditorRegistrar to be applied to every DataBinder.
     */
    public final void setPropertyEditorRegistrar(PropertyEditorRegistrar propertyEditorRegistrar) {
        this.propertyEditorRegistrars = new PropertyEditorRegistrar[] {propertyEditorRegistrar};
    }

    /**
     * Specify multiple PropertyEditorRegistrars to be applied to every DataBinder.
     */
    public final void setPropertyEditorRegistrars(@Nullable PropertyEditorRegistrar[] propertyEditorRegistrars) {
        this.propertyEditorRegistrars = propertyEditorRegistrars;
    }

    /**
     * Return the PropertyEditorRegistrars to be applied to every DataBinder.
     */
    @Nullable
    public final PropertyEditorRegistrar[] getPropertyEditorRegistrars() {
        return this.propertyEditorRegistrars;
    }

    @Override
    public void initBinder(WebDataBinder binder) {
        binder.setAutoGrowNestedPaths(this.autoGrowNestedPaths);
        if (this.directFieldAccess) {
            binder.initDirectFieldAccess();
        }
        if (this.messageCodesResolver != null) {
            binder.setMessageCodesResolver(this.messageCodesResolver);
        }
        if (this.bindingErrorProcessor != null) {
            binder.setBindingErrorProcessor(this.bindingErrorProcessor);
        }
        // Install the validator only when there is a bind target and the
        // validator declares support for the target's type.
        if (this.validator != null && binder.getTarget() != null &&
                this.validator.supports(binder.getTarget().getClass())) {
            binder.setValidator(this.validator);
        }
        if (this.conversionService != null) {
            binder.setConversionService(this.conversionService);
        }
        if (this.propertyEditorRegistrars != null) {
            for (PropertyEditorRegistrar propertyEditorRegistrar : this.propertyEditorRegistrars) {
                propertyEditorRegistrar.registerCustomEditors(binder);
            }
        }
    }

}
| spring-projects/spring-framework | spring-web/src/main/java/org/springframework/web/bind/support/ConfigurableWebBindingInitializer.java | Java | apache-2.0 | 7,350 |
/*
* Copyright 2000-2013 JetBrains s.r.o.
* Copyright 2014-2015 AS3Boyan
* Copyright 2014-2014 Elias Ku
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// This is a generated file. Not intended for manual editing.
package com.intellij.plugins.haxe.lang.psi.impl;
import java.util.List;
import org.jetbrains.annotations.*;
import com.intellij.lang.ASTNode;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiElementVisitor;
import com.intellij.psi.util.PsiTreeUtil;
import static com.intellij.plugins.haxe.lang.lexer.HaxeTokenTypes.*;
import com.intellij.plugins.haxe.lang.psi.*;
// NOTE: generated PSI implementation (see file header) -- regenerate from
// the grammar rather than editing by hand.
public class HaxeMultiplicativeExpressionImpl extends HaxeExpressionImpl implements HaxeMultiplicativeExpression {

  public HaxeMultiplicativeExpressionImpl(ASTNode node) {
    super(node);
  }

  // Double-dispatch into HaxeVisitor when possible, else the default PSI visit.
  public void accept(@NotNull PsiElementVisitor visitor) {
    if (visitor instanceof HaxeVisitor) ((HaxeVisitor)visitor).visitMultiplicativeExpression(this);
    else super.accept(visitor);
  }

  @Override
  @NotNull
  public List<HaxeExpression> getExpressionList() {
    return PsiTreeUtil.getChildrenOfTypeAsList(this, HaxeExpression.class);
  }

  @Override
  @Nullable
  public HaxeIfStatement getIfStatement() {
    return findChildByClass(HaxeIfStatement.class);
  }

  @Override
  @Nullable
  public HaxeSwitchStatement getSwitchStatement() {
    return findChildByClass(HaxeSwitchStatement.class);
  }

  @Override
  @Nullable
  public HaxeTryStatement getTryStatement() {
    return findChildByClass(HaxeTryStatement.class);
  }

}
| yanhick/intellij-haxe-nightly-builds | gen/com/intellij/plugins/haxe/lang/psi/impl/HaxeMultiplicativeExpressionImpl.java | Java | apache-2.0 | 2,047 |
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
import re
import subprocess
from pants.backend.codegen.subsystems.thrift_defaults import ThriftDefaults
from pants.base.build_environment import get_buildroot
from pants.base.exceptions import TaskError
from pants.base.workunit import WorkUnitLabel
from pants.binaries.thrift_binary import ThriftBinary
from pants.task.simple_codegen_task import SimpleCodegenTask
from pants.util.dirutil import safe_mkdir
from pants.util.memo import memoized_property
from twitter.common.collections import OrderedSet
from pants.contrib.go.targets.go_thrift_library import GoThriftGenLibrary, GoThriftLibrary
class GoThriftGen(SimpleCodegenTask):
@classmethod
def register_options(cls, register):
  """Register go-thrift options on top of the codegen base options."""
  super(GoThriftGen, cls).register_options(register)

  register('--strict', default=True, fingerprint=True, type=bool,
           help='Run thrift compiler with strict warnings.')
  register('--gen-options', advanced=True, fingerprint=True,
           help='Use these apache thrift go gen options.')
  register('--thrift-import', advanced=True,
           help='Use this thrift-import gen option to thrift.')
  register('--thrift-import-target', advanced=True,
           help='Use this thrift import on symbolic defs.')
@classmethod
def subsystem_dependencies(cls):
  # Thrift defaults plus a thrift binary scoped to this task.
  return (super(GoThriftGen, cls).subsystem_dependencies() +
          (ThriftDefaults, ThriftBinary.Factory.scoped(cls)))

@memoized_property
def _thrift_binary(self):
  # Path to the apache thrift compiler executable (resolved once).
  thrift_binary = ThriftBinary.Factory.scoped_instance(self).create()
  return thrift_binary.path

@memoized_property
def _deps(self):
  # Targets resolved from --thrift-import-target; runtime deps of generated code.
  thrift_import_target = self.get_options().thrift_import_target
  thrift_imports = self.context.resolve(thrift_import_target)
  return thrift_imports

@memoized_property
def _service_deps(self):
  # Extra deps for service-declaring thrift files; falls back to _deps.
  service_deps = self.get_options().get('service_deps')
  return list(self.resolve_deps(service_deps)) if service_deps else self._deps

# Matches a `service Foo` declaration / a `namespace go pkg` directive.
SERVICE_PARSER = re.compile(r'^\s*service\s+(?:[^\s{]+)')
NAMESPACE_PARSER = re.compile(r'^\s*namespace go\s+([^\s]+)', re.MULTILINE)
def _declares_service(self, source):
  """Return True if the thrift file at `source` declares any service."""
  with open(source) as thrift:
    return any(self.SERVICE_PARSER.search(line) for line in thrift)
def _get_go_namespace(self, source):
  """Return the `namespace go` value declared by the thrift file at `source`.

  :raises TaskError: if the file declares no go namespace.
  """
  with open(source) as thrift:
    namespace = self.NAMESPACE_PARSER.search(thrift.read())
  if not namespace:
    # Previously the source path was passed as a second positional arg to
    # TaskError, leaving the `{}` placeholder unformatted in the message.
    raise TaskError('Thrift file {} must contain "namespace go "'.format(source))
  return namespace.group(1)
def synthetic_target_extra_dependencies(self, target, target_workdir):
for source in target.sources_relative_to_buildroot():
if self._declares_service(os.path.join(get_buildroot(), source)):
return self._service_deps
return self._deps
def synthetic_target_type(self, target):
return GoThriftGenLibrary
def is_gentarget(self, target):
return isinstance(target, GoThriftLibrary)
@memoized_property
def _thrift_cmd(self):
cmd = [self._thrift_binary]
thrift_import = 'thrift_import={}'.format(self.get_options().thrift_import)
gen_options = self.get_options().gen_options
if gen_options:
gen_options += ',' + thrift_import
else:
gen_options = thrift_import
cmd.extend(('--gen', 'go:{}'.format(gen_options)))
if self.get_options().strict:
cmd.append('-strict')
if self.get_options().level == 'debug':
cmd.append('-verbose')
return cmd
def _generate_thrift(self, target, target_workdir):
target_cmd = self._thrift_cmd[:]
bases = OrderedSet(tgt.target_base for tgt in target.closure() if self.is_gentarget(tgt))
for base in bases:
target_cmd.extend(('-I', base))
target_cmd.extend(('-o', target_workdir))
all_sources = list(target.sources_relative_to_buildroot())
if len(all_sources) != 1:
raise TaskError('go_thrift_library only supports a single .thrift source file for {}.', target)
source = all_sources[0]
target_cmd.append(os.path.join(get_buildroot(), source))
with self.context.new_workunit(name=source,
labels=[WorkUnitLabel.TOOL],
cmd=' '.join(target_cmd)) as workunit:
result = subprocess.call(target_cmd,
stdout=workunit.output('stdout'),
stderr=workunit.output('stderr'))
if result != 0:
raise TaskError('{} ... exited non-zero ({})'.format(self._thrift_binary, result))
gen_dir = os.path.join(target_workdir, 'gen-go')
src_dir = os.path.join(target_workdir, 'src')
safe_mkdir(src_dir)
go_dir = os.path.join(target_workdir, 'src', 'go')
os.rename(gen_dir, go_dir)
@classmethod
def product_types(cls):
return ['go']
def execute_codegen(self, target, target_workdir):
self._generate_thrift(target, target_workdir)
@property
def _copy_target_attributes(self):
"""Override `_copy_target_attributes` to exclude `provides`."""
return [a for a in super(GoThriftGen, self)._copy_target_attributes if a != 'provides']
def synthetic_target_dir(self, target, target_workdir):
all_sources = list(target.sources_relative_to_buildroot())
source = all_sources[0]
namespace = self._get_go_namespace(source)
return os.path.join(target_workdir, 'src', 'go', namespace.replace(".", os.path.sep))
| cevaris/pants | contrib/go/src/python/pants/contrib/go/tasks/go_thrift_gen.py | Python | apache-2.0 | 5,677 |
// Scrapes Google Play metadata for one app id and stores it under
// ./dataset/<index>/meta.json.
// Usage: node scraper.js <appId> <datasetIndex>
var gplay = require('google-play-scraper');
var fs = require('fs')
var Promise = require('promise');
// CLI arguments: [0] = Play Store app id, [1] = numeric dataset folder name.
var myArgs = process.argv.slice(2);
var passed_appid = myArgs[0];
var passed_appcount = myArgs[1];
console.log(passed_appid);
// Promisified fs helpers ("read" is unused here but kept as-is).
var read = Promise.denodeify(fs.readFile);
var write = Promise.denodeify(fs.writeFile);
var dir = './dataset/' + passed_appcount;
gplay.app({appId: passed_appid})
  .then(function (str) {
    // A serialized result containing "title" is treated as a valid app record.
    if(JSON.stringify(str, null, ' ').indexOf("title") > -1) {
        if (!fs.existsSync(dir)){
            fs.mkdirSync(dir);
        }
        return write(dir + '/meta.json', JSON.stringify(str, null, ' '), 'utf8')
    } else {
        console.log('app doesnt exist');
        return false
    }
  })
.then(function (){process.exit()}); | iresium/apprater | scraper.js | JavaScript | apache-2.0 | 764 |
// Models
// Model for a single search session, identified by its server-issued token.
app.SearchModel = Backbone.Model.extend({
    // The session token acts as this model's id.
    idAttribute: "session_token",

    // REST endpoint for one session: /search/<session_token>.
    urlRoot: function () {
        return '/search/' + this.id;
    }
});
// Collections
// Collection of search sessions. When constructed with a session_token the
// collection scopes its URL to that session; otherwise it hits /search.
app.SearchCollection = Backbone.Collection.extend({
    model: app.SearchModel,
    url: function() {
        // No session id yet -> collection endpoint; otherwise session-scoped.
        if (typeof this.id === 'undefined')
            return '/search';
        else
            return '/search/' + this.id;
    },
    initialize: function(options) {
        // Optional: { session_token: ... } pins this collection to a session.
        if (typeof options != 'undefined')
            this.id = options.session_token;
    }
});
// Views
// Container view bound to the existing #cardList element in the page.
app.cardList = Backbone.View.extend({
    el: '#cardList'
});
// Renders a single card into a fresh <div> using the #card-template
// underscore template, then appends itself to the given list view.
app.cardView = Backbone.View.extend({
    tagName: 'div',
    initialize: function(card) {
        // Raw card data fed to the template at render time.
        this.card = card;
    },
    template: _.template($("#card-template").html()),
    render: function(cardList) {
        this.$el.html(this.template({
            card: this.card
        }));
        this.$el.addClass('card');
        // NOTE(review): render() attaches itself to the passed-in list view
        // instead of leaving attachment to the caller.
        cardList.$el.append(this.el);
        return this;
    }
}); | GOPINATH-GS4/stock | public/lib/cards.js | JavaScript | apache-2.0 | 1,119 |
package com.yuzhou.viewer.service;
import android.content.Context;
import android.os.AsyncTask;
import android.util.Log;
import android.widget.Toast;
import com.google.common.eventbus.EventBus;
import com.loopj.android.http.JsonHttpResponseHandler;
import com.loopj.android.http.SyncHttpClient;
import com.yuzhou.viewer.R;
import com.yuzhou.viewer.model.GoogleImage;
import com.yuzhou.viewer.model.GoogleImageFactory;
import org.apache.http.Header;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.ArrayList;
import java.util.List;
/**
* Created by yuzhou on 2015/08/02.
*/
public class GoogleApiTask extends AsyncTask<ApiParam, Integer, List<GoogleImage>>
{
    // Synchronous HTTP client: its callbacks complete before get() returns.
    private final SyncHttpClient client = new SyncHttpClient();
    // Accumulates parsed images for one execution; cleared in onPreExecute().
    private final List<GoogleImage> items = new ArrayList<>();
    private final EventBus eventBus;
    private final Context context;
    // String resource id of the error to show in onPostExecute(); 0 = no error.
    private int errorResource;
    // eventBus receives the downloaded image list; context is used only for
    // showing error Toasts.
    public GoogleApiTask(EventBus eventBus, Context context)
    {
        this.eventBus = eventBus;
        this.context = context;
    }
    // Performs the HTTP call for the given request. Because `client` is a
    // SyncHttpClient, the handler callbacks below run to completion before
    // `items` is returned.
    private List<GoogleImage> interExecute(ApiParam request)
    {
        Log.d("VIEWER", request.toString());
        client.get(request.getUrl(), request.getParams(), new JsonHttpResponseHandler()
        {
            @Override
            public void onFailure(int statusCode, Header[] headers, Throwable throwable, JSONObject response)
            {
                Log.i("VIEWER", "status code=" + statusCode + ", response=" + response + ", error=" + throwable.getMessage());
                // Remembered here, surfaced as a Toast in onPostExecute().
                errorResource = R.string.error_unavailable_network;
            }
            @Override
            public void onSuccess(int statusCode, Header[] headers, JSONObject response)
            {
                if (response == null) {
                    Log.i("VIEWER", "Response no context");
                    errorResource = R.string.error_server_side;
                    return;
                }
                try {
                    // The API reports its real status inside the JSON body.
                    int httpCode = response.getInt("responseStatus");
                    if (httpCode == 400) {
                        errorResource = R.string.error_data_not_found;
                        Log.d("VIEWER", "response=" + response.getString("responseDetails"));
                        return;
                    }
                    if (httpCode != 200) {
                        errorResource = R.string.error_server_side;
                        Log.d("VIEWER", "response=" + response.getString("responseDetails"));
                        return;
                    }
                    List<GoogleImage> images = GoogleImageFactory.create(response);
                    if (images.isEmpty()) {
                        // Parsed fine but produced nothing usable: treat as server error.
                        Log.i("VIEWER", "Can not parse JSON");
                        Log.d("VIEWER", "response=" + response.toString());
                        errorResource = R.string.error_server_side;
                        return;
                    }
                    items.addAll(images);
                } catch (JSONException e) {
                    // NOTE(review): parse failures are logged but set no
                    // errorResource, so the user gets no Toast in this case.
                    Log.i("VIEWER", "Can not parse JSON");
                    e.printStackTrace();
                }
            }
        });
        return items;
    }
@Override
protected List<GoogleImage> doInBackground(ApiParam... requests)
{
ApiParam request = requests[0];
return interExecute(request);
}
    @Override
    protected void onPreExecute()
    {
        // Drop results from any previous run before starting a new request.
        items.clear();
    }
    @Override
    protected void onPostExecute(List<GoogleImage> googleImages)
    {
        // Surface any error recorded during the request as a Toast.
        if (errorResource > 0) {
            Toast.makeText(context, errorResource, Toast.LENGTH_LONG).show();
        }
        // Publish the (possibly empty) result list. NOTE(review): posts the
        // shared `items` field rather than the `googleImages` parameter.
        eventBus.post(items);
    }
} | yuzhou2/android_02_grid_image_search | app/src/main/java/com/yuzhou/viewer/service/GoogleApiTask.java | Java | apache-2.0 | 3,730 |
<?php
/**
* Smarty Internal Plugin Compile Insert
*
* Compiles the {insert} tag
*
* @package Smarty
* @subpackage Compiler
* @author Uwe Tews
*/
/**
* Smarty Internal Plugin Compile Insert Class
*
* @package Smarty
* @subpackage Compiler
*/
class Smarty_Internal_Compile_Insert extends Smarty_Internal_CompileBase {
    /**
     * Attribute definition: Overwrites base class.
     *
     * @var array
     * @see Smarty_Internal_CompileBase
     */
    public $required_attributes = array('name');
    /**
     * Attribute definition: Overwrites base class.
     *
     * @var array
     * @see Smarty_Internal_CompileBase
     */
    public $shorttag_order = array('name');
    /**
     * Attribute definition: Overwrites base class.
     *
     * @var array
     * @see Smarty_Internal_CompileBase
     */
    public $optional_attributes = array('_any');
    /**
     * Compiles code for the {insert} tag.
     *
     * Resolves the insert handler either from a trusted script file
     * (`script` attribute) or from an `insert_<name>` function / plugin,
     * then emits PHP that invokes it at display time (never cached).
     *
     * @param array  $args     array with attributes from parser
     * @param object $compiler compiler object
     *
     * @return string compiled code
     */
    public function compile($args, $compiler) {
        // check and get attributes
        $_attr = $this->getAttributes($compiler, $args);
        // {insert} output must never be cached, so force nocache handling.
        $compiler->suppressNocacheProcessing = true;
        $compiler->tag_nocache = true;
        $_smarty_tpl = $compiler->template;
        $_name = null;
        $_script = null;
        $_output = '<?php ';
        // Attribute values arrive as compiled PHP expressions; eval resolves
        // the literal value (Smarty-internal convention).
        eval('$_name = ' . $_attr['name'] . ';');
        if (isset($_attr['assign']))
        {
            // output will be stored in a smarty variable instead of being displayed
            $_assign = $_attr['assign'];
            // create variable to make sure that the compiler knows about its nocache status
            $compiler->template->tpl_vars[trim($_attr['assign'], "'")] = new Smarty_Variable(null, true);
        }
        if (isset($_attr['script']))
        {
            // script which must be included
            $_function = "smarty_insert_{$_name}";
            $_smarty_tpl = $compiler->template;
            $_filepath = false;
            eval('$_script = ' . $_attr['script'] . ';');
            if (!isset($compiler->smarty->security_policy) && file_exists($_script))
            {
                $_filepath = $_script;
            }
            else
            {
                // Under a security policy only trusted directories may be searched.
                if (isset($compiler->smarty->security_policy))
                {
                    $_dir = $compiler->smarty->security_policy->trusted_dir;
                }
                else
                {
                    $_dir = $compiler->smarty->trusted_dir;
                }
                if (!empty($_dir))
                {
                    foreach ((array)$_dir as $_script_dir)
                    {
                        $_script_dir = rtrim($_script_dir, '/\\') . DS;
                        if (file_exists($_script_dir . $_script))
                        {
                            $_filepath = $_script_dir . $_script;
                            break;
                        }
                    }
                }
            }
            // BUGFIX: use a strict comparison; a falsy-but-valid path such as
            // "0" would incorrectly match a loose `== false` check.
            if ($_filepath === false)
            {
                $compiler->trigger_template_error("{insert} missing script file '{$_script}'", $compiler->lex->taglineno);
            }
            // code for script file loading
            $_output .= "require_once '{$_filepath}' ;";
            require_once $_filepath;
            if (!is_callable($_function))
            {
                $compiler->trigger_template_error(" {insert} function '{$_function}' is not callable in script file '{$_script}'", $compiler->lex->taglineno);
            }
        }
        else
        {
            $_filepath = 'null';
            $_function = "insert_{$_name}";
            // function in PHP script ?
            if (!is_callable($_function))
            {
                // try plugin
                if (!$_function = $compiler->getPlugin($_name, 'insert'))
                {
                    $compiler->trigger_template_error("{insert} no function or plugin found for '{$_name}'", $compiler->lex->taglineno);
                }
            }
        }
        // delete {insert} standard attributes
        unset($_attr['name'], $_attr['assign'], $_attr['script'], $_attr['nocache']);
        // convert attributes into parameter array string
        $_paramsArray = array();
        foreach ($_attr as $_key => $_value)
        {
            $_paramsArray[] = "'$_key' => $_value";
        }
        $_params = 'array(' . implode(", ", $_paramsArray) . ')';
        // call insert
        if (isset($_assign))
        {
            if ($_smarty_tpl->caching)
            {
                $_output .= "echo Smarty_Internal_Nocache_Insert::compile ('{$_function}',{$_params}, \$_smarty_tpl, '{$_filepath}',{$_assign});?>";
            }
            else
            {
                $_output .= "\$_smarty_tpl->assign({$_assign} , {$_function} ({$_params},\$_smarty_tpl), true);?>";
            }
        }
        else
        {
            $compiler->has_output = true;
            if ($_smarty_tpl->caching)
            {
                $_output .= "echo Smarty_Internal_Nocache_Insert::compile ('{$_function}',{$_params}, \$_smarty_tpl, '{$_filepath}');?>";
            }
            else
            {
                $_output .= "echo {$_function}({$_params},\$_smarty_tpl);?>";
            }
        }
        return $_output;
    }
}
?>
| slayerhover/yaf-cms-backoffice | library/Smarty/sysplugins/smarty_internal_compile_insert.php | PHP | apache-2.0 | 4,415 |
package com.rockhoppertech.music.fx.cmn;
/*
* #%L
* rockymusic-fx
* %%
* Copyright (C) 1996 - 2013 Rockhopper Technologies
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import java.io.IOException;
import javafx.application.Application;
import javafx.fxml.FXMLLoader;
import javafx.scene.Scene;
import javafx.scene.SceneBuilder;
import javafx.scene.layout.AnchorPane;
import javafx.scene.layout.Pane;
import javafx.scene.paint.Color;
import javafx.stage.Screen;
import javafx.stage.Stage;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author <a href="http://genedelisa.com/">Gene De Lisa</a>
*
*/
public class GrandStaffApp extends Application {
    private static final Logger logger = LoggerFactory
            .getLogger(GrandStaffApp.class);
    // Primary stage handed to start(); kept for the configuration helpers below.
    private Stage stage;
    private Scene scene;
    // Root pane loaded from the FXML document.
    private Pane root;
    private GrandStaffAppController controller;
    public static void main(String[] args) throws Exception {
        launch(args);
    }
    // Loads the grand-staff view from FXML into `root` and captures its controller.
    void loadRootFxml() {
        String fxmlFile = "/fxml/GrandStaffPanel.fxml";
        logger.debug("Loading FXML for main view from: {}", fxmlFile);
        try {
            FXMLLoader loader = new FXMLLoader(
                    GrandStaffApp.class.getResource(fxmlFile));
            root = (AnchorPane) loader.load();
            controller = loader.getController();
        } catch (IOException e) {
            // Swallowed after logging; the app continues with a null root.
            logger.error(e.getLocalizedMessage(),e);
        }
    }
    @Override
    public void start(Stage stage) throws Exception {
        this.stage = stage;
        // this.staffModel = new StaffModel();
        // MIDITrack track = MIDITrackBuilder
        // .create()
        // .noteString(
        // "E5 F G Ab G# A B C C6 D Eb F# G A B C7 B4 Bf4 A4 Af4")
        // .durations(5, 4, 3, 2, 1, 1.5, .5, .75, .25, .25)
        // .sequential()
        // .build();
        // this.staffModel.setTrack(track);
        loadRootFxml();
        this.scene = SceneBuilder.create()
                .root(root)
                .fill(Color.web("#1030F0"))
                .stylesheets("/styles/grandStaffApp.css")
                .build();
        this.configureStage();
        logger.debug("started");
    }
    // Applies the title and scene to the primary stage and shows it.
    private void configureStage() {
        stage.setTitle("Music Notation");
        // fullScreen();
        stage.setScene(this.scene);
        stage.show();
    }
    // Currently unused (see commented-out call above): sizes the stage to the
    // primary screen's visual bounds.
    private void fullScreen() {
        // make it full screen
        stage.setX(0);
        stage.setY(0);
        stage.setWidth(Screen.getPrimary().getVisualBounds().getWidth());
        stage.setHeight(Screen.getPrimary().getVisualBounds().getHeight());
    }
}
| genedelisa/rockymusic | rockymusic-fx/src/main/java/com/rockhoppertech/music/fx/cmn/GrandStaffApp.java | Java | apache-2.0 | 3,230 |
// HTML-escapes a string by round-tripping it through a detached element's
// text content; jQuery performs the actual entity encoding.
function htmlEncode(value){
    var container = $('<div/>');
    container.text(value);
    return container.html();
}
// Angular module/controller for the OAS assets snippet pages.
var app = angular.module("oasassets",[]).controller("snippetsController",function($scope){
    // HTML-escapes the contents of the element with the given id, then
    // re-runs highlight.js over every <pre><code> block on the page.
    $scope.snippet = function(item){
        var elem = $("#"+item);
        var contents = elem.html().trim();
        elem.html(htmlEncode(contents));
        $('pre code').each(function(i, block) {
            hljs.highlightBlock(block);
        });
    };
    // Initializes the sidebar via the metisMenu jQuery plugin.
    $scope.loadMenu = function(){
        $('#side-menu').metisMenu();
    };
});
| bizoru/assets-oas | js/app.js | JavaScript | apache-2.0 | 510 |
package org.tuxdevelop.spring.batch.lightmin.server.cluster.configuration;
import lombok.Data;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.context.properties.NestedConfigurationProperty;
/**
 * Type-safe binding of the
 * {@code spring.batch.lightmin.server.cluster.infinispan.*} properties.
 */
@Data
@ConfigurationProperties(prefix = "spring.batch.lightmin.server.cluster.infinispan")
public class InfinispanServerClusterConfigurationProperties {
    // Nested "repository" property group; defaults to all-disabled.
    @NestedConfigurationProperty
    private RepositoryConfigurationProperties repository = new RepositoryConfigurationProperties();
    @Data
    static class RepositoryConfigurationProperties {
        // Presumably toggles initialization of the scheduler-execution
        // repository — confirm against the repository setup code.
        private Boolean initSchedulerExecutionRepository = Boolean.FALSE;
        // Presumably toggles initialization of the scheduler-configuration
        // repository — confirm against the repository setup code.
        private Boolean initSchedulerConfigurationRepository = Boolean.FALSE;
    }
}
| tuxdevelop/spring-batch-lightmin | spring-batch-lightmin-server/spring-batch-lightmin-server-cluster/spring-batch-lightmin-server-cluster-infinispan/src/main/java/org/tuxdevelop/spring/batch/lightmin/server/cluster/configuration/InfinispanServerClusterConfigurationProperties.java | Java | apache-2.0 | 765 |
<?php
// Map of "interface locale;regional settings" keys to display names.
// ksort keeps the options in a predictable, key-sorted order.
$this->Nm_lang_conf_region = array(
    'en_us;en_us' => "English (United States)",
    'es;es_es'    => "Spanish (Spain)",
    'pt_br;pt_br' => "Portuguese (Brazil)",
);
ksort($this->Nm_lang_conf_region);
?> | oprohonnyi/php_trn | 5_dev_tools/scriptcase/PHPTrn/_lib/lang/lang_config_region.php | PHP | apache-2.0 | 289 |
module SchemaEvolutionManager
  # Container for common args, mainly to have stricter validation on
  # inputs. Tried to use GetoptLong but could not write solid unit
  # tests around it... so we have our own internal simple implementation.
  class Args
    # Guard so reloading this file does not emit constant-redefinition warnings.
    if !defined?(FLAGS_WITH_ARGUMENTS)
      # Flags that consume the next command-line token as their value.
      FLAGS_WITH_ARGUMENTS = {
        :artifact_name => "Specifies the name of the artifact. Tag will be appended to this name",
        :user => "Connect to the database as this username instead of the default",
        :host => "Specifies the host name of the machine on which the server is running",
        :port => "Specifies the port on which the server is running",
        :name => "Specifies the name of the database to which to connect",
        :url => "The connection string for the psql database",
        :dir => "Path to a directory",
        :tag => "A git tag (e.g. 0.0.1)",
        :prefix => "Configure installer to use this prefix",
        :set => "Passthrough for postgresql --set argument"
      }
      # Boolean flags that take no value.
      FLAGS_NO_ARGUMENTS = {
        :password => "Prompt user to enter password for the database user. Password is stored for the duration of the process",
        :dry_run => "Include flag to echo commands that will run without actually executing them",
        :help => "Display help",
        :verbose => "Enable verbose logging of all system calls",
      }
    end
    attr_reader :artifact_name, :host, :port, :name, :prefix, :url, :user, :dir, :dry_run, :tag, :password, :set
    # args: Actual string arguments
    # :required => list of parameters that are required
    # :optional => list of parameters that are optional
    def initialize(args, opts={})
      Preconditions.assert_class_or_nil(args, String)
      required = (opts.delete(:required) || []).map { |flag| format_flag(flag) }
      optional = (opts.delete(:optional) || []).map { |flag| format_flag(flag) }
      Preconditions.assert_class(required, Array)
      Preconditions.assert_class(optional, Array)
      Preconditions.assert_empty_opts(opts)
      Preconditions.check_state(optional.size + required.size > 0,
                                "Must have at least 1 optional or required parameter")
      # --help and --verbose are always accepted, even if the caller did not
      # list them explicitly.
      if !optional.include?(:help)
        optional << :help
      end
      if !optional.include?(:verbose)
        optional << :verbose
      end
      found_arguments = parse_string_arguments(args)
      missing = required.select { |field| blank?(found_arguments[field]) }
      @artifact_name = found_arguments.delete(:artifact_name)
      @host = found_arguments.delete(:host)
      @port = found_arguments.delete(:port)
      @name = found_arguments.delete(:name)
      @prefix = found_arguments.delete(:prefix)
      @url = found_arguments.delete(:url)
      @user = found_arguments.delete(:user)
      @dir = found_arguments.delete(:dir)
      @tag = found_arguments.delete(:tag)
      @set = found_arguments.delete(:set)
      @dry_run = found_arguments.delete(:dry_run)
      @password = found_arguments.delete(:password)
      @help = found_arguments.delete(:help)
      @verbose = found_arguments.delete(:verbose)
      # Every recognized flag must have been consumed above.
      Preconditions.check_state(found_arguments.empty?,
                                "Did not handle all flags: %s" % found_arguments.keys.join(" "))
      if @help
        RdocUsage.printAndExit(0)
      end
      if @verbose
        Library.set_verbose(true)
      end
      if !missing.empty?
        missing_fields_error(required, optional, missing)
      end
    end
    # Hack to minimize bleeding from STDIN. Returns an instance of Args class
    # NOTE(review): joining ARGV with spaces loses the original tokenization,
    # so argument values containing spaces are not supported here.
    def Args.from_stdin(opts)
      values = ARGV.join(" ")
      Args.new(values, opts)
    end
    private
    # True when the value is nil or only whitespace.
    def blank?(value)
      value.to_s.strip == ""
    end
    # Prints a "missing parameters" report plus usage help, then exits(1).
    def missing_fields_error(required, optional, fields)
      Preconditions.assert_class(fields, Array)
      Preconditions.check_state(!fields.empty?, "Missing fields cannot be empty")
      title = fields.size == 1 ? "Missing parameter" : "Missing parameters"
      sorted = fields.sort_by { |f| f.to_s }
      puts "**************************************************"
      puts "ERROR: #{title}: #{sorted.join(", ")}"
      puts "**************************************************"
      puts help_parameters("Required parameters", required)
      puts help_parameters("Optional parameters", optional)
      exit(1)
    end
    # Renders a titled help section for the given flags, pulling each flag's
    # documentation from the FLAGS_* tables above.
    def help_parameters(title, parameters)
      docs = []
      if !parameters.empty?
        docs << ""
        docs << title
        docs << "-------------------"
        parameters.each do |flag|
          documentation = FLAGS_WITH_ARGUMENTS[flag] || FLAGS_NO_ARGUMENTS[flag]
          Preconditions.check_not_null(documentation, "No documentation found for flag[%s]" % flag)
          docs << "  --#{flag}"
          docs << "    " + documentation
          docs << ""
        end
      end
      docs.join("\n")
    end
    # Tokenizes the argument string on whitespace; flags in
    # FLAGS_WITH_ARGUMENTS consume the following token as their value.
    # Raises on any flag not present in either table.
    def parse_string_arguments(args)
      Preconditions.assert_class_or_nil(args, String)
      found = {}
      index = 0
      values = args.to_s.strip.split(/\s+/)
      while index < values.length do
        flag = format_flag(values[index])
        index += 1
        if FLAGS_WITH_ARGUMENTS.has_key?(flag)
          found[flag] = values[index]
          index += 1
        elsif FLAGS_NO_ARGUMENTS.has_key?(flag)
          found[flag] = true
        else
          raise "Unknown flag[%s]" % flag
        end
      end
      found
    end
    # Strip leading dashes and convert to symbol
    def format_flag(flag)
      Preconditions.assert_class(flag, String)
      flag.sub(/^\-\-/, '').to_sym
    end
  end
end
| mbryzek/schema-evolution-manager | lib/schema-evolution-manager/args.rb | Ruby | apache-2.0 | 5,648 |
<?php
namespace Ajax\semantic\components\validation;
use Ajax\service\AjaxCall;
use Ajax\JsUtils;
/**
* @author jc
* @version 1.001
* Generates a JSON Rule for the validation of a field
*/
class Rule implements \JsonSerializable{
	/**
	 * @var string
	 */
	private $type;
	/**
	 * @var string
	 */
	private $prompt;
	/**
	 * @var string
	 */
	private $value;
	public function __construct($type,$prompt=NULL,$value=NULL){
		$this->type=$type;
		$this->prompt=$prompt;
		$this->value=$value;
	}
	public function getType() {
		return $this->type;
	}
	public function setType($type) {
		$this->type=$type;
		return $this;
	}
	public function getPrompt() {
		return $this->prompt;
	}
	public function setPrompt($prompt) {
		$this->prompt=$prompt;
		return $this;
	}
	public function getValue() {
		return $this->value;
	}
	public function setValue($value) {
		$this->value=$value;
		return $this;
	}
	// Serialized shape consumed by Semantic UI: {type, prompt?, value?};
	// prompt and value are omitted when not set.
	#[\ReturnTypeWillChange]
	public function jsonSerialize() {
		$result= ["type"=>$this->type];
		if(isset($this->prompt))
			$result["prompt"]=$this->prompt;
		if(isset($this->value))
			$result["value"]=$this->value;
		return $result;
	}
	/**
	 * A field should match the value of another validation field, for example to confirm passwords
	 * @param string $name
	 * @param string $prompt
	 * @return \Ajax\semantic\components\validation\Rule
	 */
	public static function match($name,$prompt=NULL){
		return new Rule("match[".$name."]",$prompt);
	}
	/**
	 * A field should be different than another specified field
	 * @param string $name
	 * @param string $prompt
	 * @return \Ajax\semantic\components\validation\Rule
	 */
	public static function different($name,$prompt=NULL){
		return new Rule("different[".$name."]",$prompt);
	}
	/**
	 * A field is an integer value, or matches an integer range
	 * @param int|NULL $min
	 * @param int|NULL $max
	 * @param string $prompt
	 * @return \Ajax\semantic\components\validation\Rule
	 */
	public static function integer($min=NULL,$max=NULL,$prompt=NULL){
		if(\is_int($min) && \is_int($max))
			return new Rule("integer[{$min}..{$max}]",$prompt);
		return new Rule("integer",$prompt);
	}
	/* --------------------------------------------------------------------
	 * Simple factory helpers: each returns a Rule whose type string maps
	 * directly onto a Semantic UI form-validation rule, embedding the
	 * checked value in the type string (e.g. "is[foo]") where applicable.
	 * -------------------------------------------------------------------- */
	public static function decimal($prompt=NULL){
		return new Rule("decimal",$prompt);
	}
	public static function number($prompt=NULL){
		return new Rule("number",$prompt);
	}
	public static function is($value,$prompt=NULL){
		return new Rule("is[".$value."]",$prompt);
	}
	public static function isExactly($value,$prompt=NULL){
		return new Rule("isExactly[".$value."]",$prompt);
	}
	public static function not($value,$prompt=NULL){
		return new Rule("not[".$value."]",$prompt);
	}
	public static function notExactly($value,$prompt=NULL){
		return new Rule("notExactly[".$value."]",$prompt);
	}
	public static function contains($value,$prompt=NULL){
		return new Rule("contains[".$value."]",$prompt);
	}
	public static function containsExactly($value,$prompt=NULL){
		return new Rule("containsExactly[".$value."]",$prompt);
	}
	public static function doesntContain($value,$prompt=NULL){
		return new Rule("doesntContain[".$value."]",$prompt);
	}
	public static function doesntContainExactly($value,$prompt=NULL){
		return new Rule("doesntContainExactly[".$value."]",$prompt);
	}
	public static function minCount($value,$prompt=NULL){
		return new Rule("minCount[".$value."]",$prompt);
	}
	public static function maxCount($value,$prompt=NULL){
		return new Rule("maxCount[".$value."]",$prompt);
	}
	public static function exactCount($value,$prompt=NULL){
		return new Rule("exactCount[".$value."]",$prompt);
	}
	public static function email($prompt=NULL){
		return new Rule("email",$prompt);
	}
	public static function url($prompt=NULL){
		return new Rule("url",$prompt);
	}
	// Unlike the helpers above, regExp passes the pattern via the separate
	// "value" member of the serialized rule rather than the type string.
	public static function regExp($value,$prompt=NULL){
		return new Rule("regExp",$prompt,$value);
	}
	// Returns a JS snippet (not a Rule) registering a custom validation
	// function under $.fn.form.settings.rules.<name>.
	public static function custom($name,$jsFunction){
		return "$.fn.form.settings.rules.".$name." =".$jsFunction ;
	}
	// Builds a custom rule whose validation runs a synchronous AJAX call;
	// the request's jsCallback is expected to set `result`.
	public static function ajax(JsUtils $js,$name,$url,$params,$jsCallback,$method="post",$parameters=[]){
		$parameters=\array_merge(["async"=>false,"url"=>$url,"params"=>$params,"hasLoader"=>false,"jsCallback"=>$jsCallback,"dataType"=>"json","stopPropagation"=>false,"preventDefault"=>false,"responseElement"=>null],$parameters);
		$ajax=new AjaxCall($method, $parameters);
		return self::custom($name, "function(value,ruleValue){var result=true;".$ajax->compile($js)."return result;}");
	}
}
| phpMv/phpMv-UI | Ajax/semantic/components/validation/Rule.php | PHP | apache-2.0 | 4,448 |
import torch
from deluca.lung.core import Controller, LungEnv
class PIDCorrection(Controller):
    """Wraps a base controller and adds a PID-style correction term.

    The correction is driven by the error between a lung simulator's
    predicted pressure and the observed pressure ``state``::

        u_in = clamp(u_base + K[0] * err + K[1] * I, 0, 100)

    where ``I`` is an exponentially-decayed running integral of the error
    and the simulator is advanced in lock-step with the applied action.

    Args:
        base_controller: controller producing the uncorrected (u_in, u_out).
        sim: simulator whose ``pressure`` attribute provides the model
            prediction; stepped with each corrected action.
        pid_K: two-element gain sequence [proportional, integral];
            defaults to [0.0, 0.0] (no correction).
        decay: EWMA coefficient for the error integral.
    """

    def __init__(self, base_controller: Controller, sim: LungEnv, pid_K=None, decay=0.1, **kwargs):
        # BUGFIX: the original signature used a shared mutable default
        # (pid_K=[0.0, 0.0]); a None sentinel avoids cross-instance sharing
        # while preserving the default behavior.
        self.base_controller = base_controller
        self.sim = sim
        self.I = 0.0  # running (EWMA) integral of the model error
        self.K = [0.0, 0.0] if pid_K is None else pid_K
        self.decay = decay
        self.reset()

    def reset(self):
        """Reset the wrapped controller, the simulator, and the integrator."""
        self.base_controller.reset()
        self.sim.reset()
        self.I = 0.0

    def compute_action(self, state, t):
        """Return the corrected (u_in, u_out) for observed pressure ``state`` at time ``t``."""
        u_in_base, u_out = self.base_controller(state, t)
        # Error between the simulator's predicted pressure and the observation.
        err = self.sim.pressure - state
        # Exponentially-weighted integral of the error.
        self.I = self.I * (1 - self.decay) + err * self.decay
        pid_correction = self.K[0] * err + self.K[1] * self.I
        # Valve commands are limited to [0, 100].
        u_in = torch.clamp(u_in_base + pid_correction, min=0.0, max=100.0)
        # Keep the simulator in sync with the applied action.
        self.sim(u_in, u_out, t)
        return u_in, u_out
| google/deluca-lung | deluca/lung/experimental/controllers/_pid_correction.py | Python | apache-2.0 | 874 |
/* ====================================================================
Copyright (C) 2004-2008 fyiReporting Software, LLC
This file is part of the fyiReporting RDL project.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
For additional information, email info@fyireporting.com or visit
the website www.fyiReporting.com.
*/
using System;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Drawing;
using System.Data;
using System.Windows.Forms;
using System.Xml;
using System.Text;
using System.IO;
using fyiReporting.RDL;
namespace fyiReporting.RdlDesign
{
/// <summary>
/// DialogValidValues allow user to provide ValidValues: Value and Label lists
/// </summary>
internal class DialogValidValues : System.Windows.Forms.Form
{
private DataTable _DataTable;
private DataGridTextBoxColumn dgtbLabel;
private DataGridTextBoxColumn dgtbValue;
private System.Windows.Forms.DataGridTableStyle dgTableStyle;
private System.Windows.Forms.DataGrid dgParms;
private System.Windows.Forms.Button bOK;
private System.Windows.Forms.Button bCancel;
private System.Windows.Forms.Button bDelete;
/// <summary>
/// Required designer variable.
/// </summary>
private System.ComponentModel.Container components = null;
internal DialogValidValues(List<ParameterValueItem> list)
{
// This call is required by the Windows.Forms Form Designer.
InitializeComponent();
// Initialize form using the style node values
InitValues(list);
}
internal List<ParameterValueItem> ValidValues
{
get
{
List<ParameterValueItem> list = new List<ParameterValueItem>();
foreach (DataRow dr in _DataTable.Rows)
{
if (dr[0] == DBNull.Value)
continue;
string val = (string) dr[0];
if (val.Length <= 0)
continue;
string label;
if (dr[1] == DBNull.Value)
label = null;
else
label = (string) dr[1];
ParameterValueItem pvi = new ParameterValueItem();
pvi.Value = val;
pvi.Label = label;
list.Add(pvi);
}
return list.Count > 0? list: null;
}
}
private void InitValues(List<ParameterValueItem> list)
{
// Initialize the DataGrid columns
dgtbLabel = new DataGridTextBoxColumn();
dgtbValue = new DataGridTextBoxColumn();
this.dgTableStyle.GridColumnStyles.AddRange(new DataGridColumnStyle[] {
this.dgtbValue,
this.dgtbLabel});
//
// dgtbFE
//
dgtbValue.HeaderText = "Value";
dgtbValue.MappingName = "Value";
dgtbValue.Width = 75;
//
// dgtbValue
//
this.dgtbLabel.HeaderText = "Label";
this.dgtbLabel.MappingName = "Label";
this.dgtbLabel.Width = 75;
// Initialize the DataGrid
//this.dgParms.DataSource = _dsv.QueryParameters;
_DataTable = new DataTable();
_DataTable.Columns.Add(new DataColumn("Value", typeof(string)));
_DataTable.Columns.Add(new DataColumn("Label", typeof(string)));
string[] rowValues = new string[2];
if (list != null)
foreach (ParameterValueItem pvi in list)
{
rowValues[0] = pvi.Value;
rowValues[1] = pvi.Label;
_DataTable.Rows.Add(rowValues);
}
this.dgParms.DataSource = _DataTable;
////
DataGridTableStyle ts = dgParms.TableStyles[0];
ts.GridColumnStyles[0].Width = 140;
ts.GridColumnStyles[1].Width = 140;
}
/// <summary>
/// Clean up any resources being used.
/// </summary>
protected override void Dispose( bool disposing )
{
if( disposing )
{
if(components != null)
{
components.Dispose();
}
}
base.Dispose( disposing );
}
#region Component Designer generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent()
{
this.dgParms = new System.Windows.Forms.DataGrid();
this.dgTableStyle = new System.Windows.Forms.DataGridTableStyle();
this.bOK = new System.Windows.Forms.Button();
this.bCancel = new System.Windows.Forms.Button();
this.bDelete = new System.Windows.Forms.Button();
((System.ComponentModel.ISupportInitialize)(this.dgParms)).BeginInit();
this.SuspendLayout();
//
// dgParms
//
this.dgParms.CaptionVisible = false;
this.dgParms.DataMember = "";
this.dgParms.HeaderForeColor = System.Drawing.SystemColors.ControlText;
this.dgParms.Location = new System.Drawing.Point(8, 8);
this.dgParms.Name = "dgParms";
this.dgParms.Size = new System.Drawing.Size(320, 168);
this.dgParms.TabIndex = 2;
this.dgParms.TableStyles.AddRange(new System.Windows.Forms.DataGridTableStyle[] {
this.dgTableStyle});
//
// dgTableStyle
//
this.dgTableStyle.AllowSorting = false;
this.dgTableStyle.DataGrid = this.dgParms;
this.dgTableStyle.HeaderForeColor = System.Drawing.SystemColors.ControlText;
this.dgTableStyle.MappingName = "";
//
// bOK
//
this.bOK.DialogResult = System.Windows.Forms.DialogResult.OK;
this.bOK.Location = new System.Drawing.Point(216, 192);
this.bOK.Name = "bOK";
this.bOK.TabIndex = 3;
this.bOK.Text = "OK";
//
// bCancel
//
this.bCancel.DialogResult = System.Windows.Forms.DialogResult.Cancel;
this.bCancel.Location = new System.Drawing.Point(312, 192);
this.bCancel.Name = "bCancel";
this.bCancel.TabIndex = 4;
this.bCancel.Text = "Cancel";
//
// bDelete
//
this.bDelete.Location = new System.Drawing.Point(336, 16);
this.bDelete.Name = "bDelete";
this.bDelete.Size = new System.Drawing.Size(48, 23);
this.bDelete.TabIndex = 5;
this.bDelete.Text = "Delete";
this.bDelete.Click += new System.EventHandler(this.bDelete_Click);
//
// DialogValidValues
//
this.AcceptButton = this.bOK;
this.AutoScaleBaseSize = new System.Drawing.Size(5, 13);
this.CancelButton = this.bCancel;
this.ClientSize = new System.Drawing.Size(392, 222);
this.ControlBox = false;
this.Controls.Add(this.bDelete);
this.Controls.Add(this.bCancel);
this.Controls.Add(this.bOK);
this.Controls.Add(this.dgParms);
this.MaximizeBox = false;
this.MinimizeBox = false;
this.Name = "DialogValidValues";
this.ShowInTaskbar = false;
this.SizeGripStyle = System.Windows.Forms.SizeGripStyle.Hide;
this.Text = "Valid Values";
((System.ComponentModel.ISupportInitialize)(this.dgParms)).EndInit();
this.ResumeLayout(false);
}
#endregion
// Deletes the row currently selected in the valid-values grid.
// Removes the backing DataRow; the data-bound grid (dgParms) refreshes
// itself automatically.
// NOTE(review): assumes dgParms.CurrentRowIndex is valid — presumably the
// button is only enabled when a row is selected; confirm against the form.
private void bDelete_Click(object sender, System.EventArgs e)
{
    this._DataTable.Rows.RemoveAt(this.dgParms.CurrentRowIndex);
}
}
}
| gregberns/ZipRdlProjectDev410 | src/RdlDesign/DialogValidValues.cs | C# | apache-2.0 | 7,188 |
package com.huawei.esdk.fusionmanager.local.model.system;
import com.huawei.esdk.fusionmanager.local.model.FMSDKResponse;
/**
* 查询计划任务详情返回信息。
* <p>
* @since eSDK Cloud V100R003C30
*/
public class QueryScheduleTaskDetailResp extends FMSDKResponse
{
    /**
     * The schedule task returned by the detail query.
     */
    private ScheduleTask scheduleTask;

    /**
     * Returns the schedule task carried by this response.
     *
     * @return the schedule task, or {@code null} if none was set
     */
    public ScheduleTask getScheduleTask()
    {
        return this.scheduleTask;
    }

    /**
     * Sets the schedule task carried by this response.
     *
     * @param task the schedule task to store
     */
    public void setScheduleTask(ScheduleTask task)
    {
        this.scheduleTask = task;
    }
}
| eSDK/esdk_cloud_fm_r3_native_java | source/FM/V1R5/esdk_fm_neadp_1.5_native_java/src/main/java/com/huawei/esdk/fusionmanager/local/model/system/QueryScheduleTaskDetailResp.java | Java | apache-2.0 | 571 |
//Copyright (c) 2014 by Disy Informationssysteme GmbH
package net.disy.eenvplus.tfes.modules.sparql.expression;
import com.hp.hpl.jena.sparql.expr.E_LogicalAnd;
import com.hp.hpl.jena.sparql.expr.E_LogicalOr;
import com.hp.hpl.jena.sparql.expr.Expr;
import com.hp.hpl.jena.sparql.syntax.ElementFilter;
// NOT_PUBLISHED
/**
 * Fluent builder that combines SPARQL {@link Expr} instances with logical
 * AND/OR connectives. {@code null} expressions are silently ignored, which
 * lets callers chain optional filter fragments without null checks.
 */
public class SparqlExpressionBuilder {

  /** The expression accumulated so far; never null after construction. */
  private Expr current;

  private SparqlExpressionBuilder(Expr expression) {
    this.current = expression;
  }

  /** Starts a builder from the given initial expression. */
  public static SparqlExpressionBuilder use(Expr expression) {
    return new SparqlExpressionBuilder(expression);
  }

  /** Returns the combined expression built so far. */
  public Expr toExpr() {
    return current;
  }

  /** Wraps the combined expression in a SPARQL FILTER element. */
  public ElementFilter toElementFilter() {
    return new ElementFilter(current);
  }

  /** ANDs the given expression onto the current one; {@code null} is ignored. */
  public SparqlExpressionBuilder and(Expr expression) {
    if (expression != null) {
      current = new E_LogicalAnd(current, expression);
    }
    return this;
  }

  /** ANDs the expression only when {@code condition} is {@code true}. */
  public SparqlExpressionBuilder and(Expr expression, boolean condition) {
    if (condition) {
      return and(expression);
    }
    return this;
  }

  /** ORs the given expression onto the current one; {@code null} is ignored. */
  public SparqlExpressionBuilder or(Expr expression) {
    if (expression != null) {
      current = new E_LogicalOr(current, expression);
    }
    return this;
  }

  /**
   * ORs the expression only when {@code condition} is {@code true}.
   * Added for API symmetry with {@link #and(Expr, boolean)}.
   */
  public SparqlExpressionBuilder or(Expr expression, boolean condition) {
    if (condition) {
      return or(expression);
    }
    return this;
  }
}
| eENVplus/tf-exploitation-server | TF_Exploitation_Server_modules/src/main/java/net/disy/eenvplus/tfes/modules/sparql/expression/SparqlExpressionBuilder.java | Java | apache-2.0 | 1,223 |
// chai-jquery plugin (vendored, v1.0.0): adds jQuery-aware assertions to
// chai — attr/data/class/id/html/text/value methods, state properties
// (visible, hidden, ...), and jQuery-aware overrides of the built-in
// exist / be / match / contain / have assertions.
(function (chaiJquery) {
  // Module systems magic dance.
  if (typeof require === "function" && typeof exports === "object" && typeof module === "object") {
    // NodeJS
    module.exports = chaiJquery;
  } else if (typeof define === "function" && define.amd) {
    // AMD
    define(function () {
      return chaiJquery;
    });
  } else {
    // Other environment (usually <script> tag): pass into global chai.
    // (false || eval)("this") is an indirect eval, which returns the true
    // global object even in strict-mode wrappers.
    var global = (false || eval)("this");
    global.chai.use(chaiJquery);
  }
}(function (chai, utils) {
  var inspect = utils.inspect,
      flag = utils.flag;

  // Render a jQuery selection as an HTML string for assertion messages.
  // `depth` limits how deeply child markup is expanded; anything deeper
  // is collapsed to '...'.
  jQuery.fn.inspect = function (depth) {
    var el = jQuery('<div />').append(this.clone());
    if (depth) {
      var children = el.children();
      while (depth-- > 0)
        children = children.children();
      children.html('...');
    }
    return el.html();
  };

  // expect(el).to.have.attr(name[, value])
  // With no value: asserts presence of the attribute.
  // With a value: asserts strict equality; also re-flags the subject to the
  // attribute's value so assertions can be chained onto it.
  chai.Assertion.addMethod('attr', function (name, val) {
    var actual = flag(this, 'object').attr(name);

    if (!flag(this, 'negate') || undefined === val) {
      this.assert(
          undefined !== actual
        , 'expected #{this} to have a #{exp} attribute'
        , 'expected #{this} not to have a #{exp} attribute'
        , name
      );
    }

    if (undefined !== val) {
      this.assert(
          val === actual
        , 'expected #{this} to have a ' + inspect(name) + ' attribute with the value #{exp}, but the value was #{act}'
        , 'expected #{this} not to have a ' + inspect(name) + ' attribute with the value #{act}'
        , val
        , actual
      );
    }

    flag(this, 'object', actual);
  });

  // expect(el).to.have.data(name[, value]) — delegates to chai's property
  // assertion over the element's full .data() map.
  chai.Assertion.addMethod('data', function (name, val) {
    // Work around a chai bug (https://github.com/logicalparadox/chai/issues/16)
    if (flag(this, 'negate') && undefined !== val && undefined === flag(this, 'object').data(name)) {
      return;
    }

    var assertion = new chai.Assertion(flag(this, 'object').data());
    if (flag(this, 'negate'))
      assertion = assertion.not;
    assertion.property(name, val);
  });

  // expect(el).to.have.class(className)
  chai.Assertion.addMethod('class', function (className) {
    this.assert(
        flag(this, 'object').hasClass(className)
      , 'expected #{this} to have class #{exp}'
      , 'expected #{this} not to have class #{exp}'
      , className
    );
  });

  // expect(el).to.have.id(id) — strict equality on the id attribute.
  chai.Assertion.addMethod('id', function (id) {
    this.assert(
        flag(this, 'object').attr('id') === id
      , 'expected #{this} to have id #{exp}'
      , 'expected #{this} not to have id #{exp}'
      , id
    );
  });

  // expect(el).to.have.html(html) — exact inner-HTML match.
  chai.Assertion.addMethod('html', function (html) {
    this.assert(
        flag(this, 'object').html() === html
      , 'expected #{this} to have HTML #{exp}'
      , 'expected #{this} not to have HTML #{exp}'
      , html
    );
  });

  // expect(el).to.have.text(text) — exact text-content match.
  chai.Assertion.addMethod('text', function (text) {
    this.assert(
        flag(this, 'object').text() === text
      , 'expected #{this} to have text #{exp}'
      , 'expected #{this} not to have text #{exp}'
      , text
    );
  });

  // expect(el).to.have.value(value) — exact form-value match.
  chai.Assertion.addMethod('value', function (value) {
    this.assert(
        flag(this, 'object').val() === value
      , 'expected #{this} to have value #{exp}'
      , 'expected #{this} not to have value #{exp}'
      , value
    );
  });

  // State properties: expect(el).to.be.visible / hidden / selected /
  // checked / disabled — each delegates to the matching jQuery pseudo-class.
  jQuery.each(['visible', 'hidden', 'selected', 'checked', 'disabled'], function (i, attr) {
    chai.Assertion.addProperty(attr, function () {
      this.assert(
          flag(this, 'object').is(':' + attr)
        , 'expected #{this} to be ' + attr
        , 'expected #{this} not to be ' + attr);
    });
  });

  // Override `exist`: for jQuery subjects, "exists" means the selection
  // matched at least one element; otherwise fall through to chai's default.
  chai.Assertion.overwriteProperty('exist', function (_super) {
    return function () {
      var obj = flag(this, 'object');
      if (obj instanceof jQuery) {
        this.assert(
            obj.length > 0
          , 'expected ' + inspect(obj.selector) + ' to exist'
          , 'expected ' + inspect(obj.selector) + ' not to exist');
      } else {
        _super.apply(this, arguments);
      }
    };
  });

  // Override `be` so it is also callable: expect(el).to.be(':checked').
  // The returned function inherits from the original assertion via
  // __proto__ so property-style chaining (to.be.visible) still works.
  chai.Assertion.overwriteProperty('be', function (_super) {
    return function () {
      var be = function (selector) {
        var obj = flag(this, 'object');
        if (obj instanceof jQuery) {
          this.assert(
              obj.is(selector)
            , 'expected #{this} to be #{exp}'
            , 'expected #{this} not to be #{exp}'
            , selector
          );
        } else {
          _super.apply(this, arguments);
        }
      };
      be.__proto__ = this;
      return be;
    }
  });

  // Override `match`: for jQuery subjects, test against a selector
  // instead of a regular expression.
  chai.Assertion.overwriteMethod('match', function (_super) {
    return function (selector) {
      var obj = flag(this, 'object');
      if (obj instanceof jQuery) {
        this.assert(
            obj.is(selector)
          , 'expected #{this} to match #{exp}'
          , 'expected #{this} not to match #{exp}'
          , selector
        );
      } else {
        _super.apply(this, arguments);
      }
    }
  });

  // Override `contain`: for jQuery subjects, use the :contains() pseudo-class
  // on the element's text; otherwise defer to chai's string/array contain.
  chai.Assertion.overwriteProperty('contain', function (_super) {
    return function () {
      _super.call(this);
      var contain = function (text) {
        var obj = flag(this, 'object');
        if (obj instanceof jQuery) {
          this.assert(
              obj.is(':contains(\'' + text + '\')')
            , 'expected #{this} to contain #{exp}'
            , 'expected #{this} not to contain #{exp}'
            , text
          );
        } else {
          Function.prototype.apply.call(_super.call(this), this, arguments);
        }
      };
      contain.__proto__ = this;
      return contain;
    }
  });

  // Override `have` so it is also callable with a selector:
  // expect(el).to.have('.child') asserts a matching descendant exists.
  chai.Assertion.overwriteProperty('have', function (_super) {
    return function () {
      _super.call(this);
      var have = function (selector) {
        var obj = flag(this, 'object');
        if (obj instanceof jQuery) {
          this.assert(
              // Using find() rather than has() to work around a jQuery bug:
              // http://bugs.jquery.com/ticket/11706
              obj.find(selector).length > 0
            , 'expected #{this} to have #{exp}'
            , 'expected #{this} not to have #{exp}'
            , selector
          );
        }
      };
      have.__proto__ = this;
      return have;
    }
  });
}));
package util
import "errors"
// Sentinel errors shared by import/version storage operations.
// Callers compare against these values to distinguish failure modes.
var (
	// ErrNotFound Import or Version was not found.
	ErrNotFound = errors.New("Requested resource was not found")

	// ErrAlreadyExists Import or Version already exists and cannot be overwritten.
	// (Fix: message previously misspelled "overritten".)
	ErrAlreadyExists = errors.New("Resource already exists and cannot be overwritten")

	// ErrDisabled Import or Version has been disabled and cannot be downloaded.
	ErrDisabled = errors.New("Resource disabled")
)
| deejross/dep-registry | util/errors.go | GO | apache-2.0 | 441 |
// Copyright (c) 2015 Illyriad Games Ltd. All rights reserved.
// Licensed under the Apache License, Version 2.0. See License.md in the project root for license information.
using System;
using System.Numerics;
namespace IllyriadGames.ByteArrayExtensions
{
public static class VectorizedCopyExtension
{
    // Will be Jit'd to consts https://github.com/dotnet/coreclr/issues/1079
    private static readonly int _vectorSpan = Vector<byte>.Count;
    private static readonly int _vectorSpan2 = Vector<byte>.Count + Vector<byte>.Count;
    private static readonly int _vectorSpan3 = Vector<byte>.Count + Vector<byte>.Count + Vector<byte>.Count;
    private static readonly int _vectorSpan4 = Vector<byte>.Count + Vector<byte>.Count + Vector<byte>.Count + Vector<byte>.Count;
    private const int _longSpan = sizeof(long);
    private const int _longSpan2 = sizeof(long) + sizeof(long);
    private const int _longSpan3 = sizeof(long) + sizeof(long) + sizeof(long);
    private const int _intSpan = sizeof(int);

    /// <summary>
    /// Copies a specified number of bytes from a source array starting at a particular
    /// offset to a destination array starting at a particular offset, not safe for overlapping data.
    /// </summary>
    /// <param name="src">The source buffer</param>
    /// <param name="srcOffset">The zero-based byte offset into src</param>
    /// <param name="dst">The destination buffer</param>
    /// <param name="dstOffset">The zero-based byte offset into dst</param>
    /// <param name="count">The number of bytes to copy</param>
    /// <exception cref="ArgumentNullException"><paramref name="src"/> or <paramref name="dst"/> is null</exception>
    /// <exception cref="ArgumentOutOfRangeException"><paramref name="srcOffset"/>, <paramref name="dstOffset"/>, or <paramref name="count"/> is less than 0</exception>
    /// <exception cref="ArgumentException">
    /// The number of bytes in src is less
    /// than srcOffset plus count.-or- The number of bytes in dst is less than dstOffset
    /// plus count.
    /// </exception>
    /// <remarks>
    /// Code must be optimized, in release mode and <see cref="Vector"/>.IsHardwareAccelerated must be true for the performance benefits.
    /// </remarks>
    public unsafe static void VectorizedCopy(this byte[] src, int srcOffset, byte[] dst, int dstOffset, int count)
    {
        // Fix: validate arguments up front so every code path (accelerated,
        // fallback, and the large-count shortcut) throws consistently, and
        // each ArgumentOutOfRangeException names the parameter that is
        // actually invalid (previously all three reported "count").
        if (src == null) throw new ArgumentNullException(nameof(src));
        if (dst == null) throw new ArgumentNullException(nameof(dst));
        if (count < 0) throw new ArgumentOutOfRangeException(nameof(count));
        if (srcOffset < 0) throw new ArgumentOutOfRangeException(nameof(srcOffset));
        if (dstOffset < 0) throw new ArgumentOutOfRangeException(nameof(dstOffset));
        if (count == 0) return;
        if (srcOffset + count > src.Length) throw new ArgumentException(nameof(src));
        if (dstOffset + count > dst.Length) throw new ArgumentException(nameof(dst));
#if !DEBUG
        // Tests need to check even if IsHardwareAccelerated == false
        // Check will be Jitted away https://github.com/dotnet/coreclr/issues/1079
        if (Vector.IsHardwareAccelerated)
        {
#endif
            if (count > 512 + 64)
            {
                // In-built copy faster for large arrays (vs repeated bounds checks on Vector.ctor?)
                Array.Copy(src, srcOffset, dst, dstOffset, count);
                return;
            }

            // Copy in 4-, 2-, then 1-vector strides while enough bytes remain.
            while (count >= _vectorSpan4)
            {
                new Vector<byte>(src, srcOffset).CopyTo(dst, dstOffset);
                new Vector<byte>(src, srcOffset + _vectorSpan).CopyTo(dst, dstOffset + _vectorSpan);
                new Vector<byte>(src, srcOffset + _vectorSpan2).CopyTo(dst, dstOffset + _vectorSpan2);
                new Vector<byte>(src, srcOffset + _vectorSpan3).CopyTo(dst, dstOffset + _vectorSpan3);
                if (count == _vectorSpan4) return;
                count -= _vectorSpan4;
                srcOffset += _vectorSpan4;
                dstOffset += _vectorSpan4;
            }
            if (count >= _vectorSpan2)
            {
                new Vector<byte>(src, srcOffset).CopyTo(dst, dstOffset);
                new Vector<byte>(src, srcOffset + _vectorSpan).CopyTo(dst, dstOffset + _vectorSpan);
                if (count == _vectorSpan2) return;
                count -= _vectorSpan2;
                srcOffset += _vectorSpan2;
                dstOffset += _vectorSpan2;
            }
            if (count >= _vectorSpan)
            {
                new Vector<byte>(src, srcOffset).CopyTo(dst, dstOffset);
                if (count == _vectorSpan) return;
                count -= _vectorSpan;
                srcOffset += _vectorSpan;
                dstOffset += _vectorSpan;
            }
            if (count > 0)
            {
                // Sub-vector tail: copy long-, int-, then byte-sized chunks
                // through pinned pointers.
                fixed (byte* srcOrigin = src)
                fixed (byte* dstOrigin = dst)
                {
                    var pSrc = srcOrigin + srcOffset;
                    var dSrc = dstOrigin + dstOffset;

                    if (count >= _longSpan)
                    {
                        var lpSrc = (long*)pSrc;
                        var ldSrc = (long*)dSrc;

                        if (count < _longSpan2)
                        {
                            count -= _longSpan;
                            pSrc += _longSpan;
                            dSrc += _longSpan;
                            *ldSrc = *lpSrc;
                        }
                        else if (count < _longSpan3)
                        {
                            count -= _longSpan2;
                            pSrc += _longSpan2;
                            dSrc += _longSpan2;
                            *ldSrc = *lpSrc;
                            *(ldSrc + 1) = *(lpSrc + 1);
                        }
                        else
                        {
                            count -= _longSpan3;
                            pSrc += _longSpan3;
                            dSrc += _longSpan3;
                            *ldSrc = *lpSrc;
                            *(ldSrc + 1) = *(lpSrc + 1);
                            *(ldSrc + 2) = *(lpSrc + 2);
                        }
                    }
                    if (count >= _intSpan)
                    {
                        var ipSrc = (int*)pSrc;
                        var idSrc = (int*)dSrc;
                        count -= _intSpan;
                        pSrc += _intSpan;
                        dSrc += _intSpan;
                        *idSrc = *ipSrc;
                    }
                    while (count > 0)
                    {
                        count--;
                        *dSrc = *pSrc;
                        dSrc += 1;
                        pSrc += 1;
                    }
                }
            }
#if !DEBUG
        }
        else
        {
            Array.Copy(src, srcOffset, dst, dstOffset, count);
            return;
        }
#endif
    }
}
} | IllyriadGames/ByteArrayExtensions | src/IllyriadGames.ByteArrayExtensions/VectorizedCopyExtension.cs | C# | apache-2.0 | 7,465 |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Xunit;
#if USE_FASTQUANT
namespace FastQuant.Tests
#else
namespace SmartQuant.Tests
#endif
{
/// <summary>
/// Tests TickSeries.GetIndex lookup semantics for exact, between,
/// before-first and after-last timestamps.
/// (Cleanup: removed unused locals firstTick/lastDt/lastTick and
/// duplicated assertion lines from the original.)
/// </summary>
public class TickSeriesTest
{
    [Fact]
    public void TestGetIndex()
    {
        // Series of 10 ticks at 10:00:30, 10:01:30, ..., 10:09:30.
        var ts = new TickSeries("test");
        for (int i = 0; i < 10; ++i)
            ts.Add(new Tick { DateTime = new DateTime(2000, 1, 1, 10, i, 30) });

        var firstDt = new DateTime(2000, 1, 1, 10, 3, 30);

        // Exact match in the middle of the series: all options hit the tick.
        Assert.Equal(3, ts.GetIndex(firstDt, IndexOption.Null));
        Assert.Equal(4, ts.GetIndex(new DateTime(2000, 1, 1, 10, 4, 30), IndexOption.Null));
        Assert.Equal(4, ts.GetIndex(new DateTime(2000, 1, 1, 10, 4, 30), IndexOption.Prev));
        Assert.Equal(4, ts.GetIndex(new DateTime(2000, 1, 1, 10, 4, 30), IndexOption.Next));

        // Between two ticks: Null -> -1, Prev -> earlier tick, Next -> later tick.
        Assert.Equal(-1, ts.GetIndex(new DateTime(2000, 1, 1, 10, 4, 25), IndexOption.Null));
        Assert.Equal(3, ts.GetIndex(new DateTime(2000, 1, 1, 10, 4, 25), IndexOption.Prev));
        Assert.Equal(4, ts.GetIndex(new DateTime(2000, 1, 1, 10, 4, 25), IndexOption.Next));
        Assert.Equal(-1, ts.GetIndex(new DateTime(2000, 1, 1, 10, 4, 40), IndexOption.Null));
        Assert.Equal(4, ts.GetIndex(new DateTime(2000, 1, 1, 10, 4, 40), IndexOption.Prev));
        Assert.Equal(5, ts.GetIndex(new DateTime(2000, 1, 1, 10, 4, 40), IndexOption.Next));

        // DateTime > LastDateTime
        Assert.Equal(-1, ts.GetIndex(new DateTime(2000, 1, 1, 10, 11, 30), IndexOption.Null));
        Assert.Equal(9, ts.GetIndex(new DateTime(2000, 1, 1, 10, 11, 30), IndexOption.Prev));
        Assert.Equal(-1, ts.GetIndex(new DateTime(2000, 1, 1, 10, 11, 30), IndexOption.Next));

        // DateTime < FirstDateTime
        Assert.Equal(-1, ts.GetIndex(new DateTime(2000, 1, 1, 9, 31, 30), IndexOption.Null));
        Assert.Equal(-1, ts.GetIndex(new DateTime(2000, 1, 1, 9, 31, 30), IndexOption.Prev));
        Assert.Equal(0, ts.GetIndex(new DateTime(2000, 1, 1, 9, 31, 30), IndexOption.Next));
    }
}
}
| fastquant/fastquant.dll | test/FastQuant.Tests.Shared/TickSeriesTest.cs | C# | apache-2.0 | 2,582 |
package com.sectong.util;
import org.apache.log4j.Logger;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.net.URL;
import java.net.URLConnection;
/**
 * Minimal helper for HTTP POST requests with a fixed timeout and UTF-8
 * encoding. Returns the response body, or {@code null} on I/O failure
 * (the error is logged, not rethrown).
 */
public class HttpUtil {

	private static Logger logger = Logger.getLogger(HttpUtil.class);

	private final static int CONNECT_TIMEOUT = 5000; // in milliseconds
	private final static String DEFAULT_ENCODING = "UTF-8";

	/**
	 * Posts {@code data} to {@code urlStr} with no explicit content type.
	 *
	 * @return the response body, or {@code null} on I/O error
	 */
	public static String postData(String urlStr, String data) {
		return postData(urlStr, data, null);
	}

	/**
	 * Posts {@code data} to {@code urlStr}.
	 *
	 * @param urlStr      target URL
	 * @param data        request body; {@code null} is sent as the empty string
	 * @param contentType optional content-type header; skipped when {@code null}
	 * @return the response body with lines joined by CRLF, or {@code null} on I/O error
	 */
	public static String postData(String urlStr, String data, String contentType) {
		try {
			URL url = new URL(urlStr);
			URLConnection conn = url.openConnection();
			conn.setDoOutput(true);
			conn.setConnectTimeout(CONNECT_TIMEOUT);
			conn.setReadTimeout(CONNECT_TIMEOUT);
			if (contentType != null)
				conn.setRequestProperty("content-type", contentType);

			// Fix: the writer/reader were previously leaked when an exception
			// occurred mid-request; try-with-resources guarantees closing.
			try (OutputStreamWriter writer =
					new OutputStreamWriter(conn.getOutputStream(), DEFAULT_ENCODING)) {
				writer.write(data == null ? "" : data);
				writer.flush();
			}

			try (BufferedReader reader = new BufferedReader(
					new InputStreamReader(conn.getInputStream(), DEFAULT_ENCODING))) {
				StringBuilder sb = new StringBuilder();
				String line;
				while ((line = reader.readLine()) != null) {
					sb.append(line);
					sb.append("\r\n");
				}
				return sb.toString();
			}
		} catch (IOException e) {
			logger.error("Error connecting to " + urlStr + ": " + e.getMessage());
		}
		return null;
	}
}
| xaioyi/yidongyiljwj | src/main/java/com/sectong/util/HttpUtil.java | Java | apache-2.0 | 1,988 |
'use strict';

// View-model namespace for the Monitoring > Alarm Data page.
viewModel.MonitoringAlarm = new Object();
var ma = viewModel.MonitoringAlarm;

// Available-data window reported by the server (*temp) and the min/max
// dates actually returned by the last grid query (*Ret); compared in
// ma.checkCompleteDate to warn when the query range leaves the window.
ma.minDatetemp = new Date();
ma.maxDatetemp = new Date();
ma.minDateRet = new Date();
ma.maxDateRet = new Date();

// Page chrome: active menu item, title, and breadcrumb trail.
vm.currentMenu('Alarm Data');
vm.currentTitle('Alarm Data');
vm.isShowDataAvailability(false);
vm.breadcrumb([
    { title: "Monitoring", href: '#' },
    { title: 'Alarm Data', href: viewModel.appName + 'page/monitoringalarm' }]);

// NOTE(review): declared but never assigned in this file — presumably a
// leftover from the shared turbine-refresh pattern; confirm before removing.
var intervalTurbine = null;
// Selects the given project in the Kendo project dropdown and fires its
// change handler. Runs after a 1s delay so the widget has been initialised.
ma.UpdateProjectList = function (project) {
    setTimeout(function () {
        var dropdown = $('#projectList').data('kendoDropDownList');
        dropdown.value(project);
        dropdown.trigger("change");
    }, 1000);
};
// Applies a turbine selection to the multiselect widget: an empty list
// means "all turbines", otherwise only the given turbines are selected.
ma.UpdateTurbineList = function (turbineList) {
    var $turbines = $('#turbineList');
    if (turbineList.length === 0) {
        $turbines.multiselect('selectAll', false).multiselect("refresh");
    } else {
        $turbines.multiselect("deselectAll", false).multiselect("refresh");
        $turbines.multiselect('select', turbineList);
    }
};
// Builds the request parameters for the given grid type ("alarm",
// "warning" or "alarmraw") and creates the corresponding Kendo grid.
// When projectname/turbine are present in localStorage (navigation from
// another page), the project and turbine widgets are synchronised first
// via delayed callbacks, then the stored state is cleared.
// (Cleanup: removed dead locals COOKIES/cookieStr from the original.)
ma.CreateGrid = function (gridType) {
    app.loading(true);
    $.when(fa.LoadData()).done(function () {
        var param = {
            period: fa.period,
            dateStart: fa.dateStart,
            dateEnd: fa.dateEnd,
            turbine: [],
            project: "",
            tipe: gridType,
        };
        if (localStorage.getItem("projectname") !== null && localStorage.getItem("turbine") !== null) {
            // Arrived from another page: restore project/turbine selection.
            var locTurbine = localStorage.getItem("turbine");
            param.turbine = locTurbine == "" ? [] : [locTurbine];
            param.project = localStorage.getItem("projectname");
            var tabActive = localStorage.getItem("tabActive");
            $.when(ma.UpdateProjectList(param.project)).done(function () {
                // Delays give the Kendo widgets time to re-populate.
                setTimeout(function () {
                    ma.UpdateTurbineList(param.turbine);
                    setTimeout(function () {
                        ma.LoadDataAvail(param.project, gridType);
                        if (tabActive !== null) {
                            if (tabActive == "alarmRaw") {
                                $("#alarmrawTab a:first-child").trigger('click');
                            } else {
                                ma.CreateGridAlarm(gridType, param);
                            }
                        }
                    }, 500);
                    app.resetLocalStorage();
                }, 1500);
            });
        } else {
            // Normal page load: use the current filter selections.
            param.turbine = fa.turbine();
            param.project = fa.project;
            ma.CreateGridAlarm(gridType, param);
        }
    });
}
// Flattens a (possibly nested) Kendo filter tree into additionalFilter.
// Nested groups are expanded depth-first before the node itself is pushed.
ma.buildParentFilter = function (filters, additionalFilter) {
    $.each(filters, function (i, node) {
        if (node.filter !== undefined) {
            ma.buildParentFilter(node.filter.filters, additionalFilter);
        }
        additionalFilter.push(node);
    });
};
// Creates (or recreates) the Kendo grid for one of the three alarm views.
// gridType selects the target element, export file name, default sort and
// column set: "alarm" (down), "warning", or "alarmraw".
// param is the server query payload built by ma.CreateGrid.
ma.CreateGridAlarm = function(gridType, param) {
    // Defaults are for the "alarm" (down) view; overridden below.
    var gridName = "#alarmGrid"
    var dt = new Date();
    // Timestamp suffix for the Excel export file name.
    var time = dt.getHours() + "" + dt.getMinutes() + "" + dt.getSeconds();
    var nameFile = "Monitoring Alarm Down_"+ moment(new Date()).format("Y-M-D")+"_"+time;
    var defaultsort = [ { field: "TimeStart", dir: "desc" }, { field: "TimeEnd", dir: "asc" } ]
    var url = viewModel.appName + "monitoringrealtime/getdataalarm";
    if(gridType == "warning") {
        gridName = "#warningGrid"
        nameFile = "Monitoring Alarm Warning";
    }
    if(gridType == "alarmraw"){
        gridName = "#alarmRawGrid"
        nameFile = "Monitoring Alarm Raw";
        defaultsort = [ { field: "TimeStamp", dir: "desc" } ]
    }
    // Columns for the "alarm"/"warning" views.
    var columns = [{
        field: "turbine",
        title: "Turbine",
        attributes: {
            style: "text-align:center;"
        },
        filterable: false,
        width: 90
    }, {
        field: "timestart",
        title: "Time Start",
        width: 170,
        filterable: false,
        attributes: {
            style: "text-align:center;"
        },
        template: "#= moment.utc(data.timestart).format('DD-MMM-YYYY') # #=moment.utc(data.timestart).format('HH:mm:ss')#"
    }, {
        field: "timeend",
        title: "Time End",
        width: 170,
        filterable: false,
        attributes: {
            style: "text-align:center;"
        },
        // Zero date (0001-01-01) marks an alarm that has not ended yet.
        // NOTE(review): the nested '#'s in this template look malformed —
        // verify the "Not yet finished" branch actually renders.
        template: "#= (moment.utc(data.timeend).format('DD-MM-YYYY') == '01-01-0001'?'Not yet finished' : (moment.utc(data.timeend).format('DD-MMM-YYYY') # #=moment.utc(data.timeend).format('HH:mm:ss')))#"
    }, {
        field: "duration",
        title: "Duration (hh:mm:ss)",
        width: 120,
        attributes: {
            style: "text-align:center;"
        },
        filterable: false,
        //template: "#= time(data.duration) #"
        template: '#= kendo.toString(secondsToTime(data.duration)) #',
    }, {
        field: "alarmcode",
        title: "Alarm Code",
        attributes: {
            style: "text-align:center;"
        },
        // Restrict alarm-code filtering to an equality check with a plain
        // text box (the operator dropdown is removed from the filter form).
        filterable: {
            operators: {
                string: {
                    eq: "Is equal to"
                }
            },
            ui: function(element) {
                var form = element.closest("form");
                form.find(".k-filter-help-text:first").text("Show items equal to:");
                form.find("select").remove();
                $("form").find("[data-bind='value:filters[0].value']").addClass('k-textbox');
            }
        },
        width: 90,
    },{
        field: "alarmdesc",
        title: "Description",
        width: 330,
    }];
    // Only the down-alarm view shows the availability-reduction flag.
    if(gridType == "alarm"){
        columns.push({
            field: "reduceavailability",
            title: "Reduce Avail.",
            attributes: {
                style: "text-align:center;"
            },
            filterable: false,
            width: 90,
        });
    }
    // Raw view replaces the column set entirely.
    if(gridType == "alarmraw"){
        columns = [{
            field: "turbine",
            title: "Turbine",
            attributes: {
                style: "text-align:center;"
            },
            filterable: false,
            width: 70
        }, {
            field: "timestamp",
            title: "Timestamp",
            width: 120,
            attributes: {
                style: "text-align:center;"
            },
            filterable: false,
            template: "#= moment.utc(data.timestamp).format('DD-MMM-YYYY') # #=moment.utc(data.timestamp).format('HH:mm:ss')#"
        }, {
            field: "tag",
            title: "Tag",
            width: 120,
            filterable: false,
            attributes: {
                style: "text-align:center;"
            },
        }, {
            field: "value",
            title: "Value",
            attributes: {
                style: "text-align:center;"
            },
            filterable: false,
            width: 70,
            // template: "#= kendo.toString(data.Timestamp,'n2') #"
        }, {
            field: "description",
            title: "Description",
            width: 200
        }, {
            field: "addinfo",
            title: "Note",
            filterable: false,
            width: 250
        }];
    }
    // Reset the container and build the grid with server-side paging,
    // sorting and filtering.
    $(gridName).html('');
    $(gridName).kendoGrid({
        dataSource: {
            serverPaging: true,
            serverSorting: true,
            serverFiltering: true,
            transport: {
                read: {
                    url: url,
                    type: "POST",
                    data: param,
                    dataType: "json",
                    contentType: "application/json; charset=utf-8",
                },
                // Flatten nested Kendo filters before posting (the backend
                // expects a flat filter list).
                parameterMap: function(options) {
                    var additionalFilter = [];
                    if(options.filter !== undefined && options.filter != null) {
                        ma.buildParentFilter(options.filter.filters, additionalFilter)
                    }
                    if (additionalFilter.length > 0) {
                        options["filter"] = additionalFilter;
                    }
                    return JSON.stringify(options);
                }
            },
            schema: {
                // Besides unwrapping the row data, the read callback updates
                // the page's summary widgets and the returned date range used
                // by ma.checkCompleteDate.
                data: function data(res) {
                    if (!app.isFine(res)) {
                        return;
                    }
                    var totalFreq = res.data.Total;
                    var totalHour = res.data.Duration;
                    ma.minDateRet = new Date(res.data.mindate);
                    ma.maxDateRet = new Date(res.data.maxdate);
                    ma.checkCompleteDate()
                    $('#alarm_duration').text((totalHour/3600).toFixed(2));
                    $('#alarm_frequency').text(totalFreq);
                    setTimeout(function(){
                        app.loading(false);
                    }, 300)
                    return res.data.Data;
                },
                total: function data(res) {
                    return res.data.Total;
                }
            },
            pageSize: 10,
            sort: defaultsort,
        },
        // toolbar: ["excel"],
        excel: {
            fileName: nameFile+".xlsx",
            filterable: true,
            allPages: true
        },
        // pdf: {
        // fileName: nameFile+".pdf",
        // },
        sortable: true,
        pageable: {
            refresh: true,
            pageSizes: true,
            buttonCount: 5
        },
        filterable: {
            extra: false,
            operators: {
                string: {
                    contains: "Contains",
                    eq: "Is equal to"
                },
            }
        },
        columns: columns
    });
};
// Formats a duration given in seconds as a zero-padded "hh:mm:ss" string.
// Accepts numbers or numeric strings.
// Fix: seconds 1-9 were previously rendered without a leading zero
// (e.g. "01:02:7"), unlike hours and minutes.
function secondsToTime(d) {
    d = Number(d);
    var h = Math.floor(d / 3600);
    var m = Math.floor(d % 3600 / 60);
    var s = Math.floor(d % 3600 % 60);
    function pad(v) {
        return v < 10 ? "0" + v : String(v);
    }
    return pad(h) + ":" + pad(m) + ":" + pad(s);
}
// Formats a duration of `s` seconds as "HH:MM:SS" (UTC) by slicing the
// time portion out of an ISO-8601 timestamp. Only valid for durations
// under 24 hours.
function time(s) {
    var iso = new Date(s * 1000).toISOString();
    return iso.slice(-13, -5);
}
// Initialises both Kendo date pickers to today's date at UTC midnight.
ma.InitDateValue = function () {
    var maxDateData = new Date();
    // Year comes from moment, month/day from the local Date; combined into
    // a UTC midnight timestamp.
    var lastStartDate = new Date(Date.UTC(moment(maxDateData).get('year'), maxDateData.getMonth(), maxDateData.getDate(), 0, 0, 0, 0));
    var lastEndDate = new Date(Date.UTC(moment(maxDateData).get('year'), maxDateData.getMonth(), maxDateData.getDate(), 0, 0, 0, 0));
    $('#dateStart').data('kendoDatePicker').value(lastStartDate);
    $('#dateEnd').data('kendoDatePicker').value(lastEndDate);
}
// Fetches the [min, max] date window for which alarm data exists for the
// given project/grid type, stores it on ma.minDatetemp/ma.maxDatetemp,
// updates the availability labels, and re-runs the range check.
ma.LoadDataAvail = function(projectname, gridType){
    //fa.LoadData();
    var payload = {
        project: projectname,
        tipe: gridType
    };
    toolkit.ajaxPost(viewModel.appName + "monitoringrealtime/getdataalarmavaildate", payload, function (res) {
        if (!app.isFine(res)) {
            return;
        }
        if (res.data.Data.length == 0) {
            res.data.Data = [];
        } else {
            if (res.data.Data.length > 0) {
                // Data[0] = earliest available date, Data[1] = latest.
                ma.minDatetemp = new Date(res.data.Data[0]);
                ma.maxDatetemp = new Date(res.data.Data[1]);
                app.currentDateData = new Date(res.data.Data[1]);
                $('#availabledatestart').html(kendo.toString(moment.utc(ma.minDatetemp).format('DD-MMMM-YYYY')));
                $('#availabledateend').html(kendo.toString(moment.utc(ma.maxDatetemp).format('DD-MMMM-YYYY')));
                // $('#dateStart').data('kendoDatePicker').value( new Date(Date.UTC(moment( ma.maxDatetemp).get('year'), ma.maxDatetemp.getMonth(), ma.maxDatetemp.getDate() - 7, 0, 0, 0, 0)));
                // $('#dateEnd').data('kendoDatePicker').value(kendo.toString(moment.utc(res.data.Data[1]).format('DD-MMM-YYYY')));
                ma.checkCompleteDate();
            }
        }
    });
}
// Shows the "period outside available data" warning icon whenever the
// queried date range [minDateRet, maxDateRet] falls outside the available
// data window [minDatetemp, maxDatetemp]; otherwise hides it and clears
// the range hint. Dates are compared lexicographically as 'YYYY-MM-DD'.
ma.checkCompleteDate = function () {
    var latestAvailable = moment.utc(ma.maxDatetemp).format('YYYY-MM-DD');
    var earliestAvailable = moment.utc(ma.minDatetemp).format('YYYY-MM-DD');
    var rangeStart = moment.utc(ma.minDateRet).format('YYYY-MM-DD');
    var rangeEnd = moment.utc(ma.maxDateRet).format('YYYY-MM-DD');
    var outsideWindow =
        (rangeEnd > latestAvailable) || (rangeStart > latestAvailable) ||
        (rangeEnd < earliestAvailable) || (rangeStart < earliestAvailable);
    if (outsideWindow) {
        fa.infoPeriodIcon(true);
    } else {
        fa.infoPeriodIcon(false);
        fa.infoPeriodRange("");
    }
}
// Navigates to the "monitoring by project" page for the currently
// selected project. The selection is handed over via localStorage; the
// 1.5s delay gives the dropdown time to settle before reading it.
ma.ToByProject = function(){
    setTimeout(function(){
        app.loading(true);
        app.resetLocalStorage();
        var project = $('#projectList').data('kendoDropDownList').value();
        localStorage.setItem('projectname', project);
        if(localStorage.getItem("projectname")){
            window.location = viewModel.appName + "page/monitoringbyproject";
        }
    },1500);
}
// Triggers the Kendo grid's built-in "save as Excel" export for the grid
// hosted in the element with the given id.
ma.exportToExcel = function (id) {
    var grid = $("#" + id).getKendoGrid();
    grid.saveAsExcel();
};
// Page bootstrap: wires the refresh button, builds the initial grid, and
// hooks project-dropdown changes to turbine/availability reloads.
$(document).ready(function(){
    // Refresh reloads whichever alarm tab is currently active.
    $('#btnRefresh').on('click', function () {
        fa.checkTurbine();
        if($('.nav').find('li.active').find('a.tab-custom').text() == "Alarm Down") {
            ma.CreateGrid("alarm");
        } else if($('.nav').find('li.active').find('a.tab-custom').text() == "Alarm Warning") {
            ma.CreateGrid("warning");
        }else{
            ma.CreateGrid("alarmraw");
        }
    });
    //setTimeout(function() {
    // Initial load: default the date pickers, then build the down-alarm
    // grid and fetch the availability window after a short settle delay.
    $.when(ma.InitDateValue()).done(function () {
        setTimeout(function() {
            ma.CreateGrid("alarm");
            ma.LoadDataAvail(fa.project, "alarm");
        }, 100);
    });
    //}, 300);
    // Changing the project repopulates the turbine list and refreshes the
    // availability window.
    $('#projectList').kendoDropDownList({
        change: function () {
            var project = $('#projectList').data("kendoDropDownList").value();
            fa.populateTurbine(project);
            ma.LoadDataAvail(project, "alarm");
        }
    });
});
| eaciit/windapp | web/assets/core/js/page-monitoring/monitoring-alarm.js | JavaScript | apache-2.0 | 14,310 |
/*
* Copyright 2015 Matthew Timmermans
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.nobigsoftware.dfalex;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import static backport.java.util.function.BackportFuncs.computeIfAbsent;
/**
* Turns an NFA into a non-minimal RawDfa by powerset construction
*/
class DfaFromNfa<RESULT> {
    //inputs
    // The NFA being determinized, its start states, the corresponding DFA
    // start states (filled by _build), and the resolver invoked when one
    // DFA state accepts multiple distinct NFA results.
    private final Nfa<RESULT> m_nfa;
    private final int[] m_nfaStartStates;
    private final int[] m_dfaStartStates;
    private final DfaAmbiguityResolver<? super RESULT> m_ambiguityResolver;

    //utility
    // Encodes/decodes sets of NFA states into compact int-list signatures.
    private final DfaStateSignatureCodec m_dfaSigCodec = new DfaStateSignatureCodec();

    //These fields are scratch space
    // Reused across calls to avoid per-state allocations.
    private final IntListKey m_tempStateSignature = new IntListKey();
    private final ArrayDeque<Integer> m_tempNfaClosureList = new ArrayDeque<>();
    private final HashSet<RESULT> m_tempResultSet = new HashSet<RESULT>();

    //accumulators
    // Signature -> DFA state number interning, plus the completed DFA
    // states and accept sets (index 0 of m_acceptSets is reserved for
    // "no accept").
    private final HashMap<RESULT, Integer> m_acceptSetMap = new HashMap<>();
    private final ArrayList<RESULT> m_acceptSets = new ArrayList<>();
    private final HashMap<IntListKey, Integer> m_dfaStateSignatureMap = new HashMap<>();
    private final ArrayList<IntListKey> m_dfaStateSignatures = new ArrayList<>();
    private final ArrayList<DfaStateInfo> m_dfaStates = new ArrayList<>();
    /**
     * Runs the powerset construction immediately.
     *
     * @param nfa               the NFA to determinize
     * @param nfaStartStates    NFA start states; one DFA start state is
     *                          produced per entry, in the same order
     * @param ambiguityResolver invoked when a DFA state accepts more than
     *                          one distinct RESULT
     */
    public DfaFromNfa(Nfa<RESULT> nfa, int[] nfaStartStates,
            DfaAmbiguityResolver<? super RESULT> ambiguityResolver) {
        m_nfa = nfa;
        m_nfaStartStates = nfaStartStates;
        m_dfaStartStates = new int[nfaStartStates.length];
        m_ambiguityResolver = ambiguityResolver;

        // Accept-set index 0 is reserved for "no accept".
        m_acceptSets.add(null);
        _build();
    }
    /** Returns the (non-minimal) DFA produced by the construction. */
    public RawDfa<RESULT> getDfa() {
        return new RawDfa<>(m_dfaStates, m_acceptSets, m_dfaStartStates);
    }
    // Powerset construction worklist: seeds the DFA start states from the
    // epsilon closures of the NFA start states, then processes each DFA
    // state, merging overlapping NFA character-range transitions into
    // disjoint DFA transitions and interning any newly discovered states.
    private void _build() {
        final CompactIntSubset nfaStateSet = new CompactIntSubset(m_nfa.numStates());
        final ArrayList<NfaTransition> dfaStateTransitions = new ArrayList<>();
        final ArrayList<NfaTransition> transitionQ = new ArrayList<>(1000);

        //Create the DFA start states
        for (int i = 0; i < m_dfaStartStates.length; ++i) {
            nfaStateSet.clear();
            _addNfaStateAndEpsilonsToSubset(nfaStateSet, m_nfaStartStates[i]);
            m_dfaStartStates[i] = _getDfaState(nfaStateSet);
        }

        //Create the transitions and other DFA states.
        //m_dfaStateSignatures grows as we discover new states.
        //m_dfaStates grows as we complete them
        for (int stateNum = 0; stateNum < m_dfaStateSignatures.size(); ++stateNum) {
            final IntListKey dfaStateSig = m_dfaStateSignatures.get(stateNum);

            dfaStateTransitions.clear();

            //For each DFA state, combine the NFA transitions for each
            //distinct character range into a DFA transiton, appending new DFA states
            //as we discover them.
            transitionQ.clear();

            //dump all the NFA transitions for the state into the Q
            DfaStateSignatureCodec.expand(dfaStateSig,
                    state -> m_nfa.forStateTransitions(state, transitionQ::add));

            //sort all the transitions by first character
            Collections.sort(transitionQ, (arg0, arg1) -> {
                if (arg0.m_firstChar != arg1.m_firstChar) {
                    return (arg0.m_firstChar < arg1.m_firstChar ? -1 : 1);
                }
                return 0;
            });

            final int tqlen = transitionQ.size();

            //first character we haven't accounted for yet
            char minc = 0;

            //NFA transitions at index < tqstart are no longer relevant
            //NFA transitions at index >= tqstart are in first char order OR have first char <= minc
            //The sequence of NFA transitions contributing the the previous DFA transition starts here
            int tqstart = 0;

            //make a range of NFA transitions corresponding to the next DFA transition
            while (tqstart < tqlen) {
                NfaTransition trans = transitionQ.get(tqstart);

                if (trans.m_lastChar < minc) {
                    // Range fully consumed by earlier DFA transitions.
                    ++tqstart;
                    continue;
                }

                //INVAR - trans contributes to the next DFA transition
                nfaStateSet.clear();
                _addNfaStateAndEpsilonsToSubset(nfaStateSet, trans.m_stateNum);
                char startc = trans.m_firstChar;
                char endc = trans.m_lastChar;
                if (startc < minc) {
                    startc = minc;
                }

                //make range of all transitions that include the start character, removing ones
                //that drop out
                for (int tqend = tqstart + 1; tqend < tqlen; ++tqend) {
                    trans = transitionQ.get(tqend);
                    if (trans.m_lastChar < startc) {
                        //remove this one
                        transitionQ.set(tqend, transitionQ.get(tqstart++));
                        continue;
                    }
                    if (trans.m_firstChar > startc) {
                        //this one is for the next transition
                        if (trans.m_firstChar <= endc) {
                            endc = (char) (trans.m_firstChar - 1);
                        }
                        break;
                    }
                    //this one counts
                    if (trans.m_lastChar < endc) {
                        endc = trans.m_lastChar;
                    }
                    _addNfaStateAndEpsilonsToSubset(nfaStateSet, trans.m_stateNum);
                }

                dfaStateTransitions.add(new NfaTransition(startc, endc, _getDfaState(nfaStateSet)));

                minc = (char) (endc + 1);
                if (minc < endc) {
                    //wrapped around
                    break;
                }
            }

            //INVARIANT: m_dfaStatesOut.size() == stateNum
            m_dfaStates.add(_createStateInfo(dfaStateSig, dfaStateTransitions));
        }
    }
//Add an NFA state to m_currentNFASubset, along with the transitive
//closure over its epsilon transitions
private void _addNfaStateAndEpsilonsToSubset(CompactIntSubset dest, int stateNum) {
m_tempNfaClosureList.clear();
if (dest.add(stateNum)) {
m_tempNfaClosureList.add(stateNum);
}
Integer newNfaState;
while ((newNfaState = m_tempNfaClosureList.poll()) != null) {
m_nfa.forStateEpsilons(newNfaState, (Integer src) -> {
if (dest.add(src)) {
m_tempNfaClosureList.add(src);
}
});
}
}
    //Feed one NFA state into the DFA-state signature codec, skipping states
    //that have no transitions and no accept value (they cannot affect the
    //behavior of the DFA state, so leaving them out canonicalizes signatures).
    private void _addNfaStateToSignatureCodec(int stateNum) {
        if (m_nfa.hasTransitionsOrAccepts(stateNum)) {
            m_dfaSigCodec.acceptInt(stateNum);
        }
    }
//Make a DFA state for a set of simultaneous NFA states
private Integer _getDfaState(CompactIntSubset nfaStateSet) {
//dump state combination into compressed form
m_tempStateSignature.clear();
m_dfaSigCodec.start(m_tempStateSignature::add, nfaStateSet.getSize(),
nfaStateSet.getRange());
nfaStateSet.dumpInOrder(this::_addNfaStateToSignatureCodec);
m_dfaSigCodec.finish();
//make sure it's in the map
Integer dfaStateNum = m_dfaStateSignatureMap.get(m_tempStateSignature);
if (dfaStateNum == null) {
dfaStateNum = m_dfaStateSignatures.size();
IntListKey newSig = new IntListKey(m_tempStateSignature);
m_dfaStateSignatures.add(newSig);
m_dfaStateSignatureMap.put(newSig, dfaStateNum);
}
return dfaStateNum;
}
    //Build the transition/accept info for one DFA state.
    //Collects the accept results of all member NFA states; if more than one
    //distinct result is present, the ambiguity resolver chooses (or combines)
    //the winner.  The chosen accept is interned in m_acceptSetMap so equal
    //accepts share one accept-set index (index 0 means "no accept").
    //The cast through m_ambiguityResolver is unchecked because the resolver is
    //stored with a wildcard/raw type -- safe by construction of the builder.
    @SuppressWarnings("unchecked")
    private DfaStateInfo _createStateInfo(IntListKey sig, List<NfaTransition> transitions) {
        //calculate the set of accepts
        m_tempResultSet.clear();
        DfaStateSignatureCodec.expand(sig, nfastate -> {
            RESULT accept = m_nfa.getAccept(nfastate);
            if (accept != null) {
                m_tempResultSet.add(accept);
            }
        });
        //and get an accept set index for it
        RESULT dfaAccept = null;
        if (m_tempResultSet.size() > 1) {
            //multiple NFA accepts collide on this DFA state -> resolve
            dfaAccept = (RESULT) m_ambiguityResolver.apply(m_tempResultSet);
        } else if (!m_tempResultSet.isEmpty()) {
            dfaAccept = m_tempResultSet.iterator().next();
        }
        int acceptSetIndex = 0;
        if (dfaAccept != null) {
            //intern the accept value; new values are appended to m_acceptSets
            acceptSetIndex = computeIfAbsent(m_acceptSetMap, dfaAccept, keyset -> {
                m_acceptSets.add(keyset);
                return m_acceptSets.size() - 1;
            });
        }
        return new DfaStateInfo(transitions, acceptSetIndex);
    }
}
| 6thsolution/ApexNLP | dfalex/src/main/java/com/nobigsoftware/dfalex/DfaFromNfa.java | Java | apache-2.0 | 9,517 |
package com.emc.ecs.servicebroker.repository;
import com.emc.ecs.servicebroker.exception.EcsManagementClientException;
import com.emc.ecs.servicebroker.service.s3.S3Service;
import com.emc.ecs.servicebroker.model.Constants;
import com.emc.object.s3.bean.GetObjectResult;
import com.emc.object.s3.bean.ListObjectsResult;
import com.emc.object.s3.bean.S3Object;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.deser.std.StdDeserializer;
import com.fasterxml.jackson.databind.module.SimpleModule;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cloud.servicebroker.model.binding.SharedVolumeDevice;
import org.springframework.cloud.servicebroker.model.binding.VolumeDevice;
import org.springframework.cloud.servicebroker.model.binding.VolumeMount;
import javax.annotation.PostConstruct;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import static java.lang.String.format;
@SuppressWarnings("unused")
public class ServiceInstanceBindingRepository {
static final Logger logger = LoggerFactory.getLogger(ServiceInstanceBindingRepository.class);
public static final String FILENAME_PREFIX = "service-instance-binding";
private final ObjectMapper objectMapper = new ObjectMapper();
{
// NOTE -- ideally we would not need this code, but for now, the VolumeMount class has
// custom serialization that is not matched with corresponding deserialization, so
// deserializing serialized volume mounts doesn't work OOTB.
SimpleModule module = new SimpleModule();
module.addDeserializer(VolumeMount.DeviceType.class, new DeviceTypeDeserializer());
module.addDeserializer(VolumeMount.Mode.class, new ModeDeserializer());
module.addDeserializer(VolumeDevice.class, new VolumeDeviceDeserializer());
objectMapper.registerModule(module);
}
@Autowired
private S3Service s3;
private static String getFilename(String id) {
return FILENAME_PREFIX + "/" + id + ".json";
}
private static boolean isCorrectFilename (String filename) {
return filename.matches(FILENAME_PREFIX + "/.*\\.json");
}
private ServiceInstanceBinding findByFilename(String filename) throws IOException {
if (!isCorrectFilename(filename)) {
String errorMessage = format("Invalid filename of service instance binding provided: %s", filename);
throw new IOException(errorMessage);
}
logger.debug("Loading service instance binding from repository file {}", filename);
GetObjectResult<InputStream> input = s3.getObject(filename);
return objectMapper.readValue(input.getObject(), ServiceInstanceBinding.class);
}
ServiceInstanceBinding removeSecretCredentials(ServiceInstanceBinding binding) {
Map<String, Object> credentials = binding.getCredentials();
credentials.remove(Constants.S3_URL);
credentials.remove(Constants.CREDENTIALS_SECRET_KEY);
binding.setCredentials(credentials);
return binding;
}
@PostConstruct
public void initialize() throws EcsManagementClientException {
logger.info("Service binding file prefix: {}", FILENAME_PREFIX);
}
public void save(ServiceInstanceBinding binding) throws IOException {
String filename = getFilename(binding.getBindingId());
String serialized = objectMapper.writeValueAsString(binding);
s3.putObject(filename, serialized);
}
public ServiceInstanceBinding find(String id) throws IOException {
String filename = getFilename(id);
return findByFilename(filename);
}
public ListServiceInstanceBindingsResponse listServiceInstanceBindings(String marker, int pageSize) throws IOException {
if (pageSize < 0) {
throw new IOException("Page size could not be negative number");
}
List<ServiceInstanceBinding> bindings = new ArrayList<>();
ListObjectsResult list = marker != null ?
s3.listObjects(FILENAME_PREFIX + "/", getFilename(marker), pageSize) :
s3.listObjects(FILENAME_PREFIX + "/", null, pageSize);
for (S3Object s3Object: list.getObjects()) {
String filename = s3Object.getKey();
if (isCorrectFilename(filename)) {
ServiceInstanceBinding binding = findByFilename(filename);
bindings.add(removeSecretCredentials(binding));
}
}
ListServiceInstanceBindingsResponse response = new ListServiceInstanceBindingsResponse(bindings);
response.setMarker(list.getMarker());
response.setPageSize(list.getMaxKeys());
response.setNextMarker(list.getNextMarker());
return response;
}
public void delete(String id) {
String filename = getFilename(id);
s3.deleteObject(filename);
}
public static class ModeDeserializer extends StdDeserializer<VolumeMount.Mode> {
ModeDeserializer() {
this(null);
}
ModeDeserializer(Class<?> vc) {
super(vc);
}
@Override
public VolumeMount.Mode deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException {
JsonNode node = jp.getCodec().readTree(jp);
String s = node.asText();
if (s.equals("rw")) {
return VolumeMount.Mode.READ_WRITE;
} else {
return VolumeMount.Mode.READ_ONLY;
}
}
}
public static class DeviceTypeDeserializer extends StdDeserializer<VolumeMount.DeviceType> {
DeviceTypeDeserializer() {
this(null);
}
DeviceTypeDeserializer(Class<?> vc) {
super(vc);
}
@Override
public VolumeMount.DeviceType deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException {
return VolumeMount.DeviceType.SHARED;
}
}
public static class VolumeDeviceDeserializer extends StdDeserializer<VolumeDevice> {
VolumeDeviceDeserializer() {
this(null);
}
VolumeDeviceDeserializer(Class<?> vc) {
super(vc);
}
@Override
public VolumeDevice deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException {
return jp.getCodec().readValue(jp, SharedVolumeDevice.class);
}
}
} | emccode/ecs-cf-service-broker | src/main/java/com/emc/ecs/servicebroker/repository/ServiceInstanceBindingRepository.java | Java | apache-2.0 | 6,740 |
# -*- coding: UTF-8 -*-
from __future__ import unicode_literals, print_function, division
| dawncold/expenditure-application | expenditure_application/__init__.py | Python | apache-2.0 | 90 |
/*******************************************************************************
* Copyright (C) 2016 Kwaku Twumasi-Afriyie <kwaku.twumasi@quakearts.com>.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Kwaku Twumasi-Afriyie <kwaku.twumasi@quakearts.com> - initial API and implementation
******************************************************************************/
package com.quakearts.webapp.facelets.bootstrap.renderers;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import javax.faces.component.UIColumn;
import javax.faces.component.UIComponent;
import javax.faces.component.UIData;
import javax.faces.context.FacesContext;
import javax.faces.context.ResponseWriter;
import com.quakearts.webapp.facelets.bootstrap.components.BootTable;
import com.quakearts.webapp.facelets.bootstrap.renderkit.Attribute;
import com.quakearts.webapp.facelets.bootstrap.renderkit.AttributeManager;
import com.quakearts.webapp.facelets.bootstrap.renderkit.html_basic.HtmlBasicRenderer;
import com.quakearts.webapp.facelets.util.UtilityMethods;
import static com.quakearts.webapp.facelets.bootstrap.renderkit.RenderKitUtils.*;
/**
 * JSF renderer that emits a Bootstrap-styled HTML {@code <table>} for a
 * {@link BootTable} component: caption/colgroups facets, a {@code <thead>}
 * built from header facets, one {@code <tbody>} row per data row (grouped by
 * the optional {@code bodyrows} attribute), and a {@code <tfoot>} from footer
 * facets.  Per-render column/row/header class state is kept in a
 * {@link BootMetaInfo} cached in the FacesContext attribute map.
 */
public class BootTableRenderer extends HtmlBasicRenderer {

    /** Pass-through HTML attributes supported by the data-table component. */
    private static final Attribute[] ATTRIBUTES =
            AttributeManager.getAttributes(AttributeManager.Key.DATATABLE);

    /**
     * Opens the {@code <table>} element and renders everything that precedes
     * the body rows: caption facet, colgroups facet, and the {@code <thead>}
     * (a table-wide "header" facet row and/or per-column header facets).
     */
    @Override
    public void encodeBegin(FacesContext context, UIComponent component)
            throws IOException {
        if (!shouldEncode(component)) {
            return;
        }
        BootTable data = (BootTable) component;
        // reset row state so value expressions resolve outside any row
        data.setRowIndex(-1);
        ResponseWriter writer = context.getResponseWriter();
        writer.startElement("table", component);
        writer.writeAttribute("id", component.getClientId(context),
                "id");
        // always emit Bootstrap's base "table" class, then any user classes
        String styleClass = data.get("styleClass");
        writer.writeAttribute("class","table "+(styleClass !=null?" "+styleClass:""), "styleClass");
        renderHTML5DataAttributes(context, component);
        renderPassThruAttributes(context, writer, component,
                ATTRIBUTES);
        writer.writeText("\n", component, null);
        // optional <caption> from the "caption" facet
        UIComponent caption = getFacet(component, "caption");
        if (caption != null) {
            String captionClass = data.get("captionClass");
            String captionStyle = data.get("captionStyle");
            writer.startElement("caption", component);
            if (captionClass != null) {
                writer.writeAttribute("class", captionClass, "captionClass");
            }
            if (captionStyle != null) {
                writer.writeAttribute("style", captionStyle, "captionStyle");
            }
            encodeRecursive(context, caption);
            writer.endElement("caption");
        }
        // optional raw colgroup markup from the "colgroups" facet
        UIComponent colGroups = getFacet(component, "colgroups");
        if (colGroups != null) {
            encodeRecursive(context, colGroups);
        }
        BootMetaInfo info = getMetaInfo(context, component);
        UIComponent header = getFacet(component, "header");
        if (header != null || info.hasHeaderFacets) {
            String headerClass = data.get("headerClass");
            writer.startElement("thead", component);
            writer.writeText("\n", component, null);
            if (header != null) {
                // table-wide header: one <th> spanning all columns
                writer.startElement("tr", header);
                writer.startElement("th", header);
                if (headerClass != null) {
                    writer.writeAttribute("class", headerClass, "headerClass");
                }
                if (info.columns.size() > 1) {
                    writer.writeAttribute("colspan",
                            String.valueOf(info.columns.size()), null);
                }
                writer.writeAttribute("scope", "colgroup", null);
                encodeRecursive(context, header);
                writer.endElement("th");
                writer.endElement("tr");
                writer.write("\n");
            }
            if (info.hasHeaderFacets) {
                // per-column headers: one <th scope="col"> per column
                writer.startElement("tr", component);
                writer.writeText("\n", component, null);
                for (UIColumn column : info.columns) {
                    String columnHeaderClass = info.getCurrentHeaderClass();
                    writer.startElement("th", column);
                    // column-specific class wins over the table-wide headerClass
                    if (columnHeaderClass != null) {
                        writer.writeAttribute("class", columnHeaderClass,
                                "columnHeaderClass");
                    } else if (headerClass != null) {
                        writer.writeAttribute("class", headerClass, "headerClass");
                    }
                    writer.writeAttribute("scope", "col", null);
                    UIComponent facet = getFacet(column, "header");
                    if (facet != null) {
                        encodeRecursive(context, facet);
                    }
                    writer.endElement("th");
                    writer.writeText("\n", component, null);
                }
                writer.endElement("tr");
                writer.write("\n");
            }
            writer.endElement("thead");
            writer.writeText("\n", component, null);
        }
    }

    /**
     * Renders the data rows inside {@code <tbody>}.  Row iteration honors the
     * component's "first" and "rows" settings; the optional {@code bodyrows}
     * attribute lists row indices that start a new {@code <tbody>} group.
     * An empty placeholder row is written when there are no columns or no rows.
     * NOTE(review): when bodyrows is non-empty but the first rendered row's
     * index is not listed in it, the closing endElement("tbody") has no
     * matching start -- appears inherited from the reference renderer this is
     * based on; confirm before changing.
     */
    @Override
    public void encodeChildren(FacesContext context, UIComponent component)
            throws IOException {
        if (!shouldEncodeChildren(component)) {
            return;
        }
        UIData data = (UIData) component;
        ResponseWriter writer = context.getResponseWriter();
        BootMetaInfo info = getMetaInfo(context, data);
        if(info.columns.isEmpty()) {
            // no renderable columns: emit a single empty row and bail out
            writer.startElement("tbody", component);
            renderEmptyTableRow(writer, component);
            writer.endElement("tbody");
            return;
        }
        int processed = 0;
        int rowIndex = data.getFirst() - 1;
        int rows = data.getRows();
        List<Integer> bodyRows = getBodyRows(context.getExternalContext().getApplicationMap(), data);
        boolean hasBodyRows = (bodyRows != null && !bodyRows.isEmpty());
        boolean wroteTableBody = false;
        if (!hasBodyRows) {
            // single tbody for the whole table
            writer.startElement("tbody", component);
            writer.writeText("\n", component, null);
        }
        boolean renderedRow = false;
        while (true) {
            // stop when the page size ("rows") has been rendered
            if ((rows > 0) && (++processed > rows)) {
                break;
            }
            data.setRowIndex(++rowIndex);
            if (!data.isRowAvailable()) {
                break;
            }
            // start a new tbody group at each index listed in bodyrows
            if (hasBodyRows && bodyRows.contains(data.getRowIndex())) {
                if (wroteTableBody) {
                    writer.endElement("tbody");
                }
                writer.startElement("tbody", data);
                wroteTableBody = true;
            }
            writer.startElement("tr", component);
            if (info.rowClasses.length > 0) {
                // rowClasses cycle across rows
                writer.writeAttribute("class", info.getCurrentRowClass(),
                        "rowClasses");
            }
            writer.writeText("\n", component, null);
            info.newRow();
            for (UIColumn column : info.columns) {
                // a column marked rowHeader renders <th scope="row"> instead of <td>
                boolean isRowHeader = Boolean.TRUE.equals(column.getAttributes()
                        .get("rowHeader"));
                if (isRowHeader) {
                    writer.startElement("th", column);
                    writer.writeAttribute("scope", "row", null);
                } else {
                    writer.startElement("td", column);
                }
                String columnClass = info.getCurrentColumnClass();
                if (columnClass != null) {
                    writer.writeAttribute("class", columnClass, "columnClasses");
                }
                for (Iterator<UIComponent> gkids = getChildren(column); gkids
                        .hasNext();) {
                    encodeRecursive(context, gkids.next());
                }
                if (isRowHeader) {
                    writer.endElement("th");
                } else {
                    writer.endElement("td");
                }
                writer.writeText("\n", component, null);
            }
            writer.endElement("tr");
            writer.write("\n");
            renderedRow = true;
        }
        if(!renderedRow) {
            renderEmptyTableRow(writer, data);
        }
        writer.endElement("tbody");
        writer.writeText("\n", component, null);
        // leave the component positioned outside any row
        data.setRowIndex(-1);
    }

    /**
     * Renders the {@code <tfoot>} (per-column footer facets first, then a
     * table-wide "footer" facet spanning all columns), clears the cached
     * {@link BootMetaInfo}, and closes the {@code <table>} element.
     */
    @Override
    public void encodeEnd(FacesContext context, UIComponent component)
            throws IOException {
        if (!shouldEncode(component)) {
            return;
        }
        ResponseWriter writer = context.getResponseWriter();
        BootMetaInfo info = getMetaInfo(context, component);
        UIComponent footer = getFacet(component, "footer");
        if (footer != null || info.hasFooterFacets) {
            String footerClass = (String) component.getAttributes().get("footerClass");
            writer.startElement("tfoot", component);
            writer.writeText("\n", component, null);
            if (info.hasFooterFacets) {
                // per-column footers
                writer.startElement("tr", component);
                writer.writeText("\n", component, null);
                for (UIColumn column : info.columns) {
                    String columnFooterClass = (String) column.getAttributes().get(
                            "footerClass");
                    writer.startElement("td", column);
                    // column-specific class wins over the table-wide footerClass
                    if (columnFooterClass != null) {
                        writer.writeAttribute("class", columnFooterClass,
                                "columnFooterClass");
                    } else if (footerClass != null) {
                        writer.writeAttribute("class", footerClass, "footerClass");
                    }
                    UIComponent facet = getFacet(column, "footer");
                    if (facet != null) {
                        encodeRecursive(context, facet);
                    }
                    writer.endElement("td");
                    writer.writeText("\n", component, null);
                }
                writer.endElement("tr");
                writer.write("\n");
            }
            if (footer != null) {
                // table-wide footer spanning all columns
                writer.startElement("tr", footer);
                writer.startElement("td", footer);
                if (footerClass != null) {
                    writer.writeAttribute("class", footerClass, "footerClass");
                }
                if (info.columns.size() > 1) {
                    writer.writeAttribute("colspan",
                            String.valueOf(info.columns.size()), null);
                }
                encodeRecursive(context, footer);
                writer.endElement("td");
                writer.endElement("tr");
                writer.write("\n");
            }
            writer.endElement("tfoot");
            writer.writeText("\n", component, null);
        }
        // discard the per-render metadata so the next render recomputes it
        clearMetaInfo(context, component);
        ((UIData) component).setRowIndex(-1);
        writer.endElement("table");
        writer.writeText("\n", component, null);
    }

    /**
     * Parses the comma-separated {@code bodyrows} attribute into a list of row
     * indices, or returns null when the attribute is absent.
     */
    private List<Integer> getBodyRows(Map<String, Object> appMap, UIData data) {
        List<Integer> result = null;
        String bodyRows = (String) data.getAttributes().get("bodyrows");
        if (bodyRows != null) {
            String [] rows = UtilityMethods.split(appMap, bodyRows, ",");
            if (rows != null) {
                result = new ArrayList<Integer>(rows.length);
                for (String curRow : rows) {
                    result.add(Integer.valueOf(curRow));
                }
            }
        }
        return result;
    }

    /** Writes a placeholder {@code <tr><td/></tr>} for an empty table body. */
    private void renderEmptyTableRow(final ResponseWriter writer,
            final UIComponent component) throws IOException {
        writer.startElement("tr", component);
        writer.startElement("td", component);
        writer.endElement("td");
        writer.endElement("tr");
    }

    /**
     * Returns the per-render {@link BootMetaInfo} for the table, computing and
     * caching it in the FacesContext attribute map on first access.
     */
    protected BootTableRenderer.BootMetaInfo getMetaInfo(FacesContext context,
            UIComponent table) {
        String key = createKey(table);
        Map<Object, Object> attributes = context.getAttributes();
        BootMetaInfo info = (BootMetaInfo) attributes
                .get(key);
        if (info == null) {
            info = new BootMetaInfo(table);
            attributes.put(key, info);
        }
        return info;
    }

    /** Removes the cached {@link BootMetaInfo} for the table from the context. */
    protected void clearMetaInfo(FacesContext context, UIComponent table) {
        context.getAttributes().remove(createKey(table));
    }

    /** Cache key for this table's metadata; identityHashCode-like per instance. */
    protected String createKey(UIComponent table) {
        return BootMetaInfo.KEY + '_' + table.hashCode();
    }

    /**
     * Immutable-per-render snapshot of the table's rendering metadata:
     * parsed class lists (row/column/header), the rendered columns, and
     * whether any column carries header/footer facets.  Also tracks the
     * cycling counters used while rows are written.
     */
    private static class BootMetaInfo {

        /** Placeholder used when the component is not a UIData (fixed column count). */
        private static final UIColumn PLACE_HOLDER_COLUMN = new UIColumn();

        private static final String[] EMPTY_STRING_ARRAY = new String[0];

        public static final String KEY = BootMetaInfo.class.getName();

        public final String[] rowClasses;
        public final String[] columnClasses;
        public final String[] headerClasses;
        public final List<UIColumn> columns;
        public final boolean hasHeaderFacets;
        public final boolean hasFooterFacets;
        public final int columnCount;

        // cycling counters; column/header reset per row, row counter wraps
        public int columnStyleCounter;
        public int headerStyleCounter;
        public int rowStyleCounter;

        public BootMetaInfo(UIComponent table) {
            rowClasses = getRowClasses(table);
            columnClasses = getColumnClasses(table);
            headerClasses = getHeaderClasses(table);
            columns = getColumns(table);
            columnCount = columns.size();
            hasHeaderFacets = hasFacet("header", columns);
            hasFooterFacets = hasFacet("footer", columns);
        }

        /** Resets the per-row class counters at the start of each data row. */
        public void newRow() {
            columnStyleCounter = 0;
            headerStyleCounter = 0;
        }

        /** Next columnClasses entry for this row, or null when exhausted/blank. */
        public String getCurrentColumnClass() {
            String style = null;
            if (columnStyleCounter < columnClasses.length
                    && columnStyleCounter <= columnCount) {
                style = columnClasses[columnStyleCounter++];
            }
            return ((style != null && style.length() > 0) ? style : null);
        }

        /** Next headerClasses entry for this header row, or null when exhausted/blank. */
        public String getCurrentHeaderClass() {
            String style = null;
            if (headerStyleCounter < headerClasses.length
                    && headerStyleCounter <= columnCount) {
                style = headerClasses[headerStyleCounter++];
            }
            return ((style != null && style.length() > 0) ? style : null);
        }

        /** Next rowClasses entry; the counter wraps so classes cycle over rows. */
        public String getCurrentRowClass() {
            String style = rowClasses[rowStyleCounter++];
            if (rowStyleCounter >= rowClasses.length) {
                rowStyleCounter = 0;
            }
            return style;
        }

        /** Parses the comma-separated columnClasses attribute (empty array if unset). */
        private static String[] getColumnClasses(UIComponent table) {
            String values = ((BootTable) table).get("columnClasses");
            if (values == null) {
                return EMPTY_STRING_ARRAY;
            }
            Map<String, Object> appMap = FacesContext.getCurrentInstance()
                    .getExternalContext().getApplicationMap();
            return UtilityMethods.split(appMap, values.trim(), ",");
        }

        /** Parses the comma-separated headerClasses attribute (empty array if unset). */
        private static String[] getHeaderClasses(UIComponent table) {
            String values = ((BootTable) table).get("headerClasses");
            if (values == null) {
                return EMPTY_STRING_ARRAY;
            }
            Map<String, Object> appMap = FacesContext.getCurrentInstance()
                    .getExternalContext().getApplicationMap();
            return UtilityMethods.split(appMap, values.trim(), ",");
        }

        /**
         * Determines the rendered columns: for a UIData, its rendered UIColumn
         * children; otherwise a fixed number of placeholder columns taken from
         * the "columns" attribute (default 2, minimum 1).
         */
        private static List<UIColumn> getColumns(UIComponent table) {
            if (table instanceof UIData) {
                int childCount = table.getChildCount();
                if (childCount > 0) {
                    List<UIColumn> results = new ArrayList<UIColumn>(childCount);
                    for (UIComponent kid : table.getChildren()) {
                        if ((kid instanceof UIColumn) && kid.isRendered()) {
                            results.add((UIColumn) kid);
                        }
                    }
                    return results;
                } else {
                    return Collections.emptyList();
                }
            } else {
                int count;
                Object value = table.getAttributes().get("columns");
                if ((value != null) && (value instanceof Integer)) {
                    count = ((Integer) value);
                } else {
                    count = 2;
                }
                if (count < 1) {
                    count = 1;
                }
                List<UIColumn> result = new ArrayList<UIColumn>(count);
                for (int i = 0; i < count; i++) {
                    result.add(PLACE_HOLDER_COLUMN);
                }
                return result;
            }
        }

        /** True when any column carries a facet with the given name. */
        private static boolean hasFacet(String name, List<UIColumn> columns) {
            if (!columns.isEmpty()) {
                for (UIColumn column : columns) {
                    if (column.getFacetCount() > 0) {
                        if (column.getFacets().containsKey(name)) {
                            return true;
                        }
                    }
                }
            }
            return false;
        }

        /** Parses the comma-separated rowClasses attribute (empty array if unset). */
        private static String[] getRowClasses(UIComponent table) {
            String values = ((BootTable) table).get("rowClasses");
            if (values == null) {
                return (EMPTY_STRING_ARRAY);
            }
            Map<String, Object> appMap = FacesContext.getCurrentInstance()
                    .getExternalContext().getApplicationMap();
            return UtilityMethods.split(appMap, values.trim(), ",");
        }
    }
}
| kwakutwumasi/Quakearts-JSF-Webtools | qa-boot/src/main/java/com/quakearts/webapp/facelets/bootstrap/renderers/BootTableRenderer.java | Java | apache-2.0 | 15,874 |
using Foundation.Data.Hibernate;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Moq;
using NHibernate;
using NHibernate.Context;
using NHibernate.Engine;
namespace Foundation.Tests.Data.Hibernate
{
[TestClass]
public class HibernateUnitOfWorkTests
{
[TestMethod]
public void Constructor_opens_session_from_factory()
{
var session = new Mock<ISession>();
var context = new Mock<CurrentSessionContext>();
var sessionFactory = new Mock<ISessionFactoryImplementor>();
sessionFactory.Setup(factory => factory.OpenSession()).Returns(session.Object);
sessionFactory.Setup(implementor => implementor.CurrentSessionContext).Returns(context.Object);
session.Setup(session2 => session2.SessionFactory).Returns(sessionFactory.Object);
new HibernateUnitOfWork(sessionFactory.Object);
sessionFactory.Verify(factory1 => factory1.OpenSession());
}
[TestMethod]
public void Constructor_sets_opened_session_flush_mode_to_commit()
{
var session = new Mock<ISession>();
var context = new Mock<CurrentSessionContext>();
var sessionFactory = new Mock<ISessionFactoryImplementor>();
sessionFactory.Setup(factory => factory.OpenSession()).Returns(session.Object);
sessionFactory.Setup(implementor => implementor.CurrentSessionContext).Returns(context.Object);
session.Setup(session2 => session2.SessionFactory).Returns(sessionFactory.Object);
var flushMode = FlushMode.Auto;
session.SetupSet(session2 => session2.FlushMode).Callback(mode => flushMode = mode);
new HibernateUnitOfWork(sessionFactory.Object);
session.VerifySet( session1 => session1.FlushMode, Times.Once());
Assert.AreEqual(FlushMode.Commit, flushMode);
}
[TestMethod]
public void Constructor_begins_transaction_from_session()
{
var session = new Mock<ISession>();
var context = new Mock<CurrentSessionContext>();
var sessionFactory = new Mock<ISessionFactoryImplementor>();
sessionFactory.Setup(factory => factory.OpenSession()).Returns(session.Object);
sessionFactory.Setup(implementor => implementor.CurrentSessionContext).Returns(context.Object);
session.Setup(session2 => session2.SessionFactory).Returns(sessionFactory.Object);
new HibernateUnitOfWork(sessionFactory.Object);
session.Verify(session1 => session1.BeginTransaction());
}
[TestMethod]
public void Dispose_also_disposes_of_session()
{
var session = new Mock<ISession>();
var context = new Mock<CurrentSessionContext>();
var sessionFactory = new Mock<ISessionFactoryImplementor>();
sessionFactory.Setup(factory => factory.OpenSession()).Returns(session.Object);
session.Setup(session2 => session2.SessionFactory).Returns(sessionFactory.Object);
sessionFactory.Setup(implementor => implementor.CurrentSessionContext).Returns(context.Object);
var work = new HibernateUnitOfWork(sessionFactory.Object);
work.Dispose();
session.Verify(session1 => session1.Dispose());
}
[TestMethod]
public void Dispose_also_disposes_of_transaction()
{
var session = new Mock<ISession>();
var transaction = new Mock<ITransaction>();
var context = new Mock<CurrentSessionContext>();
var sessionFactory = new Mock<ISessionFactoryImplementor>();
sessionFactory.Setup(factory => factory.OpenSession()).Returns(session.Object);
session.Setup(session2 => session2.SessionFactory).Returns(sessionFactory.Object);
session.Setup(session3 => session3.BeginTransaction()).Returns(transaction.Object);
sessionFactory.Setup(implementor => implementor.CurrentSessionContext).Returns(context.Object);
var work = new HibernateUnitOfWork(sessionFactory.Object);
work.Dispose();
transaction.Verify(transaction1 => transaction1.Dispose());
}
[TestMethod]
public void Commit_also_commits_transaction()
{
var session = new Mock<ISession>();
var transaction = new Mock<ITransaction>();
var context = new Mock<CurrentSessionContext>();
var sessionFactory = new Mock<ISessionFactory>().As<ISessionFactoryImplementor>();
session.Setup(session2 => session2.SessionFactory).Returns(sessionFactory.Object);
session.Setup(session3 => session3.BeginTransaction()).Returns(transaction.Object);
sessionFactory.Setup(factory => factory.OpenSession()).Returns(session.Object);
sessionFactory.Setup(implementor => implementor.CurrentSessionContext).Returns(context.Object);
var work = new HibernateUnitOfWork(sessionFactory.Object);
work.Commit();
transaction.Verify(transaction1 => transaction1.Commit());
}
}
} | DavidMoore/Foundation | Tests/UnitTests/Foundation.Tests/Data/Hibernate/HibernateUnitOfWorkTests.cs | C# | apache-2.0 | 5,383 |
package info.pupcode.model.repo.test;
import org.junit.After;
import org.junit.Before;
import org.springframework.context.support.ClassPathXmlApplicationContext;
/**
* Created by fabientronche1 on 08.11.15.
*/
/**
 * Base class for Concordion fixtures: creates a fresh Spring container from
 * {@code applicationContext.xml} before each test and looks up the
 * {@link SpringConfigTest} bean by its fully-qualified class name.
 * <p>
 * The container is closed after each test; previously it was leaked
 * (one unclosed {@code ClassPathXmlApplicationContext} per test method).
 */
public class AbstractConcordionFixture {

    protected SpringConfigTest springConfigTest;
    protected ClassPathXmlApplicationContext applicationContext;

    @Before
    public void setUp() {
        applicationContext = new ClassPathXmlApplicationContext("classpath*:applicationContext.xml");
        if (springConfigTest == null) {
            // bean is registered under its FQCN, hence the by-name lookup + cast
            springConfigTest = (SpringConfigTest) applicationContext.getBean(SpringConfigTest.class.getName());
        }
    }

    /** Releases the Spring container created in {@link #setUp()}. */
    @After
    public void tearDown() {
        if (applicationContext != null) {
            applicationContext.close();
            applicationContext = null;
        }
    }
}
| PUPInitiative/pup-code-poc | pup-code-domain/src/test/java/info/pupcode/model/repo/test/AbstractConcordionFixture.java | Java | apache-2.0 | 658 |
/*
* Copyright 2002-2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections.Generic;
using Spring.Objects.Factory.Config;
namespace Spring.Objects.Factory.Support
{
/// <summary>
/// Programmatic means of constructing a <see cref="IObjectDefinition"/> using the builder pattern. Intended primarily
/// for use when implementing custom namespace parsers.
/// </summary>
/// <remarks>Set methods are used instead of properties, so that chaining of methods can be used to create
/// 'one-liner'definitions that set multiple properties at one.</remarks>
/// <author>Rod Johnson</author>
/// <author>Rob Harrop</author>
/// <author>Juergen Hoeller</author>
/// <author>Mark Pollack (.NET)</author>
public class ObjectDefinitionBuilder
{
        /// <summary>The definition being built up by the chained setter calls.</summary>
        private AbstractObjectDefinition objectDefinition;

        /// <summary>Factory used to create the underlying definition (root/child variants only).</summary>
        private IObjectDefinitionFactory objectDefinitionFactory;

        /// <summary>Index of the next constructor argument to be added.</summary>
        private int constructorArgIndex;

        /// <summary>
        /// Initializes a new instance of the <see cref="ObjectDefinitionBuilder"/> class, private
        /// to force use of factory methods.
        /// </summary>
        private ObjectDefinitionBuilder()
        {
        }
/// <summary>
/// Creates a new <see cref="ObjectDefinitionBuilder"/> used to construct a <see cref="Spring.Objects.Factory.Support.GenericObjectDefinition"/>.
/// </summary>
public static ObjectDefinitionBuilder GenericObjectDefinition()
{
ObjectDefinitionBuilder builder = new ObjectDefinitionBuilder();
builder.objectDefinition = new GenericObjectDefinition();
return builder;
}
/// <summary>
/// Creates a new <see cref="ObjectDefinitionBuilder"/> used to construct a <see cref="Spring.Objects.Factory.Support.GenericObjectDefinition"/>.
/// </summary>
/// <param name="objectType">the <see cref="Type"/> of the object that the definition is being created for</param>
public static ObjectDefinitionBuilder GenericObjectDefinition(Type objectType)
{
ObjectDefinitionBuilder builder = new ObjectDefinitionBuilder();
builder.objectDefinition = new GenericObjectDefinition();
builder.objectDefinition.ObjectType = objectType;
return builder;
}
/// <summary>
/// Creates a new <see cref="ObjectDefinitionBuilder"/> used to construct a <see cref="Spring.Objects.Factory.Support.GenericObjectDefinition"/>.
/// </summary>
/// <param name="objectTypeName">the name of the <see cref="Type"/> of the object that the definition is being created for</param>
public static ObjectDefinitionBuilder GenericObjectDefinition(string objectTypeName)
{
ObjectDefinitionBuilder builder = new ObjectDefinitionBuilder();
builder.objectDefinition = new GenericObjectDefinition();
builder.objectDefinition.ObjectTypeName = objectTypeName;
return builder;
}
        /// <summary>
        /// Create a new <code>ObjectDefinitionBuilder</code> used to construct a root object definition.
        /// Convenience overload: equivalent to the three-argument form with no factory method.
        /// </summary>
        /// <param name="objectDefinitionFactory">The object definition factory.</param>
        /// <param name="objectTypeName">The type name of the object.</param>
        /// <returns>A new <code>ObjectDefinitionBuilder</code> instance.</returns>
        public static ObjectDefinitionBuilder RootObjectDefinition(IObjectDefinitionFactory objectDefinitionFactory,
                                                                   string objectTypeName)
        {
            return RootObjectDefinition(objectDefinitionFactory, objectTypeName, null);
        }
/// <summary>
/// Create a new <code>ObjectDefinitionBuilder</code> used to construct a root object definition.
/// </summary>
/// <param name="objectDefinitionFactory">The object definition factory.</param>
/// <param name="objectTypeName">Name of the object type.</param>
/// <param name="factoryMethodName">Name of the factory method.</param>
/// <returns>A new <code>ObjectDefinitionBuilder</code> instance.</returns>
public static ObjectDefinitionBuilder RootObjectDefinition(IObjectDefinitionFactory objectDefinitionFactory,
string objectTypeName,
string factoryMethodName)
{
ObjectDefinitionBuilder builder = new ObjectDefinitionBuilder();
builder.objectDefinitionFactory = objectDefinitionFactory;
// Pass in null for parent name and also AppDomain to force object definition to be register by name and not type.
builder.objectDefinition =
objectDefinitionFactory.CreateObjectDefinition(objectTypeName, null, null);
builder.objectDefinition.FactoryMethodName = factoryMethodName;
return builder;
}
        /// <summary>
        /// Create a new <code>ObjectDefinitionBuilder</code> used to construct a root object definition.
        /// Convenience overload: equivalent to the three-argument form with no factory method.
        /// </summary>
        /// <param name="objectDefinitionFactory">The object definition factory.</param>
        /// <param name="objectType">Type of the object.</param>
        /// <returns>A new <code>ObjectDefinitionBuilder</code> instance.</returns>
        public static ObjectDefinitionBuilder RootObjectDefinition(IObjectDefinitionFactory objectDefinitionFactory,
                                                                   Type objectType)
        {
            return RootObjectDefinition(objectDefinitionFactory, objectType, null);
        }
/// <summary>
/// Create a new <code>ObjectDefinitionBuilder</code> used to construct a root object definition.
/// </summary>
/// <param name="objectDefinitionFactory">The object definition factory.</param>
/// <param name="objectType">Type of the object.</param>
/// <param name="factoryMethodName">Name of the factory method.</param>
/// <returns>A new <code>ObjectDefinitionBuilder</code> instance.</returns>
public static ObjectDefinitionBuilder RootObjectDefinition(IObjectDefinitionFactory objectDefinitionFactory,
Type objectType, string factoryMethodName)
{
ObjectDefinitionBuilder builder = new ObjectDefinitionBuilder();
builder.objectDefinitionFactory = objectDefinitionFactory;
builder.objectDefinition =
objectDefinitionFactory.CreateObjectDefinition(objectType.FullName, null, AppDomain.CurrentDomain);
builder.objectDefinition.ObjectType = objectType;
builder.objectDefinition.FactoryMethodName = factoryMethodName;
return builder;
}
/// <summary>
/// Creates a new <code>ObjectDefinitionBuilder</code> for a child object definition
/// that inherits from the named parent definition.
/// </summary>
/// <param name="objectDefinitionFactory">The factory that materializes the definition.</param>
/// <param name="parentObjectName">The name of the parent object definition.</param>
/// <returns>A new <code>ObjectDefinitionBuilder</code> instance.</returns>
public static ObjectDefinitionBuilder ChildObjectDefinition(IObjectDefinitionFactory objectDefinitionFactory,
                                                            string parentObjectName)
{
    ObjectDefinitionBuilder builder = new ObjectDefinitionBuilder();
    builder.objectDefinitionFactory = objectDefinitionFactory;
    // No type of its own: only the parent name identifies this definition.
    builder.objectDefinition =
        objectDefinitionFactory.CreateObjectDefinition(null, parentObjectName, AppDomain.CurrentDomain);
    return builder;
}
/// <summary>
/// Gets the object definition exactly as accumulated so far, without validation.
/// </summary>
/// <value>The raw (unvalidated) object definition.</value>
public AbstractObjectDefinition RawObjectDefinition
{
    get
    {
        return this.objectDefinition;
    }
}
/// <summary>
/// Validates the accumulated configuration and then returns the object definition.
/// </summary>
/// <value>The validated object definition.</value>
public AbstractObjectDefinition ObjectDefinition
{
    get
    {
        // Fail fast if the accumulated configuration is inconsistent.
        this.objectDefinition.Validate();
        return this.objectDefinition;
    }
}
//TODO add expression support.
/// <summary>
/// Adds a property value under the given property name.
/// </summary>
/// <param name="name">The property name.</param>
/// <param name="value">The property value.</param>
/// <returns>The current <code>ObjectDefinitionBuilder</code>.</returns>
public ObjectDefinitionBuilder AddPropertyValue(string name, object value)
{
    this.objectDefinition.PropertyValues.Add(new PropertyValue(name, value));
    return this;
}
/// <summary>
/// Adds a reference to another named object under the given property name.
/// </summary>
/// <param name="name">The property name.</param>
/// <param name="objectName">The name of the referenced object.</param>
/// <returns>The current <code>ObjectDefinitionBuilder</code>.</returns>
public ObjectDefinitionBuilder AddPropertyReference(string name, string objectName)
{
    RuntimeObjectReference reference = new RuntimeObjectReference(objectName);
    this.objectDefinition.PropertyValues.Add(new PropertyValue(name, reference));
    return this;
}
/// <summary>
/// Appends a constructor argument value at the next index; the index counter is
/// tracked internally so arguments accumulate in call order.
/// </summary>
/// <param name="value">The constructor argument value.</param>
/// <returns>The current <code>ObjectDefinitionBuilder</code>.</returns>
public ObjectDefinitionBuilder AddConstructorArg(object value)
{
    this.objectDefinition.ConstructorArgumentValues.AddIndexedArgumentValue(this.constructorArgIndex++, value);
    return this;
}
/// <summary>
/// Appends a reference to the named object as the next constructor argument.
/// </summary>
/// <param name="objectName">The name of the referenced object.</param>
/// <returns>The current <code>ObjectDefinitionBuilder</code>.</returns>
public ObjectDefinitionBuilder AddConstructorArgReference(string objectName)
{
    return AddConstructorArg(new RuntimeObjectReference(objectName));
}
/// <summary>
/// Specifies the static factory method used to create instances for this definition.
/// </summary>
/// <param name="factoryMethod">The factory method name.</param>
/// <returns>The current <code>ObjectDefinitionBuilder</code>.</returns>
public ObjectDefinitionBuilder SetFactoryMethod(string factoryMethod)
{
    this.objectDefinition.FactoryMethodName = factoryMethod;
    return this;
}
/// <summary>
/// Specifies the factory object and its method used to create instances for this definition.
/// </summary>
/// <param name="factoryObject">The name of the factory object.</param>
/// <param name="factoryMethod">The factory method name.</param>
/// <returns>The current <code>ObjectDefinitionBuilder</code>.</returns>
public ObjectDefinitionBuilder SetFactoryObject(string factoryObject, string factoryMethod)
{
    this.objectDefinition.FactoryObjectName = factoryObject;
    this.objectDefinition.FactoryMethodName = factoryMethod;
    return this;
}
/// <summary>
/// Marks whether this definition describes a singleton object.
/// </summary>
/// <param name="singleton">true for singleton scope; false for prototype scope.</param>
/// <returns>The current <code>ObjectDefinitionBuilder</code>.</returns>
public ObjectDefinitionBuilder SetSingleton(bool singleton)
{
    this.objectDefinition.IsSingleton = singleton;
    return this;
}
/// <summary>
/// Marks whether this definition is abstract (a pure template that is never instantiated).
/// </summary>
/// <param name="flag">true to make the definition abstract.</param>
/// <returns>The current <code>ObjectDefinitionBuilder</code>.</returns>
public ObjectDefinitionBuilder SetAbstract(bool flag)
{
    this.objectDefinition.IsAbstract = flag;
    return this;
}
/// <summary>
/// Marks whether objects for this definition should be lazily initialized.
/// </summary>
/// <param name="lazy">true for lazy initialization.</param>
/// <returns>The current <code>ObjectDefinitionBuilder</code>.</returns>
public ObjectDefinitionBuilder SetLazyInit(bool lazy)
{
    this.objectDefinition.IsLazyInit = lazy;
    return this;
}
/// <summary>
/// Specifies the autowire mode for this definition.
/// </summary>
/// <param name="autowireMode">The autowire mode.</param>
/// <returns>The current <code>ObjectDefinitionBuilder</code>.</returns>
public ObjectDefinitionBuilder SetAutowireMode(AutoWiringMode autowireMode)
{
    this.objectDefinition.AutowireMode = autowireMode;
    return this;
}
/// <summary>
/// Specifies whether this definition is a candidate when other objects autowire by type.
/// </summary>
/// <param name="autowireCandidate">The autowire candidate value.</param>
/// <returns>The current <code>ObjectDefinitionBuilder</code>.</returns>
public ObjectDefinitionBuilder SetAutowireCandidate(bool autowireCandidate)
{
    this.objectDefinition.IsAutowireCandidate = autowireCandidate;
    return this;
}
/// <summary>
/// Specifies whether this definition is the primary candidate among multiple matches.
/// </summary>
/// <param name="primary">true to mark the object as primary.</param>
/// <returns>The current <code>ObjectDefinitionBuilder</code>.</returns>
public ObjectDefinitionBuilder SetPrimary(bool primary)
{
    this.objectDefinition.IsPrimary = primary;
    return this;
}
/// <summary>
/// Specifies the dependency check mode for this definition.
/// </summary>
/// <param name="dependencyCheck">The dependency checking mode.</param>
/// <returns>The current <code>ObjectDefinitionBuilder</code>.</returns>
public ObjectDefinitionBuilder SetDependencyCheck(DependencyCheckingMode dependencyCheck)
{
    this.objectDefinition.DependencyCheck = dependencyCheck;
    return this;
}
/// <summary>
/// Specifies the destroy (tear-down) method for this definition.
/// </summary>
/// <param name="methodName">The name of the destroy method.</param>
/// <returns>The current <code>ObjectDefinitionBuilder</code>.</returns>
public ObjectDefinitionBuilder SetDestroyMethodName(string methodName)
{
    this.objectDefinition.DestroyMethodName = methodName;
    return this;
}
/// <summary>
/// Specifies the initialization method for this definition.
/// </summary>
/// <param name="methodName">The name of the init method.</param>
/// <returns>The current <code>ObjectDefinitionBuilder</code>.</returns>
public ObjectDefinitionBuilder SetInitMethodName(string methodName)
{
    this.objectDefinition.InitMethodName = methodName;
    return this;
}
/// <summary>
/// Specifies a human-readable description of the resource this definition came from.
/// </summary>
/// <param name="resourceDescription">The resource description.</param>
/// <returns>The current <code>ObjectDefinitionBuilder</code>.</returns>
public ObjectDefinitionBuilder SetResourceDescription(string resourceDescription)
{
    this.objectDefinition.ResourceDescription = resourceDescription;
    return this;
}
/// <summary>
/// Appends the given object name to the list of objects this definition depends on.
/// </summary>
/// <param name="objectName">The name of the depended-on object.</param>
/// <returns>The current <code>ObjectDefinitionBuilder</code>.</returns>
public ObjectDefinitionBuilder AddDependsOn(string objectName)
{
    var current = this.objectDefinition.DependsOn;
    if (current == null)
    {
        // First dependency: a single-element array suffices.
        this.objectDefinition.DependsOn = new[] { objectName };
        return this;
    }
    // Copy the existing names and append the new one.
    var merged = new List<string>(current.Count + 1);
    merged.AddRange(current);
    merged.Add(objectName);
    this.objectDefinition.DependsOn = merged;
    return this;
}
}
} | spring-projects/spring-net | src/Spring/Spring.Core/Objects/Factory/Support/ObjectDefinitionBuilder.cs | C# | apache-2.0 | 17,503 |
using System;
using Windows.ApplicationModel.Resources;
namespace Okra.Data.Helpers
{
internal static class ResourceHelper
{
    // *** Constants ***
    private const string RESOURCEMAP_ERROR = "Okra.Data/Errors";
    private const string RESOURCE_NOT_FOUND_ERROR = "Exception_ArgumentException_ResourceStringNotFound";
    // *** Static Fields ***
    private static ResourceLoader errorResourceLoader;
    // *** Methods ***
    /// <summary>
    /// Looks up an error message by key in the "Okra.Data/Errors" resource map.
    /// Throws an ArgumentException when the key cannot be resolved.
    /// </summary>
    public static string GetErrorResource(string resourceName)
    {
        // Lazily create the loader for the error resource map on first use.
        if (errorResourceLoader == null)
            errorResourceLoader = ResourceLoader.GetForViewIndependentUse(RESOURCEMAP_ERROR);
        string errorResource = errorResourceLoader.GetString(resourceName);
        // An empty result means the key is missing; report that through the dedicated
        // "resource not found" message. The name comparison guards against infinite
        // recursion when that message itself cannot be resolved.
        if (string.IsNullOrEmpty(errorResource) && resourceName != RESOURCE_NOT_FOUND_ERROR)
            throw new ArgumentException(GetErrorResource(RESOURCE_NOT_FOUND_ERROR));
        return errorResource;
    }
}
}
| OkraFramework/Okra.Data | src/Okra.Data/Helpers/ResourceHelper.cs | C# | apache-2.0 | 1,010 |
package pro.luxun.luxunanimation.presenter.adapter;
import android.support.annotation.UiThread;
import android.support.v7.widget.RecyclerView;
import android.view.View;
import android.view.ViewGroup;
import java.util.ArrayList;
import java.util.List;
/**
 * Created by wufeiyang on 16/5/7.
 *
 * Generic RecyclerView adapter over an item type T and an item view type V.
 * Subclasses supply the item view via {@link #onCreateItemView(ViewGroup, int)}
 * and bind data via {@link #onBindView(View, Object)}.
 */
public abstract class BaseRecyclerAdapter<T, V extends View> extends RecyclerView.Adapter<BaseRecyclerAdapter.BaseViewHolder<V>> {
    // Backing list of the items currently displayed.
    protected List<T> mItems = new ArrayList<>();
    @Override
    public BaseViewHolder<V> onCreateViewHolder(ViewGroup parent, int viewType) {
        return new BaseViewHolder<>(onCreateItemView(parent, viewType));
    }
    @Override
    public int getItemCount() {
        return mItems.size();
    }
    /**
     * Replaces all items with the given list and refreshes the view.
     */
    @UiThread
    public void refresh(List<T> datas){
        mItems.clear();
        add(datas);
    }
    /**
     * Appends the given items and refreshes the view.
     */
    @UiThread
    public void add(List<T> datas){
        mItems.addAll(datas);
        notifyDataSetChanged();
    }
    // Parameterized holder type (was raw BaseViewHolder) plus the typed view kept by
    // the holder remove the unchecked (V) cast of itemView the original needed.
    @Override
    public void onBindViewHolder(BaseViewHolder<V> holder, int position) {
        onBindView(holder.typedItemView, mItems.get(position));
    }
    /** Creates the (typed) item view for one list entry. */
    protected abstract V onCreateItemView(ViewGroup parent, int viewType);
    /** Binds one item's data onto its view. */
    protected abstract void onBindView(V v, T t);
    public static class BaseViewHolder<V extends View> extends RecyclerView.ViewHolder{
        // Strongly-typed alias of itemView so callers avoid an unchecked cast.
        final V typedItemView;
        public BaseViewHolder(V itemView) {
            super(itemView);
            this.typedItemView = itemView;
        }
    }
}
| ayaseruri/luxunPro | app/src/main/java/pro/luxun/luxunanimation/presenter/adapter/BaseRecyclerAdapter.java | Java | apache-2.0 | 1,444 |
package cn.xmut.experiment.service.impl;
import java.util.List;
import org.apache.commons.fileupload.FileItem;
import cn.xmut.experiment.dao.IExperimentDao;
import cn.xmut.experiment.dao.impl.jdbc.ExperimentDaoImpl;
import cn.xmut.experiment.domain.Experiment;
import cn.xmut.experiment.domain.ShowExperiment;
import cn.xmut.experiment.service.IExperimentService;
/**
 * Service facade over {@link IExperimentDao}; every method delegates one-to-one
 * to the DAO layer.
 */
public class ExperimentServiceImpl implements IExperimentService {
    // private final: the DAO is an internal collaborator fixed at construction.
    private final IExperimentDao experimentDao = new ExperimentDaoImpl();
    /** Persists a new experiment together with its uploaded document. */
    public boolean addExperiment(Experiment experiment, String docName,String dirPath, FileItem fileItem) {
        return experimentDao.addExperiment(experiment, docName, dirPath, fileItem);
    }
    public boolean updateExperiment(Experiment experiment) {
        return experimentDao.updateExperiment(experiment);
    }
    /** Returns the stored document path for the given experiment id. */
    public String getDocPath(int experimentId) {
        return experimentDao.getDocPath(experimentId);
    }
    public Experiment getExperiment(int experimentId) {
        return experimentDao.getExperiment(experimentId);
    }
    public List<ShowExperiment> queryPass(Experiment experiment) {
        return experimentDao.queryPass(experiment);
    }
    public List<ShowExperiment> queryNodistribute(Experiment experiment) {
        return experimentDao.queryNodistribute(experiment);
    }
    public List<ShowExperiment> expertQueryNoExtimate(Experiment experiment, String expertId) {
        return experimentDao.expertQueryNoExtimate(experiment, expertId);
    }
    public List<ShowExperiment> managerQueryNoExtimate(Experiment experiment) {
        return experimentDao.managerQueryNoExtimate(experiment);
    }
    public List<ShowExperiment> managerQueryNoPass(Experiment experiment) {
        return experimentDao.managerQueryNoPass(experiment);
    }
    public boolean delExperiment(Experiment experiment) {
        return experimentDao.delExperiment(experiment);
    }
    public List<ShowExperiment> headmanQueryNoPass(Experiment experiment) {
        return experimentDao.headmanQueryNoPass(experiment);
    }
}
| bingoogolapple/J2EENote | experiment/src/cn/xmut/experiment/service/impl/ExperimentServiceImpl.java | Java | apache-2.0 | 1,988 |
/* Yet Another Forum.NET
* Copyright (C) 2003-2005 Bjørnar Henden
* Copyright (C) 2006-2013 Jaben Cargman
* Copyright (C) 2014-2020 Ingo Herbote
* https://www.yetanotherforum.net/
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* https://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
namespace YAF.Utils.Helpers
{
using System.Text.RegularExpressions;
/// <summary>
/// The URL helper.
/// </summary>
public static class UrlHelper
{
    /// <summary>
    /// Matches absolute http/https/ftp URLs. Compiled once and reused because
    /// <see cref="CountUrls"/> may be invoked once per posted message; the original
    /// re-parsed the pattern on every call via the static Regex.Matches overload.
    /// </summary>
    private static readonly Regex UrlRegex = new Regex(
        @"((http|ftp|https):\/\/[\w\-_]+(\.[\w\-_]+)+([\w\-\.,@?^=%&:/~\+#]*[\w\-\@?^=%&/~\+#])?)",
        RegexOptions.Compiled);
    /// <summary>
    /// Counts the URLs.
    /// </summary>
    /// <param name="message">The message.</param>
    /// <returns>Returns how many URLs the message contains</returns>
    public static int CountUrls(string message)
    {
        return UrlRegex.Matches(message).Count;
    }
}
} | Pathfinder-Fr/YAFNET | yafsrc/YAF.Utils/Helpers/UrlHelper.cs | C# | apache-2.0 | 1,676 |
/**
* @file
* Declares the any type.
*/
#pragma once
#include "../values/forward.hpp"
#include <ostream>
namespace puppet { namespace runtime { namespace types {
    // Forward declaration of recursion_guard (used to break cycles through type aliases).
    struct recursion_guard;
    /**
     * Represents the Puppet Any type.
     * Declarations only; the implementations live in the corresponding source file.
     */
    struct any
    {
        /**
         * Gets the name of the type.
         * @return Returns the name of the type (i.e. Any).
         */
        static char const* name();
        /**
         * Creates a generalized version of the type.
         * @return Returns the generalized type.
         */
        values::type generalize() const;
        /**
         * Determines if the given value is an instance of this type.
         * @param value The value to determine if it is an instance of this type.
         * @param guard The recursion guard to use for aliases.
         * @return Returns true if the given value is an instance of this type or false if not.
         */
        bool is_instance(values::value const& value, recursion_guard& guard) const;
        /**
         * Determines if the given type is assignable to this type.
         * @param other The other type to check for assignability.
         * @param guard The recursion guard to use for aliases.
         * @return Returns true if the given type is assignable to this type or false if the given type is not assignable to this type.
         */
        bool is_assignable(values::type const& other, recursion_guard& guard) const;
        /**
         * Writes a representation of the type to the given stream.
         * @param stream The stream to write to.
         * @param expand True to specify that type aliases should be expanded or false if not.
         */
        void write(std::ostream& stream, bool expand = true) const;
    };
    /**
     * Stream insertion operator for any type.
     * @param os The output stream to write the type to.
     * @param type The type to write.
     * @return Returns the given output stream.
     */
    std::ostream& operator<<(std::ostream& os, any const& type);
    /**
     * Equality operator for any. Any carries no state, so all instances compare equal.
     * @param left The left type to compare.
     * @param right The right type to compare.
     * @return Always returns true (Any type is always equal to Any).
     */
    bool operator==(any const& left, any const& right);
    /**
     * Inequality operator for any. Any carries no state, so no two instances differ.
     * @param left The left type to compare.
     * @param right The right type to compare.
     * @return Always returns false (Any type is always equal to Any).
     */
    bool operator!=(any const& left, any const& right);
    /**
     * Hashes the any type.
     * @param type The any type to hash.
     * @return Returns the hash value for the type.
     */
    size_t hash_value(any const& type);
}}}  // namespace puppet::runtime::types
| peterhuene/puppetcpp | lib/include/puppet/runtime/types/any.hpp | C++ | apache-2.0 | 2,856 |
package server
import (
"bytes"
"context"
"encoding/json"
"io/ioutil"
"log"
"net/http"
"strings"
"sync"
"sync/atomic"
"text/template"
"time"
"github.com/TV4/graceful"
"github.com/gogap/config"
"github.com/gogap/go-wkhtmltox/wkhtmltox"
"github.com/gorilla/mux"
"github.com/phyber/negroni-gzip/gzip"
"github.com/rs/cors"
"github.com/spf13/cast"
"github.com/urfave/negroni"
)
const (
	// defaultTemplateText is the fallback response template: a JSON envelope
	// carrying code, message and (when present) the converted result.
	defaultTemplateText = `{"code":{{.Code}},"message":"{{.Message}}"{{if .Result}},"result":{{.Result|jsonify}}{{end}}}`
)
var (
	htmlToX     *wkhtmltox.WKHtmlToX                      // shared converter, assigned once in New
	renderTmpls = make(map[string]*template.Template)     // named response templates loaded from config
	defaultTmpl *template.Template                        // parsed defaultTemplateText
)
// ConvertData carries the converted document bytes in the response result.
type ConvertData struct {
	Data []byte `json:"data"`
}
// ConvertArgs is the request payload accepted by the /convert endpoint.
type ConvertArgs struct {
	To        string                   `json:"to"`        // target format: "image" or "pdf" (case-insensitive)
	Fetcher   wkhtmltox.FetcherOptions `json:"fetcher"`   // how to obtain the input HTML
	Converter json.RawMessage          `json:"converter"` // raw converter options, decoded per target format
	Template  string                   `json:"template"`  // optional name of a configured response template
}
// TemplateArgs is the data model handed to response templates.
type TemplateArgs struct {
	To string
	ConvertResponse
	Response *RespHelper
}
// ConvertResponse is the logical response envelope rendered by templates.
type ConvertResponse struct {
	Code    int         `json:"code"`
	Message string      `json:"message"`
	Result  interface{} `json:"result"`
}
// serverWrapper binds one listen address to the shared negroni handler and
// tracks in-flight requests so shutdown can wait for them to drain.
type serverWrapper struct {
	tls       bool
	certFile  string
	keyFile   string
	reqNumber int64 // in-flight request count, updated atomically
	addr      string
	n         *negroni.Negroni
	timeout   time.Duration // max time Shutdown waits for requests to drain
}
// ServeHTTP counts the request as in-flight for the duration of handling
// (so Shutdown can wait for active requests), then delegates to negroni.
func (p *serverWrapper) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	atomic.AddInt64(&p.reqNumber, 1)
	defer atomic.AddInt64(&p.reqNumber, -1)
	p.n.ServeHTTP(w, r)
}
// ListenAndServe starts serving on the configured address, using TLS with the
// configured certificate pair when enabled.
func (p *serverWrapper) ListenAndServe() (err error) {
	if p.tls {
		return http.ListenAndServeTLS(p.addr, p.certFile, p.keyFile, p)
	}
	return http.ListenAndServe(p.addr, p)
}
// Shutdown waits for in-flight requests to drain, giving up after p.timeout,
// then logs that shutdown finished. It always returns nil to preserve the
// original graceful-shutdown contract.
//
// BUGFIX: the previous implementation loaded the request counter once before
// the loop and never re-read it, so whenever requests were active at entry the
// loop could only exit via the timeout even after all requests had finished.
// The counter is now re-read on every iteration.
func (p *serverWrapper) Shutdown(ctx context.Context) error {
	schema := "HTTP"
	if p.tls {
		schema = "HTTPS"
	}
	beginTime := time.Now()
	for atomic.LoadInt64(&p.reqNumber) > 0 {
		if time.Since(beginTime) > p.timeout {
			break
		}
		time.Sleep(time.Second)
	}
	log.Printf("[%s] Shutdown finished, Address: %s\n", schema, p.addr)
	return nil
}
type WKHtmlToXServer struct {
conf config.Configuration
servers []*serverWrapper
}
// New builds a WKHtmlToXServer from configuration: it initializes the shared
// wkhtmltox converter, parses the default and configured response templates,
// wires CORS/gzip middleware plus the /convert and /ping routes, and prepares
// (but does not start — see Run) the HTTP and/or HTTPS listeners.
func New(conf config.Configuration) (srv *WKHtmlToXServer, err error) {
	serviceConf := conf.GetConfig("service")
	wkHtmlToXConf := conf.GetConfig("wkhtmltox")
	htmlToX, err = wkhtmltox.New(wkHtmlToXConf)
	if err != nil {
		return
	}
	// init templates
	defaultTmpl, err = template.New("default").Funcs(funcMap).Parse(defaultTemplateText)
	if err != nil {
		return
	}
	err = loadTemplates(
		serviceConf.GetConfig("templates"),
	)
	if err != nil {
		return
	}
	// init http server
	c := cors.New(
		cors.Options{
			AllowedOrigins:     serviceConf.GetStringList("cors.allowed-origins"),
			AllowedMethods:     serviceConf.GetStringList("cors.allowed-methods"),
			AllowedHeaders:     serviceConf.GetStringList("cors.allowed-headers"),
			ExposedHeaders:     serviceConf.GetStringList("cors.exposed-headers"),
			AllowCredentials:   serviceConf.GetBoolean("cors.allow-credentials"),
			MaxAge:             int(serviceConf.GetInt64("cors.max-age")),
			OptionsPassthrough: serviceConf.GetBoolean("cors.options-passthrough"),
			Debug:              serviceConf.GetBoolean("cors.debug"),
		},
	)
	r := mux.NewRouter()
	pathPrefix := serviceConf.GetString("path", "/")
	r.PathPrefix(pathPrefix).Path("/convert").
		Methods("POST").
		HandlerFunc(handleHtmlToX)
	// Liveness probe.
	r.PathPrefix(pathPrefix).Path("/ping").
		Methods("GET", "HEAD").HandlerFunc(
		func(rw http.ResponseWriter, req *http.Request) {
			rw.Header().Set("Content-Type", "text/plain; charset=utf-8")
			rw.Write([]byte("pong"))
		},
	)
	n := negroni.Classic()
	n.Use(c) // use cors
	if serviceConf.GetBoolean("gzip-enabled", true) {
		n.Use(gzip.Gzip(gzip.DefaultCompression))
	}
	n.UseHandler(r)
	gracefulTimeout := serviceConf.GetTimeDuration("graceful.timeout", time.Second*3)
	enableHTTP := serviceConf.GetBoolean("http.enabled", true)
	enableHTTPS := serviceConf.GetBoolean("https.enabled", false)
	var servers []*serverWrapper
	if enableHTTP {
		listenAddr := serviceConf.GetString("http.address", "127.0.0.1:8080")
		httpServer := &serverWrapper{
			n:       n,
			timeout: gracefulTimeout,
			addr:    listenAddr,
		}
		servers = append(servers, httpServer)
	}
	if enableHTTPS {
		// BUGFIX: read the HTTPS listen address from "https.address"; the
		// previous code reused the "http.address" key, so a configured HTTPS
		// address could never take effect.
		listenAddr := serviceConf.GetString("https.address", "127.0.0.1:443")
		certFile := serviceConf.GetString("https.cert")
		keyFile := serviceConf.GetString("https.key")
		httpsServer := &serverWrapper{
			n:        n,
			timeout:  gracefulTimeout,
			addr:     listenAddr,
			tls:      true,
			certFile: certFile,
			keyFile:  keyFile,
		}
		servers = append(servers, httpsServer)
	}
	srv = &WKHtmlToXServer{
		conf:    conf,
		servers: servers,
	}
	return
}
// Run starts every configured listener concurrently and blocks until all of
// them have stopped.
func (p *WKHtmlToXServer) Run() (err error) {
	wg := sync.WaitGroup{}
	wg.Add(len(p.servers))
	for _, server := range p.servers {
		go func(srv *serverWrapper) {
			defer wg.Done()
			schema := "HTTP"
			if srv.tls {
				schema = "HTTPS"
			}
			log.Printf("[%s] Listening on %s\n", schema, srv.addr)
			graceful.ListenAndServe(srv)
		}(server)
	}
	wg.Wait()
	return
}
// writeResp renders resp through the template named in convertArgs.Template
// (falling back to the default template when unset or unknown) and writes the
// rendered bytes unless the template took over the response via RespHelper.
func writeResp(rw http.ResponseWriter, convertArgs ConvertArgs, resp ConvertResponse) {
	tmpl := defaultTmpl
	if name := convertArgs.Template; len(name) > 0 {
		if custom, exist := renderTmpls[name]; exist {
			tmpl = custom
		}
	}
	respHelper := newRespHelper(rw)
	args := TemplateArgs{
		To:              convertArgs.To,
		ConvertResponse: resp,
		Response:        respHelper,
	}
	// Render into a buffer first so template errors never truncate the body.
	buf := bytes.NewBuffer(nil)
	if err := tmpl.Execute(buf, args); err != nil {
		log.Println(err)
	}
	if !respHelper.Holding() {
		rw.Write(buf.Bytes())
	}
}
// handleHtmlToX decodes a convert request, dispatches to the image or PDF
// converter according to the "to" field, and writes the templated response.
func handleHtmlToX(rw http.ResponseWriter, req *http.Request) {
	args := ConvertArgs{}
	decoder := json.NewDecoder(req.Body)
	decoder.UseNumber()
	if err := decoder.Decode(&args); err != nil {
		writeResp(rw, args, ConvertResponse{http.StatusBadRequest, err.Error(), nil})
		return
	}
	if len(args.Converter) == 0 {
		writeResp(rw, args, ConvertResponse{http.StatusBadRequest, "converter is nil", nil})
		return
	}
	// Pick converter options by target format.
	var opts wkhtmltox.ConvertOptions
	switch strings.ToUpper(args.To) {
	case "IMAGE":
		opts = &wkhtmltox.ToImageOptions{}
	case "PDF":
		opts = &wkhtmltox.ToPDFOptions{}
	default:
		writeResp(rw, args, ConvertResponse{http.StatusBadRequest, "argument of to is illegal (image|pdf)", nil})
		return
	}
	if err := json.Unmarshal(args.Converter, opts); err != nil {
		writeResp(rw, args, ConvertResponse{http.StatusBadRequest, err.Error(), nil})
		return
	}
	convData, err := htmlToX.Convert(args.Fetcher, opts)
	if err != nil {
		writeResp(rw, args, ConvertResponse{http.StatusBadRequest, err.Error(), nil})
		return
	}
	writeResp(rw, args, ConvertResponse{0, "", ConvertData{Data: convData}})
}
// loadTemplates parses every template referenced by the given config section
// into the package-level render cache, keyed by template name.
func loadTemplates(tmplsConf config.Configuration) (err error) {
	if tmplsConf == nil {
		return
	}
	for _, name := range tmplsConf.Keys() {
		file := tmplsConf.GetString(name + ".template")
		var data []byte
		if data, err = ioutil.ReadFile(file); err != nil {
			return
		}
		var tmpl *template.Template
		if tmpl, err = template.New(name).Funcs(funcMap).Parse(string(data)); err != nil {
			return
		}
		renderTmpls[name] = tmpl
	}
	return
}
// RespHelper is exposed to response templates so they can set headers, write
// the status code and body directly, and optionally "hold" (suppress) the
// default body write performed by writeResp.
type RespHelper struct {
	rw   http.ResponseWriter
	hold bool
}
// newRespHelper wraps rw in a helper with holding disabled.
func newRespHelper(rw http.ResponseWriter) *RespHelper {
	return &RespHelper{rw: rw, hold: false}
}
// SetHeader sets a response header; key and value are coerced to strings.
func (p *RespHelper) SetHeader(key, value interface{}) error {
	p.rw.Header().Set(cast.ToString(key), cast.ToString(value))
	return nil
}
// Hold toggles suppression of the default body write; v is coerced to bool.
func (p *RespHelper) Hold(v interface{}) error {
	p.hold = cast.ToBool(v)
	return nil
}
// Holding reports whether the default body write is suppressed.
func (p *RespHelper) Holding() bool {
	return p.hold
}
// Write writes raw bytes to the response body.
func (p *RespHelper) Write(data []byte) error {
	p.rw.Write(data)
	return nil
}
// WriteHeader writes the HTTP status code; code is coerced to an int and a
// coercion failure is returned to the template.
func (p *RespHelper) WriteHeader(code interface{}) error {
	c, err := cast.ToIntE(code)
	if err != nil {
		return err
	}
	p.rw.WriteHeader(c)
	return nil
}
| gogap/go-wkhtmltox | server/server.go | GO | apache-2.0 | 8,166 |
/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.collection.primitive;
/**
 * Visitor over the (long key, int value) entries of a primitive map. It has a
 * single abstract method and is typically implemented as a lambda, hence
 * {@code @FunctionalInterface} to let the compiler enforce that shape.
 *
 * @param <E> type of exception an implementation may throw from {@link #visited(long, int)}
 */
@FunctionalInterface
public interface PrimitiveLongIntVisitor<E extends Exception>
{
    /**
     * Visit the given entry.
     *
     * @param key The key of the entry.
     * @param value The value of the entry.
     * @return 'true' to signal that the iteration should be stopped, 'false' to signal that the iteration should
     * continue if there are more entries to look at.
     * @throws E any thrown exception of type 'E' will bubble up through the 'visit' method.
     */
    boolean visited( long key, int value ) throws E;
}
| HuangLS/neo4j | community/primitive-collections/src/main/java/org/neo4j/collection/primitive/PrimitiveLongIntVisitor.java | Java | apache-2.0 | 1,354 |
package org.anyline.entity;
import com.fasterxml.jackson.databind.JsonNode;
import org.anyline.util.*;
import org.anyline.util.regular.Regular;
import org.anyline.util.regular.RegularUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.Serializable;
import java.math.BigDecimal;
import java.util.*;
public class DataSet implements Collection<DataRow>, Serializable {
private static final long serialVersionUID = 6443551515441660101L;
private boolean result = true; // whether the last operation succeeded
private Exception exception = null; // exception raised by the last operation, if any
private String message = null; // informational / error message
private PageNavi navi = null; // pagination state
private List<String> head = null; // column headers
private List<DataRow> rows = null; // row data
private List<String> primaryKeys = null; // primary key column names
private String datalink = null; // data link
private String dataSource = null; // data source (table | view | XML-defined SQL)
private String schema = null;
private String table = null;
private long createTime = 0; // creation timestamp (ms)
private long expires = -1; // time-to-live in ms counted from createTime; -1 means never expire
private boolean isFromCache = false; // whether this set was served from a cache
private boolean isAsc = false;
private boolean isDesc = false;
private Map<String, Object> queryParams = new HashMap<String, Object>();// query conditions used to produce this set
/**
 * Create an index on the given key. Placeholder — not implemented yet; the
 * current body is an intentional no-op that returns this unchanged.
 * (NOTE(review): the method name has a typo, "creatIndex", but it is part of
 * the public API and must stay.)
 *
 * @param key index key, e.g. "ID" or "ID:ASC"
 * @return this DataSet
 */
public DataSet creatIndex(String key) {
    return this;
}
// Default constructor: empty row list, creation time stamped for cache expiry.
public DataSet() {
    rows = new ArrayList<DataRow>();
    createTime = System.currentTimeMillis();
}
/**
 * Builds a DataSet from a list of maps; each map becomes one DataRow.
 * A null list yields an empty set.
 */
public DataSet(List<Map<String, Object>> list) {
    rows = new ArrayList<DataRow>();
    if (null != list) {
        for (Map<String, Object> map : list) {
            rows.add(new DataRow(map));
        }
    }
}
/**
 * Alias of {@link #parse(Collection, String...)}.
 */
public static DataSet build(Collection<?> list, String ... fields) {
    return parse(list, fields);
}
/**
 * Parse a collection into a DataSet.
 * @param list source collection
 * @param fields when an entry is itself a collection (two-dimensional data),
 *        fields names the key for each index, e.g. "ID","CODE","NAME";
 *        without fields the index itself is used as the key, as in
 *        row.put("0","100").put("1","A01").put("2","Zhang San");
 *        when fields outnumber an entry's elements, null is stored.
 *
 *        When an entry is a bean, fields maps property to key,
 *        e.g. "USER_ID:id","USER_NM:name"; without fields the bean's own
 *        property names are used as the DataRow keys.
 *
 * @return DataSet
 */
public static DataSet parse(Collection<?> list, String ... fields) {
    DataSet set = new DataSet();
    if (null != list) {
        for (Object obj : list) {
            DataRow row = null;
            if(obj instanceof Collection){
                // Positional (two-dimensional) entry.
                row = DataRow.parseList((Collection)obj, fields);
            }else {
                // Bean entry.
                row = DataRow.parse(obj, fields);
            }
            set.add(row);
        }
    }
    return set;
}
/**
 * Parses a JSON array string into a DataSet; returns null when the input is
 * null or cannot be parsed.
 */
public static DataSet parseJson(DataRow.KEY_CASE keyCase, String json) {
    if (null != json) {
        try {
            return parseJson(keyCase, BeanUtil.JSON_MAPPER.readTree(json));
        } catch (Exception e) {
            // Previously swallowed silently. Still best-effort (returns null),
            // but the failure is now logged so bad input is diagnosable.
            log.error("parse json error: {}", e.toString());
        }
    }
    return null;
}
public static DataSet parseJson(String json) {
    return parseJson(DataRow.KEY_CASE.CONFIG, json);
}
/**
 * Converts a JSON array node into a DataSet, one DataRow per element;
 * non-array nodes yield an empty set.
 */
public static DataSet parseJson(DataRow.KEY_CASE keyCase, JsonNode json) {
    DataSet set = new DataSet();
    if (null != json && json.isArray()) {
        // JsonNode is Iterable; for-each replaces the manual iterator loop.
        for (JsonNode item : json) {
            set.add(DataRow.parseJson(keyCase, item));
        }
    }
    return set;
}
public static DataSet parseJson(JsonNode json) {
    return parseJson(DataRow.KEY_CASE.CONFIG, json);
}
// Applies DataRow.Camel() to every row — presumably converts keys to
// upper-initial camel case; see DataRow for the exact rule (TODO confirm).
public DataSet Camel(){
    for(DataRow row:rows){
        row.Camel();
    }
    return this;
}
// Applies DataRow.camel() to every row — presumably lower camel case keys;
// see DataRow for the exact rule (TODO confirm).
public DataSet camel(){
    for(DataRow row:rows){
        row.camel();
    }
    return this;
}
// Sets every row's "new" flag (NOTE(review): presumably controls
// insert-vs-update behavior on save — confirm against DataRow).
public DataSet setIsNew(boolean bol) {
    for (DataRow row : rows) {
        row.setIsNew(bol);
    }
    return this;
}
/**
 * Removes the specified keys from every row in this set.
 *
 * @param keys keys to remove
 * @return this DataSet
 */
public DataSet remove(String... keys) {
    for (DataRow row : rows) {
        for (String key : keys) {
            row.remove(key);
        }
    }
    return this;
}
// Trims every row via DataRow.trim() (presumably trims string values — confirm in DataRow).
public DataSet trim(){
    for(DataRow row:rows){
        row.trim();
    }
    return this;
}
/**
 * Appends primary key columns.
 *
 * @param applyItem whether to propagate the keys to every contained DataRow (default true in other overloads)
 * @param pks primary key column names
 * @return this DataSet
 */
public DataSet addPrimaryKey(boolean applyItem, String... pks) {
    if (null != pks) {
        List<String> list = new ArrayList<>();
        for (String pk : pks) {
            list.add(pk);
        }
        addPrimaryKey(applyItem, list);
    }
    return this;
}
public DataSet addPrimaryKey(String... pks) {
    return addPrimaryKey(true, pks);
}
/**
 * Appends primary key columns, skipping blanks and duplicates; names are
 * normalized via key(pk) before being stored.
 *
 * @param applyItem whether to propagate the keys to every contained DataRow
 * @param pks primary key column names
 * @return this DataSet
 */
public DataSet addPrimaryKey(boolean applyItem, Collection<String> pks) {
    if (null == primaryKeys) {
        primaryKeys = new ArrayList<>();
    }
    if (null == pks) {
        return this;
    }
    for (String pk : pks) {
        if (BasicUtil.isEmpty(pk)) {
            continue;
        }
        pk = key(pk);
        if (!primaryKeys.contains(pk)) {
            primaryKeys.add(pk);
        }
    }
    if (applyItem) {
        for (DataRow row : rows) {
            row.setPrimaryKey(false, primaryKeys);
        }
    }
    return this;
}
public DataSet addPrimaryKey(Collection<String> pks) {
    return addPrimaryKey(true, pks);
}
/**
 * Replaces the primary key columns.
 *
 * @param applyItem whether to propagate the keys to every contained DataRow
 * @param pks primary key column names
 * @return this DataSet
 */
public DataSet setPrimaryKey(boolean applyItem, String... pks) {
    if (null != pks) {
        List<String> list = new ArrayList<>();
        for (String pk : pks) {
            list.add(pk);
        }
        setPrimaryKey(applyItem, list);
    }
    return this;
}
public DataSet setPrimaryKey(String... pks) {
    return setPrimaryKey(true, pks);
}
/**
 * Replaces the primary key columns: clears the current keys, then delegates
 * to addPrimaryKey for normalization and propagation. A null argument is a no-op.
 *
 * @param applyItem whether to propagate the keys to every contained DataRow
 * @param pks primary key column names
 * @return this DataSet
 */
public DataSet setPrimaryKey(boolean applyItem, Collection<String> pks) {
    if (null == pks) {
        return this;
    }
    this.primaryKeys = new ArrayList<>();
    addPrimaryKey(applyItem, pks);
    return this;
}
public DataSet setPrimaryKey(Collection<String> pks) {
    return setPrimaryKey(true, pks);
}
// Replaces the row at the given index.
public DataSet set(int index, DataRow item) {
    rows.set(index, item);
    return this;
}
/**
 * Whether at least one primary key column has been declared for this set.
 *
 * @return true if primary keys exist
 */
public boolean hasPrimaryKeys() {
    return null != primaryKeys && primaryKeys.size() > 0;
}
/**
 * Primary key column names; never null (lazily initialized to an empty list).
 *
 * @return primary key list
 */
public List<String> getPrimaryKeys() {
    if (null == primaryKeys) {
        primaryKeys = new ArrayList<>();
    }
    return primaryKeys;
}
/**
* 添加表头
*
* @param col col
* @return return
*/
public DataSet addHead(String col) {
if (null == head) {
head = new ArrayList<>();
}
if ("ROW_NUMBER".equals(col)) {
return this;
}
if (head.contains(col)) {
return this;
}
head.add(col);
return this;
}
/**
* 表头
*
* @return return
*/
public List<String> getHead() {
return head;
}
public int indexOf(Object obj) {
return rows.indexOf(obj);
}
/**
* 从begin开始截断到end,方法执行将改变原DataSet长度
*
* @param begin 开始位置
* @param end 结束位置
* @return DataSet
*/
public DataSet truncates(int begin, int end) {
if (!rows.isEmpty()) {
if (begin < 0) {
begin = 0;
}
if (end >= rows.size()) {
end = rows.size() - 1;
}
if (begin >= rows.size()) {
begin = rows.size() - 1;
}
if (end <= 0) {
end = 0;
}
rows = rows.subList(begin, end);
}
return this;
}
    /**
     * Truncate from {@code begin} to the last row (mutates this set).
     * A negative {@code begin} counts from the end, i.e. keeps the last -begin rows.
     *
     * @param begin start index
     * @return this
     */
    public DataSet truncates(int begin) {
        if (begin < 0) {
            begin = rows.size() + begin;
            int end = rows.size() - 1;
            return truncates(begin, end);
        } else {
            return truncates(begin, rows.size() - 1);
        }
    }
    /**
     * Truncate from {@code begin} to the end and return the first remaining row.
     *
     * @param begin start index
     * @return first remaining row, or null when the set became empty
     */
    public DataRow truncate(int begin) {
        return truncate(begin, rows.size() - 1);
    }
    /**
     * Truncate to the range [begin, end] and return the first remaining row.
     *
     * @param begin start index
     * @param end end index
     * @return first remaining row, or null when the set became empty
     */
    public DataRow truncate(int begin, int end) {
        truncates(begin, end);
        if (rows.size() > 0) {
            return rows.get(0);
        } else {
            return null;
        }
    }
    /**
     * Copy rows from {@code begin} to the end into a new DataSet (this set is unchanged).
     *
     * @param begin start index; a negative value means "the last -begin rows",
     *              clamped to the full set when the count exceeds the size
     * @return new DataSet
     */
    public DataSet cuts(int begin) {
        if (begin < 0) {
            begin = rows.size() + begin;
            int end = rows.size() - 1;
            return cuts(begin, end);
        } else {
            return cuts(begin, rows.size() - 1);
        }
    }
    /**
     * Copy rows in the inclusive range [begin, end] into a new DataSet;
     * indexes are clamped. This set is unchanged.
     *
     * @param begin start index
     * @param end end index (inclusive)
     * @return new DataSet
     */
    public DataSet cuts(int begin, int end) {
        DataSet result = new DataSet();
        if (rows.isEmpty()) {
            return result;
        }
        if (begin < 0) {
            begin = 0;
        }
        if (end >= rows.size()) {
            end = rows.size() - 1;
        }
        if (begin >= rows.size()) {
            begin = rows.size() - 1;
        }
        if (end <= 0) {
            end = 0;
        }
        for (int i = begin; i <= end; i++) {
            result.add(rows.get(i));
        }
        return result;
    }
    /**
     * First row of {@code cuts(begin)}; this set is unchanged.
     *
     * @param begin start index
     * @return first row of the slice, or null when empty
     */
    public DataRow cut(int begin) {
        return cut(begin, rows.size() - 1);
    }
    /**
     * First row of {@code cuts(begin, end)}; this set is unchanged.
     *
     * @param begin start index
     * @param end end index (inclusive)
     * @return first row of the slice, or null when empty
     */
    public DataRow cut(int begin, int end) {
        DataSet result = cuts(begin, end);
        if (result.size() > 0) {
            return result.getRow(0);
        }
        return null;
    }
    /**
     * Number of rows (0 when the backing list is null).
     */
    public int size() {
        int result = 0;
        if (null != rows)
            result = rows.size();
        return result;
    }
    public int getSize() {
        return size();
    }
    /**
     * Whether an exception was recorded while producing this set.
     */
    public boolean isException() {
        return null != exception;
    }
    public boolean isFromCache() {
        return isFromCache;
    }
    public DataSet setIsFromCache(boolean bol) {
        this.isFromCache = bol;
        return this;
    }
    /**
     * Whether this set contains no rows.
     */
    public boolean isEmpty() {
        boolean result = true;
        if (null == rows) {
            result = true;
        } else if (rows instanceof Collection) {
            result = ((Collection<?>) rows).isEmpty();
        }
        return result;
    }
    /**
     * Row at the given index; its container is set to this DataSet.
     *
     * @param index row index
     * @return the row, or null when out of range
     */
    public DataRow getRow(int index) {
        DataRow row = null;
        if (null != rows && index < rows.size()) {
            row = rows.get(index);
        }
        if (null != row) {
            row.setContainer(this);
        }
        return row;
    }
    /** Whether any row matches the given conditions (see getRows). */
    public boolean exists(String ... params){
        DataRow row = getRow(0, params);
        return row != null;
    }
    public DataRow getRow(String... params) {
        return getRow(0, params);
    }
    public DataRow getRow(DataRow params) {
        return getRow(0, params);
    }
    public DataRow getRow(List<String> params) {
        String[] kvs = BeanUtil.list2array(params);
        return getRow(0, kvs);
    }
    /** First row at or after {@code begin} matching the conditions, or null. */
    public DataRow getRow(int begin, String... params) {
        DataSet set = getRows(begin, 1, params);
        if (set.size() > 0) {
            return set.getRow(0);
        }
        return null;
    }
    public DataRow getRow(int begin, DataRow params) {
        DataSet set = getRows(begin, 1, params);
        if (set.size() > 0) {
            return set.getRow(0);
        }
        return null;
    }
    /**
     * De-duplicate by the given keys; result rows contain only those keys.
     *
     * @param keys keys to de-duplicate on
     * @return new DataSet
     */
    public DataSet distinct(String... keys) {
        DataSet result = new DataSet();
        if (null != rows) {
            int size = rows.size();
            for (int i = 0; i < size; i++) {
                DataRow row = rows.get(i);
                // skip when an equal key combination is already in the result
                String[] params = packParam(row, keys);
                if (result.getRow(params) == null) {
                    DataRow tmp = new DataRow();
                    for (String key : keys) {
                        tmp.put(key, row.get(key));
                    }
                    result.addRow(tmp);
                }
            }
        }
        result.cloneProperty(this);
        return result;
    }
    public DataSet distinct(List<String> keys) {
        DataSet result = new DataSet();
        if (null != rows) {
            for (DataRow row:rows) {
                // skip when an equal key combination is already in the result
                String[] params = packParam(row, keys);
                if (result.getRow(params) == null) {
                    DataRow tmp = new DataRow();
                    for (String key : keys) {
                        tmp.put(key, row.get(key));
                    }
                    result.addRow(tmp);
                }
            }
        }
        result.cloneProperty(this);
        return result;
    }
    /** Deep copy: every row is cloned and set-level properties are copied. */
    public Object clone() {
        DataSet set = new DataSet();
        List<DataRow> rows = new ArrayList<DataRow>();
        for (DataRow row : this.rows) {
            rows.add((DataRow) row.clone());
        }
        set.setRows(rows);
        set.cloneProperty(this);
        return set;
    }
    /** Copy set-level properties (exception, navi, keys, table info ...) from another set into this one. */
    private DataSet cloneProperty(DataSet from) {
        return cloneProperty(from, this);
    }
    /**
     * Copy set-level properties from one DataSet to another.
     *
     * @param from source set
     * @param to destination set
     * @return the destination set
     */
    public static DataSet cloneProperty(DataSet from, DataSet to) {
        if (null != from && null != to) {
            to.exception = from.exception;
            to.message = from.message;
            to.navi = from.navi;
            to.head = from.head;
            to.primaryKeys = from.primaryKeys;
            to.dataSource = from.dataSource;
            to.datalink = from.datalink;
            to.schema = from.schema;
            to.table = from.table;
        }
        return to;
    }
    /**
     * Convert the given keys of every row to numbers.
     * @param keys keys
     * @return this
     */
    public DataSet convertNumber(String ... keys){
        if(null != keys) {
            for(DataRow row:rows){
                row.convertNumber(keys);
            }
        }
        return this;
    }
    /** Convert the given keys of every row to strings. */
    public DataSet convertString(String ... keys){
        if(null != keys) {
            for(DataRow row:rows){
                row.convertString(keys);
            }
        }
        return this;
    }
    /** Set the skip flag on every row; skipped rows are ignored by getRows filtering. */
    public DataSet skip(boolean skip){
        for(DataRow row:rows){
            row.skip = skip;
        }
        return this;
    }
/**
* 筛选符合条件的集合
* 注意如果String类型 1与1.0比较不相等, 可以先调用convertNumber转换一下数据类型
* @param params key1,value1,key2:value2,key3,value3
* "NM:zh%","AGE:>20","NM","%zh%"
* @param begin begin
* @param qty 最多筛选多少个 0表示不限制
* @return return
*/
public DataSet getRows(int begin, int qty, String... params) {
DataSet set = new DataSet();
Map<String, String> kvs = new HashMap<String, String>();
int len = params.length;
int i = 0;
String srcFlagTag = "srcFlag"; //参数含有{}的 在kvs中根据key值+tag 放入一个新的键值对,如时间格式TIME:{10:10}
while (i < len) {
String p1 = params[i];
if (BasicUtil.isEmpty(p1)) {
i++;
continue;
} else if (p1.contains(":")) {
String ks[] = BeanUtil.parseKeyValue(p1);
kvs.put(ks[0], ks[1]);
i++;
continue;
} else {
if (i + 1 < len) {
String p2 = params[i + 1];
if (BasicUtil.isEmpty(p2) || !p2.contains(":")) {
kvs.put(p1, p2);
i += 2;
continue;
} else if (p2.startsWith("{") && p2.endsWith("}")) {
p2 = p2.substring(1, p2.length() - 1);
kvs.put(p1, p2);
kvs.put(p1 + srcFlagTag, "true");
i += 2;
continue;
} else {
String ks[] = BeanUtil.parseKeyValue(p2);
kvs.put(ks[0], ks[1]);
i += 2;
continue;
}
}
}
i++;
}
return getRows(begin, qty, kvs);
}
    /** Filter rows using the entries of {@code kvs} (read as strings) as conditions. */
    public DataSet getRows(int begin, int qty, DataRow kvs) {
        Map<String,String> map = new HashMap<String,String>();
        for(String k:kvs.keySet()){
            map.put(k, kvs.getString(k));
        }
        return getRows(begin, qty, map);
    }
public DataSet getRows(int begin, int qty, Map<String, String> kvs) {
DataSet set = new DataSet();
String srcFlagTag = "srcFlag"; //参数含有{}的 在kvs中根据key值+tag 放入一个新的键值对
BigDecimal d1;
BigDecimal d2;
for (DataRow row:rows) {
if(row.skip){
continue;
}
boolean chk = true;//对比结果
for (String k : kvs.keySet()) {
boolean srcFlag = false;
if (k.endsWith(srcFlagTag)) {
continue;
} else {
String srcFlagValue = kvs.get(k + srcFlagTag);
if (BasicUtil.isNotEmpty(srcFlagValue)) {
srcFlag = true;
}
}
String v = kvs.get(k);
Object value = row.get(k);
if(!row.containsKey(k) && null == value){
//注意这里有可能是个复合key
chk = false;
break;
}
if (null == v) {
if (null != value) {
chk = false;
break;
}else{
continue;
}
} else {
if (null == value) {
chk = false;
break;
}
//与SQL.COMPARE_TYPE保持一致
int compare = 10;
if (v.startsWith("=")) {
compare = 10;
v = v.substring(1);
} else if (v.startsWith(">")) {
compare = 20;
v = v.substring(1);
} else if (v.startsWith(">=")) {
compare = 21;
v = v.substring(2);
} else if (v.startsWith("<")) {
compare = 30;
v = v.substring(1);
} else if (v.startsWith("<=")) {
compare = 31;
v = v.substring(2);
} else if (v.startsWith("%") && v.endsWith("%")) {
compare = 50;
v = v.substring(1, v.length() - 1);
} else if (v.endsWith("%")) {
compare = 51;
v = v.substring(0, v.length() - 1);
} else if (v.startsWith("%")) {
compare = 52;
v = v.substring(1);
}
if(compare <= 31 && value instanceof Number) {
try {
d1 = new BigDecimal(value.toString());
d2 = new BigDecimal(v);
int cr = d1.compareTo(d2);
if (compare == 10) {
if (cr != 0) {
chk = false;
break;
}
} else if (compare == 20) {
if (cr <= 0) {
chk = false;
break;
}
} else if (compare == 21) {
if (cr < 0) {
chk = false;
break;
}
} else if (compare == 30) {
if (cr >= 0) {
chk = false;
break;
}
} else if (compare == 31) {
if (cr > 0) {
chk = false;
break;
}
}
}catch (NumberFormatException e){
chk = false;
break;
}
}
String str = value + "";
str = str.toLowerCase();
v = v.toLowerCase();
if (srcFlag) {
v = "{" + v + "}";
}
if (compare == 10) {
if (!v.equals(str)) {
chk = false;
break;
}
} else if (compare == 50) {
if (!str.contains(v)) {
chk = false;
break;
}
} else if (compare == 51) {
if (!str.startsWith(v)) {
chk = false;
break;
}
} else if (compare == 52) {
if (!str.endsWith(v)) {
chk = false;
break;
}
}
}
}//end for kvs
if (chk) {
set.add(row);
if (qty > 0 && set.size() >= qty) {
break;
}
}
}//end for rows
set.cloneProperty(this);
return set;
}
public DataSet getRows(int begin, String... params) {
return getRows(begin, -1, params);
}
public DataSet getRows(String... params) {
return getRows(0, params);
}
public DataSet getRows(DataSet set, String key) {
String kvs[] = new String[set.size()];
int i = 0;
for (DataRow row : set) {
String value = row.getString(key);
if (BasicUtil.isNotEmpty(value)) {
kvs[i++] = key + ":" + value;
}
}
return getRows(kvs);
}
public DataSet getRows(DataRow row, String... keys) {
List<String> list = new ArrayList<>();
int i = 0;
for (String key : keys) {
String value = row.getString(key);
if (BasicUtil.isNotEmpty(value)) {
list.add(key + ":" + value);
}
}
String[] kvs = BeanUtil.list2array(list);
return getRows(kvs);
}
/**
* 数字格式化
*
* @param format format
* @param cols cols
* @return return
*/
public DataSet formatNumber(String format, String... cols) {
if (null == cols || BasicUtil.isEmpty(format)) {
return this;
}
int size = size();
for (int i = 0; i < size; i++) {
DataRow row = getRow(i);
row.formatNumber(format, cols);
}
return this;
}
public DataSet numberFormat(String target, String key, String format){
for(DataRow row: rows){
numberFormat(target, key, format);
}
return this;
}
public DataSet numberFormat(String key, String format){
return numberFormat(key, key, format);
}
/**
* 日期格式化
*
* @param format format
* @param cols cols
* @return return
*/
public DataSet formatDate(String format, String... cols) {
if (null == cols || BasicUtil.isEmpty(format)) {
return this;
}
int size = size();
for (int i = 0; i < size; i++) {
DataRow row = getRow(i);
row.formatDate(format, cols);
}
return this;
}
public DataSet dateFormat(String target, String key, String format){
for(DataRow row: rows){
dateFormat(target, key, format);
}
return this;
}
public DataSet dateFormat(String key, String format){
return dateFormat(key, key, format);
}
    /**
     * Rows in the inclusive index range [begin, end] whose value of {@code key}
     * equals {@code value} (missing values are read as "").
     *
     * @param begin begin
     * @param end end
     * @param key key
     * @param value value
     * @return matching rows
     */
    public DataSet filter(int begin, int end, String key, String value) {
        DataSet set = new DataSet();
        String tmpValue;
        int size = size();
        if (begin < 0) {
            begin = 0;
        }
        for (int i = begin; i < size && i <= end; i++) {
            tmpValue = getString(i, key, "");
            if ((null == value && null == tmpValue)
                    || (null != value && value.equals(tmpValue))) {
                set.add(getRow(i));
            }
        }
        set.cloneProperty(this);
        return set;
    }
    /** Copy rows in the inclusive index range [fr, to] into a new DataSet. */
    public DataSet getRows(int fr, int to) {
        DataSet set = new DataSet();
        int size = this.size();
        if (fr < 0) {
            fr = 0;
        }
        for (int i = fr; i < size && i <= to; i++) {
            set.addRow(getRow(i));
        }
        return set;
    }
/**
* 合计
* @param begin 开始
* @param end 结束
* @param key key
* @return BigDecimal
*/
public BigDecimal sum(int begin, int end, String key) {
BigDecimal result = BigDecimal.ZERO;
int size = rows.size();
if (begin <= 0) {
begin = 0;
}
for (int i = begin; i < size && i <= end; i++) {
BigDecimal tmp = getDecimal(i, key, 0);
if (null != tmp) {
result = result.add(getDecimal(i, key, 0));
}
}
return result;
}
public BigDecimal sum(String key) {
BigDecimal result = BigDecimal.ZERO;
result = sum(0, size() - 1, key);
return result;
}
    /**
     * Sum several columns at once.
     * @param result row receiving the sums (created when null)
     * @param keys columns to sum; when null, all numeric columns of the first row
     * @return result row
     */
    public DataRow sums(DataRow result, String... keys) {
        if(null == result){
            result = new DataRow();
        }
        if (size() > 0) {
            if (null != keys) {
                for (String key : keys) {
                    result.put(key, sum(key));
                }
            } else {
                List<String> numberKeys = getRow(0).numberKeys();
                for (String key : numberKeys) {
                    result.put(key, sum(key));
                }
            }
        }
        return result;
    }
    public DataRow sums(String... keys) {
        return sums(new DataRow(), keys);
    }
    /**
     * Average several columns at once.
     *
     * @param result row receiving the averages (created when null)
     * @param keys columns to average; when null, all numeric columns of the first row
     * @return result row
     */
    public DataRow avgs(DataRow result, String... keys) {
        if(null == result){
            result = new DataRow();
        }
        if (size() > 0) {
            if (null != keys) {
                for (String key : keys) {
                    result.put(key, avg(key));
                }
            } else {
                List<String> numberKeys = getRow(0).numberKeys();
                for (String key : numberKeys) {
                    result.put(key, avg(key));
                }
            }
        }
        return result;
    }
    public DataRow avgs(String... keys) {
        return avgs(new DataRow(), keys);
    }
    /**
     * Average several columns at once with explicit scale and rounding mode.
     * @param result row receiving the averages (created when null)
     * @param scale decimal scale of each average
     * @param round rounding mode (BigDecimal.ROUND_* constant)
     * @param keys columns to average; when null, all numeric columns of the first row
     * @return result row
     */
    public DataRow avgs(DataRow result, int scale, int round, String... keys) {
        if(null == result){
            result = new DataRow();
        }
        if (size() > 0) {
            if (null != keys) {
                for (String key : keys) {
                    result.put(key, avg(key, scale, round));
                }
            } else {
                List<String> numberKeys = getRow(0).numberKeys();
                for (String key : numberKeys) {
                    result.put(key, avg(key, scale, round));
                }
            }
        }
        return result;
    }
    public DataRow avgs(int scale, int round, String... keys) {
        return avgs(new DataRow(), scale, round, keys);
    }
    /**
     * Maximum value of {@code key} over the first {@code top} rows
     * (missing/unparsable values are read as 0).
     *
     * @param top number of rows to scan
     * @param key key
     * @return maximum, or null when the scanned range is empty
     */
    public BigDecimal maxDecimal(int top, String key) {
        BigDecimal result = null;
        int size = rows.size();
        if (size > top) {
            size = top;
        }
        for (int i = 0; i < size; i++) {
            BigDecimal tmp = getDecimal(i, key, 0);
            if (null != tmp && (null == result || tmp.compareTo(result) > 0)) {
                result = tmp;
            }
        }
        return result;
    }
    public BigDecimal maxDecimal(String key) {
        return maxDecimal(size(), key);
    }
    public int maxInt(int top, String key) {
        BigDecimal result = maxDecimal(top, key);
        if (null == result) {
            return 0;
        }
        return result.intValue();
    }
    public int maxInt(String key) {
        return maxInt(size(), key);
    }
    public double maxDouble(int top, String key) {
        BigDecimal result = maxDecimal(top, key);
        if (null == result) {
            return 0;
        }
        return result.doubleValue();
    }
    public double maxDouble(String key) {
        return maxDouble(size(), key);
    }
    //	public BigDecimal max(int top, String key){
    //		BigDecimal result = maxDecimal(top, key);
    //		return result;
    //	}
    //	public BigDecimal max(String key){
    //		return maxDecimal(size(), key);
    //	}
    /**
     * Minimum value of {@code key} over the first {@code top} rows
     * (missing/unparsable values are read as 0).
     *
     * @param top number of rows to scan
     * @param key key
     * @return minimum, or null when the scanned range is empty
     */
    public BigDecimal minDecimal(int top, String key) {
        BigDecimal result = null;
        int size = rows.size();
        if (size > top) {
            size = top;
        }
        for (int i = 0; i < size; i++) {
            BigDecimal tmp = getDecimal(i, key, 0);
            if (null != tmp && (null == result || tmp.compareTo(result) < 0)) {
                result = tmp;
            }
        }
        return result;
    }
    public BigDecimal minDecimal(String key) {
        return minDecimal(size(), key);
    }
    public int minInt(int top, String key) {
        BigDecimal result = minDecimal(top, key);
        if (null == result) {
            return 0;
        }
        return result.intValue();
    }
    public int minInt(String key) {
        return minInt(size(), key);
    }
    public double minDouble(int top, String key) {
        BigDecimal result = minDecimal(top, key);
        if (null == result) {
            return 0;
        }
        return result.doubleValue();
    }
    public double minDouble(String key) {
        return minDouble(size(), key);
    }
    //	public BigDecimal min(int top, String key){
    //		BigDecimal result = minDecimal(top, key);
    //		return result;
    //	}
    //	public BigDecimal min(String key){
    //		return minDecimal(size(), key);
    //	}
    /**
     * Row holding the maximum value of {@code key}.
     * Side effect: when the set is not already sorted (isAsc/isDesc flags),
     * it is sorted ascending by {@code key} in place.
     *
     * @param key key
     * @return row with the maximum value, or null for an empty set
     */
    public DataRow max(String key) {
        int size = size();
        if (size == 0) {
            return null;
        }
        DataRow row = null;
        if (isAsc) {
            row = getRow(size - 1);
        } else if (isDesc) {
            row = getRow(0);
        } else {
            asc(key);
            row = getRow(size - 1);
        }
        return row;
    }
    /**
     * Row holding the minimum value of {@code key}; same sorting side effect as max().
     */
    public DataRow min(String key) {
        int size = size();
        if (size == 0) {
            return null;
        }
        DataRow row = null;
        if (isAsc) {
            row = getRow(0);
        } else if (isDesc) {
            row = getRow(size - 1);
        } else {
            asc(key);
            row = getRow(0);
        }
        return row;
    }
    /**
     * Average over the first {@code top} rows. Null values are excluded from the
     * sum but still counted in the divisor.
     *
     * @param top number of rows to scan
     * @param key key
     * @param scale decimal scale
     * @param round rounding mode (BigDecimal.ROUND_* constant)
     * @return average, ZERO for an empty range
     */
    public BigDecimal avg(int top, String key, int scale, int round) {
        BigDecimal result = BigDecimal.ZERO;
        int size = rows.size();
        if (size > top) {
            size = top;
        }
        int count = 0;
        for (int i = 0; i < size; i++) {
            BigDecimal tmp = getDecimal(i, key, 0);
            if (null != tmp) {
                result = result.add(tmp);
            }
            count++;
        }
        if (count > 0) {
            result = result.divide(new BigDecimal(count), scale, round);
        }
        return result;
    }
    public BigDecimal avg(String key, int scale, int round) {
        BigDecimal result = avg(size(), key, scale ,round);
        return result;
    }
    /** Average over all rows, scale 2, ROUND_HALF_UP. */
    public BigDecimal avg(String key) {
        BigDecimal result = avg(size(), key, 2, BigDecimal.ROUND_HALF_UP);
        return result;
    }
public DataSet addRow(DataRow row) {
if (null != row) {
rows.add(row);
}
return this;
}
public DataSet addRow(int idx, DataRow row) {
if (null != row) {
rows.add(idx, row);
}
return this;
}
    /**
     * Join the values of column {@code key} with {@code connector}.
     *
     * @param key key
     * @param connector connector
     * @return e.g. "v1,v2,v3"
     */
    public String concat(String key, String connector) {
        return BasicUtil.concat(getStrings(key), connector);
    }
    /** Join values of {@code key}; null values are rendered as "". */
    public String concatNvl(String key, String connector) {
        return BasicUtil.concat(getNvlStrings(key), connector);
    }
    /**
     * Join values of {@code key}, skipping null values.
     *
     * @param key key
     * @param connector connector
     * @return e.g. "v1,v2,v3"
     */
    public String concatWithoutNull(String key, String connector) {
        return BasicUtil.concat(getStringsWithoutNull(key), connector);
    }
    /**
     * Join values of {@code key}, skipping empty values.
     *
     * @param key key
     * @param connector connector
     * @return e.g. "v1,v2,v3"
     */
    public String concatWithoutEmpty(String key, String connector) {
        return BasicUtil.concat(getStringsWithoutEmpty(key), connector);
    }
    public String concatNvl(String key) {
        return BasicUtil.concat(getNvlStrings(key), ",");
    }
    public String concatWithoutNull(String key) {
        return BasicUtil.concat(getStringsWithoutNull(key), ",");
    }
    public String concatWithoutEmpty(String key) {
        return BasicUtil.concat(getStringsWithoutEmpty(key), ",");
    }
    public String concat(String key) {
        return BasicUtil.concat(getStrings(key), ",");
    }
    /**
     * Values of a single column, in row order.
     *
     * @param key key
     * @return values
     */
    public List<Object> fetchValues(String key) {
        List<Object> result = new ArrayList<Object>();
        for (int i = 0; i < size(); i++) {
            result.add(get(i, key));
        }
        return result;
    }
    /**
     * Distinct values of a single column (read as strings, missing values as "").
     *
     * @param key key
     * @return distinct values
     */
    public List<String> fetchDistinctValue(String key) {
        List<String> result = new ArrayList<>();
        for (int i = 0; i < size(); i++) {
            String value = getString(i, key, "");
            if (result.contains(value)) {
                continue;
            }
            result.add(value);
        }
        return result;
    }
    public List<String> fetchDistinctValues(String key) {
        return fetchDistinctValue(key);
    }
    /**
     * Pagination HTML from the attached navi, or "" when there is none.
     * NOTE(review): the link parameter is currently unused — confirm before relying on it.
     *
     * @param link link
     * @return navi HTML
     */
    public String displayNavi(String link) {
        String result = "";
        if (null != navi) {
            result = navi.getHtml();
        }
        return result;
    }
    public String navi(String link) {
        return displayNavi(link);
    }
    public String displayNavi() {
        return displayNavi(null);
    }
    public String navi() {
        return displayNavi(null);
    }
    /** Put a key/value into the row at idx (no-op when the index is out of range). */
    public DataSet put(int idx, String key, Object value) {
        DataRow row = getRow(idx);
        if (null != row) {
            row.put(key, value);
        }
        return this;
    }
    /** Remove the given keys from every row (delegates to DataRow.removes). */
    public DataSet removes(String... keys) {
        for (DataRow row : rows) {
            row.removes(keys);
        }
        return this;
    }
    /**
     * String value of {@code key} in the row at {@code index}.
     *
     * @param index index
     * @param key key
     * @return String
     * @throws Exception when the row is missing
     */
    public String getString(int index, String key) throws Exception {
        return getRow(index).getString(key);
    }
    public String getString(int index, String key, String def) {
        try {
            return getString(index, key);
        } catch (Exception e) {
            return def;
        }
    }
    public String getString(String key) throws Exception {
        return getString(0, key);
    }
    public String getString(String key, String def) {
        return getString(0, key, def);
    }
    /** Raw value of {@code key} in the row at {@code index}; null when out of range. */
    public Object get(int index, String key) {
        DataRow row = getRow(index);
        if (null != row) {
            return row.get(key);
        }
        return null;
    }
    // NOTE(review): despite the List<Object> return type, values are collected
    // via getString — confirm whether raw values were intended.
    public List<Object> gets(String key) {
        List<Object> list = new ArrayList<Object>();
        for (DataRow row : rows) {
            list.add(row.getString(key));
        }
        return list;
    }
    /** Non-null nested DataSets stored under {@code key} in each row. */
    public List<DataSet> getSets(String key) {
        List<DataSet> list = new ArrayList<DataSet>();
        for (DataRow row : rows) {
            DataSet set = row.getSet(key);
            if (null != set) {
                list.add(set);
            }
        }
        return list;
    }
    /** String values of {@code key} for every row (may contain nulls). */
    public List<String> getStrings(String key) {
        List<String> result = new ArrayList<>();
        for (DataRow row : rows) {
            result.add(row.getString(key));
        }
        return result;
    }
    /** Int values of {@code key} for every row. */
    public List<Integer> getInts(String key) throws Exception {
        List<Integer> result = new ArrayList<Integer>();
        for (DataRow row : rows) {
            result.add(row.getInt(key));
        }
        return result;
    }
    /** Raw values of {@code key} for every row. */
    public List<Object> getObjects(String key) {
        List<Object> result = new ArrayList<Object>();
        for (DataRow row : rows) {
            result.add(row.get(key));
        }
        return result;
    }
    public List<String> getDistinctStrings(String key) {
        return fetchDistinctValue(key);
    }
    /** String values of {@code key}; nulls are replaced by "". */
    public List<String> getNvlStrings(String key) {
        List<String> result = new ArrayList<>();
        List<Object> list = fetchValues(key);
        for (Object val : list) {
            if (null != val) {
                result.add(val.toString());
            } else {
                result.add("");
            }
        }
        return result;
    }
    /** String values of {@code key}, empty values skipped. */
    public List<String> getStringsWithoutEmpty(String key) {
        List<String> result = new ArrayList<>();
        List<Object> list = fetchValues(key);
        for (Object val : list) {
            if (BasicUtil.isNotEmpty(val)) {
                result.add(val.toString());
            }
        }
        return result;
    }
    /** String values of {@code key}, null values skipped. */
    public List<String> getStringsWithoutNull(String key) {
        List<String> result = new ArrayList<>();
        List<Object> list = fetchValues(key);
        for (Object val : list) {
            if (null != val) {
                result.add(val.toString());
            }
        }
        return result;
    }
    public BigDecimal getDecimal(int idx, String key) throws Exception {
        return getRow(idx).getDecimal(key);
    }
    public BigDecimal getDecimal(int idx, String key, double def) {
        return getDecimal(idx, key, new BigDecimal(def));
    }
    /** Decimal value of {@code key} at {@code idx}; returns {@code def} when missing or unparsable. */
    public BigDecimal getDecimal(int idx, String key, BigDecimal def) {
        try {
            BigDecimal val = getDecimal(idx, key);
            if (null == val) {
                return def;
            }
            return val;
        } catch (Exception e) {
            return def;
        }
    }
    /**
     * Build a new DataSet containing only the given keys of every row.
     * The result carries the pagination info but no other metadata (e.g. source table).
     * @param keys keys to keep
     * @return new DataSet
     */
    public DataSet extract(String ... keys){
        DataSet result = new DataSet();
        for(DataRow row:rows){
            DataRow item = row.extract(keys);
            result.add(item);
        }
        result.navi = this.navi;
        return result;
    }
    public DataSet extract(List<String> keys){
        DataSet result = new DataSet();
        for(DataRow row:rows){
            DataRow item = row.extract(keys);
            result.add(item);
        }
        result.navi = this.navi;
        return result;
    }
    /**
     * HTML-formatted value (not implemented yet — currently identical to getString).
     *
     * @param index index
     * @param key key
     * @return String
     * @throws Exception Exception
     */
    public String getHtmlString(int index, String key) throws Exception {
        return getString(index, key);
    }
    public String getHtmlString(int index, String key, String def) {
        return getString(index, key, def);
    }
    public String getHtmlString(String key) throws Exception {
        return getHtmlString(0, key);
    }
    /**
     * Escaped string value of {@code key} in the row at {@code index}.
     *
     * @param index index
     * @param key key
     * @return escaped value
     * @throws Exception when the row is missing
     */
    public String getEscapeString(int index, String key) throws Exception {
        return EscapeUtil.escape(getString(index, key)).toString();
    }
    public String getEscapeString(int index, String key, String def) {
        try {
            return getEscapeString(index, key);
        } catch (Exception e) {
            // fall back to the escaped default
            return EscapeUtil.escape(def).toString();
        }
    }
    /** Double-escaped string value of {@code key} in the row at {@code index}. */
    public String getDoubleEscapeString(int index, String key) throws Exception {
        return EscapeUtil.doubleEscape(getString(index, key));
    }
    public String getDoubleEscapeString(int index, String key, String def) {
        try {
            return getDoubleEscapeString(index, key);
        } catch (Exception e) {
            return EscapeUtil.doubleEscape(def);
        }
    }
    public String getEscapeString(String key) throws Exception {
        return getEscapeString(0, key);
    }
    public String getDoubleEscapeString(String key) throws Exception {
        return getDoubleEscapeString(0, key);
    }
    /**
     * Int value of {@code key} in the row at {@code index}.
     *
     * @param index index
     * @param key key
     * @return int
     * @throws Exception when the row is missing or the value is not numeric
     */
    public int getInt(int index, String key) throws Exception {
        return getRow(index).getInt(key);
    }
    public int getInt(int index, String key, int def) {
        try {
            return getInt(index, key);
        } catch (Exception e) {
            return def;
        }
    }
    public int getInt(String key) throws Exception {
        return getInt(0, key);
    }
    public int getInt(String key, int def) {
        return getInt(0, key, def);
    }
    /**
     * Double value of {@code key} in the row at {@code index}.
     *
     * @param index index
     * @param key key
     * @return double
     * @throws Exception when the row is missing or the value is not numeric
     */
    public double getDouble(int index, String key) throws Exception {
        return getRow(index).getDouble(key);
    }
    public double getDouble(int index, String key, double def) {
        try {
            return getDouble(index, key);
        } catch (Exception e) {
            return def;
        }
    }
    public double getDouble(String key) throws Exception {
        return getDouble(0, key);
    }
    public double getDouble(String key, double def) {
        return getDouble(0, key, def);
    }
    /**
     * For every row, add {@code value} to column {@code key} (missing treated as 0)
     * and store the result under {@code target}.
     * @param target result key
     * @param key source key
     * @param value addend
     * @return this
     */
    public DataSet add(String target, String key, int value){
        for(DataRow row:rows){
            row.add(target, key, value);
        }
        return this;
    }
    public DataSet add(String target, String key, double value){
        for(DataRow row:rows){
            row.add(target, key, value);
        }
        return this;
    }
    public DataSet add(String target, String key, short value){
        for(DataRow row:rows){
            row.add(target, key, value);
        }
        return this;
    }
    public DataSet add(String target, String key, float value){
        for(DataRow row:rows){
            row.add(target, key, value);
        }
        return this;
    }
    public DataSet add(String target, String key, BigDecimal value){
        for(DataRow row:rows){
            row.add(target, key, value);
        }
        return this;
    }
    // in-place variants: result stored back under the source key
    public DataSet add(String key, int value){
        return add(key, key, value);
    }
    public DataSet add(String key, double value){
        return add(key, key, value);
    }
    public DataSet add(String key, short value){
        return add(key, key, value);
    }
    public DataSet add(String key, float value){
        return add(key, key, value);
    }
    public DataSet add(String key, BigDecimal value){
        return add(key, key, value);
    }
    /** For every row, subtract {@code value} from column {@code key} and store under {@code target}. */
    public DataSet subtract(String target, String key, int value){
        for(DataRow row:rows){
            row.subtract(target, key, value);
        }
        return this;
    }
    public DataSet subtract(String target, String key, double value){
        for(DataRow row:rows){
            row.subtract(target, key, value);
        }
        return this;
    }
    public DataSet subtract(String target, String key, short value){
        for(DataRow row:rows){
            row.subtract(target, key, value);
        }
        return this;
    }
    public DataSet subtract(String target, String key, float value){
        for(DataRow row:rows){
            row.subtract(target, key, value);
        }
        return this;
    }
    public DataSet subtract(String target, String key, BigDecimal value){
        for(DataRow row:rows){
            row.subtract(target, key, value);
        }
        return this;
    }
    public DataSet subtract(String key, int value){
        return subtract(key, key, value);
    }
    public DataSet subtract(String key, double value){
        return subtract(key, key, value);
    }
    public DataSet subtract(String key, short value){
        return subtract(key, key, value);
    }
    public DataSet subtract(String key, float value){
        return subtract(key, key, value);
    }
    public DataSet subtract(String key, BigDecimal value){
        return subtract(key, key, value);
    }
    /** For every row, multiply column {@code key} by {@code value} and store under {@code target}. */
    public DataSet multiply(String target, String key, int value){
        for(DataRow row:rows){
            row.multiply(target, key, value);
        }
        return this;
    }
    public DataSet multiply(String target, String key, double value){
        for(DataRow row:rows){
            row.multiply(target, key, value);
        }
        return this;
    }
    public DataSet multiply(String target, String key, short value){
        for(DataRow row:rows){
            row.multiply(target, key, value);
        }
        return this;
    }
    public DataSet multiply(String target, String key, float value){
        for(DataRow row:rows){
            row.multiply(target, key, value);
        }
        return this;
    }
    public DataSet multiply(String target, String key, BigDecimal value){
        for(DataRow row:rows){
            row.multiply(target, key, value);
        }
        return this;
    }
    public DataSet multiply(String key, int value){
        return multiply(key,key,value);
    }
    public DataSet multiply(String key, double value){
        return multiply(key,key,value);
    }
    public DataSet multiply(String key, short value){
        return multiply(key,key,value);
    }
    public DataSet multiply(String key, float value){
        return multiply(key,key,value);
    }
    public DataSet multiply(String key, BigDecimal value){
        return multiply(key,key,value);
    }
    /** For every row, divide column {@code key} by {@code value} and store under {@code target}. */
    public DataSet divide(String target, String key, int value){
        for(DataRow row:rows){
            row.divide(target, key, value);
        }
        return this;
    }
    public DataSet divide(String target, String key, double value){
        for(DataRow row:rows){
            row.divide(target, key, value);
        }
        return this;
    }
    public DataSet divide(String target, String key, short value){
        for(DataRow row:rows){
            row.divide(target, key, value);
        }
        return this;
    }
    public DataSet divide(String target, String key, float value){
        for(DataRow row:rows){
            row.divide(target, key, value);
        }
        return this;
    }
    /** BigDecimal division additionally takes a rounding mode. */
    public DataSet divide(String target, String key, BigDecimal value, int mode){
        for(DataRow row:rows){
            row.divide(target, key, value, mode);
        }
        return this;
    }
    public DataSet divide(String key, int value){
        return divide(key,key, value);
    }
    public DataSet divide(String key, double value){
        return divide(key,key, value);
    }
    public DataSet divide(String key, short value){
        return divide(key,key, value);
    }
    public DataSet divide(String key, float value){
        return divide(key,key, value);
    }
    public DataSet divide(String key, BigDecimal value, int mode){
        return divide(key,key, value, mode);
    }
    /** For every row, round column {@code key} to {@code scale} digits with the given mode and store under {@code target}. */
    public DataSet round(String target, String key, int scale, int mode){
        for (DataRow row:rows){
            row.round(target, key, scale, mode);
        }
        return this;
    }
    public DataSet round(String key, int scale, int mode){
        return round(key, key, scale, mode);
    }
/**
* DataSet拆分成size部分
* @param page 拆成多少部分
* @return list
*/
public List<DataSet> split(int page){
List<DataSet> list = new ArrayList<>();
int size = this.size();
int vol = size / page;//每页多少行
for(int i=0; i<page; i++){
int fr = i*vol;
int to = (i+1)*vol-1;
if(i == page-1){
to = size-1;
}
DataSet set = this.cuts(fr, to);
list.add(set);
}
return list;
}
/**
 * Serialize the rows into a fixed JSON envelope (unlike toJSON, which
 * serializes the whole object):
 * {type:"list", result, message, rows, success, navi:{page,pages,rows,vol}}.
 */
public String toString() {
    Map<String, Object> map = new HashMap<String, Object>();
    map.put("type", "list");
    map.put("result", result);
    map.put("message", message);
    map.put("rows", rows);
    map.put("success", result); // "success" deliberately mirrors "result"
    if(null != navi){
        Map<String,Object> navi_ = new HashMap<String,Object>();
        navi_.put("page", navi.getCurPage());
        navi_.put("pages", navi.getTotalPage());
        navi_.put("rows", navi.getTotalRow());
        navi_.put("vol", navi.getPageRows());
        map.put("navi", navi_);
    }
    return BeanUtil.map2json(map);
}
/**
 * Serialize this whole object to JSON via BeanUtil (differs from toString,
 * which builds a fixed envelope map).
 * @return JSON string
 */
public String toJson() {
    return BeanUtil.object2json(this);
}
// getJson/toJSON are aliases that both end up in toJson()
public String getJson() {
    return toJSON();
}
public String toJSON() {
    return toJson();
}
/**
 * Index the rows by the value of the given column.
 * Later rows overwrite earlier ones when they share the same key value.
 * @param key column whose string value becomes the map key, e.g. "ID" or "{ID}_{NM}"
 * @return map of key value to row
 */
public Map<String, DataRow> toMap(String key) {
    Map<String, DataRow> index = new HashMap<String, DataRow>();
    for (DataRow item : rows) {
        index.put(item.getString(key), item);
    }
    return index;
}
/**
 * Children object of the row at {@code idx} (delegates to DataRow.getChildren).
 * @param idx row index
 * @return children, or null when the row does not exist
 */
public Object getChildren(int idx) {
    DataRow row = getRow(idx);
    if (null != row) {
        return row.getChildren();
    }
    return null;
}
/** Children of the first row. */
public Object getChildren() {
    return getChildren(0);
}
/** Set children on the row at {@code idx}; silently ignored when the row does not exist. */
public DataSet setChildren(int idx, Object children) {
    DataRow row = getRow(idx);
    if (null != row) {
        row.setChildren(children);
    }
    return this;
}
public DataSet setChildren(Object children) {
    setChildren(0, children);
    return this;
}
/**
 * Parent object of the row at {@code idx} (delegates to DataRow.getParent).
 * @param idx row index
 * @return parent, or null when the row does not exist
 */
public Object getParent(int idx) {
    DataRow row = getRow(idx);
    if (null != row) {
        return row.getParent();
    }
    return null;
}
public Object getParent() {
    return getParent(0);
}
/** Set parent on the row at {@code idx}; silently ignored when the row does not exist. */
public DataSet setParent(int idx, Object parent) {
    DataRow row = getRow(idx);
    if (null != row) {
        row.setParent(parent);
    }
    return this;
}
public DataSet setParent(Object parent) {
    setParent(0, parent);
    return this;
}
/**
 * Convert the row at {@code index} into an entity of the given class.
 * @param <T> entity type
 * @param index row index
 * @param clazz target class
 * @return the entity, or null when the row does not exist
 */
public <T> T entity(int index, Class<T> clazz) {
    DataRow row = getRow(index);
    if (null != row) {
        return row.entity(clazz);
    }
    return null;
}
/**
 * Convert every row into an entity.
 * @param <T> entity type
 * @param clazz target class
 * @return list of entities (empty when rows is null)
 */
public <T> List<T> entity(Class<T> clazz) {
    List<T> list = new ArrayList<T>();
    if (null != rows) {
        for (DataRow row : rows) {
            list.add(row.entity(clazz));
        }
    }
    return list;
}
// Same as entity(int, Class) with swapped parameter order.
public <T> T entity(Class<T> clazz, int idx) {
    DataRow row = getRow(idx);
    if (null != row) {
        return row.entity(clazz);
    }
    return null;
}
/**
 * Set the data source. A value shaped "schema.table" (and containing no ':')
 * is additionally split into the schema and table fields. The data source is
 * also propagated to every row that does not have one yet.
 * @param dataSource data source name; null is a no-op
 * @return this
 */
public DataSet setDataSource(String dataSource) {
    if (null == dataSource) {
        return this;
    }
    this.dataSource = dataSource;
    if (dataSource.contains(".") && !dataSource.contains(":")) {
        schema = dataSource.substring(0, dataSource.indexOf("."));
        table = dataSource.substring(dataSource.indexOf(".") + 1);
    }
    for (DataRow row : rows) {
        if (BasicUtil.isEmpty(row.getDataSource())) {
            row.setDataSource(dataSource);
        }
    }
    return this;
}
/**
 * Merge this set with another, de-duplicating by the given keys.
 * Rows of this set are copied into the result first; rows of {@code set} are
 * appended only when no row with the same key values is already present.
 * @param set  set to merge in
 * @param keys columns used to detect duplicates; defaults to the configured
 *             primary key, falling back to "ID"
 * @return a new DataSet — neither operand is modified
 */
public DataSet union(DataSet set, String... keys) {
    DataSet result = new DataSet();
    if (null != rows) {
        int size = rows.size();
        for (int i = 0; i < size; i++) {
            result.add(rows.get(i));
        }
    }
    if (null == keys || keys.length == 0) {
        keys = new String[1];
        // "ID" fallback keeps the default consistent with contains(DataRow, String...)
        keys[0] = ConfigTable.getString("DEFAULT_PRIMARY_KEY", "ID");
    }
    int size = set.size();
    for (int i = 0; i < size; i++) {
        DataRow item = set.getRow(i);
        if (!result.contains(item, keys)) {
            result.add(item);
        }
    }
    return result;
}
/**
 * Merge this set with another without any de-duplication: all rows of this
 * set followed by all rows of {@code set}.
 * @param set set to append
 * @return a new DataSet — neither operand is modified
 */
public DataSet unionAll(DataSet set) {
    DataSet merged = new DataSet();
    if (null != rows) {
        for (DataRow row : rows) {
            merged.add(row);
        }
    }
    int total = set.size();
    for (int i = 0; i < total; i++) {
        merged.add(set.getRow(i));
    }
    return merged;
}
/**
 * Whether this set contains a row whose {@code keys} values equal those of {@code row}.
 * @param row  row to look for
 * @param keys compare columns; defaults to the configured primary key ("ID")
 * @return true when a matching row exists
 */
public boolean contains(DataRow row, String... keys) {
    if (null == rows || rows.size() == 0 || null == row) {
        return false;
    }
    if (null == keys || keys.length == 0) {
        keys = new String[1];
        keys[0] = ConfigTable.getString("DEFAULT_PRIMARY_KEY", "ID");
    }
    String params[] = packParam(row, keys);
    return exists(params);
}
/**
 * Pack [key1, value1, key2, value2, ...] pairs; values are read from {@code row}.
 * A key of the form "A:B" emits key A but reads column B.
 * NOTE(review): a null entry in {@code keys} leaves trailing null slots in the
 * returned array because it is sized before the null check.
 */
public String[] packParam(DataRow row, String... keys) {
    if (null == keys || null == row) {
        return null;
    }
    String params[] = new String[keys.length * 2];
    int idx = 0;
    for (String key : keys) {
        if (null == key) {
            continue;
        }
        String ks[] = BeanUtil.parseKeyValue(key);
        params[idx++] = ks[0];
        params[idx++] = row.getString(ks[1]);
    }
    return params;
}
/**
 * List-based variant of packParam: pack ["ID","1","CODE","A01"]-style kv pairs
 * from {@code row} for the given keys (same "A:B" aliasing as above).
 * @param row  source row
 * @param keys attribute names, e.g. ID, CODE
 * @return kv pairs
 */
public String[] packParam(DataRow row, List<String> keys) {
    if (null == keys || null == row) {
        return null;
    }
    String params[] = new String[keys.size() * 2];
    int idx = 0;
    for (String key : keys) {
        if (null == key) {
            continue;
        }
        String ks[] = BeanUtil.parseKeyValue(key);
        params[idx++] = ks[0];
        params[idx++] = row.getString(ks[1]);
    }
    return params;
}
/**
 * For every row, collect the matching rows of {@code items} (joined on
 * {@code keys}) into a DataSet stored under {@code field}.
 * Examples: dispatchs("children", items, "DEPAT_CD"), dispatchs("children", items, "CD:BASE_CD")
 * @param field     target key on each row; defaults to "ITEMS"
 * @param unique    when true the collected sub-set is marked skip so each item
 *                  is assigned to only one group; the skip flags on {@code items}
 *                  are reset at the end
 * @param recursion recursively dispatch into each collected sub-set
 * @param items     source of candidate rows
 * @param keys      join columns, "ID" or "ID:DEPT_ID"
 * @return this
 */
public DataSet dispatchs(String field, boolean unique, boolean recursion, DataSet items, String... keys) {
    if(null == keys || keys.length == 0){
        throw new RuntimeException("未指定对应关系");
    }
    if (null == items) {
        return this;
    }
    if (BasicUtil.isEmpty(field)) {
        field = "ITEMS";
    }
    for (DataRow row : rows) {
        if (null == row.get(field)) { // only fill rows not dispatched yet
            String[] kvs = packParam(row, reverseKey(keys));
            DataSet set = items.getRows(kvs);
            if (recursion) {
                set.dispatchs(field, unique, recursion, items, keys);
            }
            if(unique) {
                set.skip(true);
            }
            row.put(field, set);
        }
    }
    items.skip(false); // reset skip flags for subsequent queries
    return this;
}
// Convenience overloads; no-items variants dispatch the set onto itself.
public DataSet dispatchs(boolean unique, boolean recursion, DataSet items, String... keys) {
    return dispatchs("ITEMS", unique, recursion, items, keys);
}
public DataSet dispatchs(String field, DataSet items, String... keys) {
    return dispatchs(field,false, false, items, keys);
}
public DataSet dispatchs(DataSet items, String... keys) {
    return dispatchs("ITEMS", items, keys);
}
public DataSet dispatchs(boolean unique, boolean recursion, String... keys) {
    return dispatchs("ITEMS", unique, recursion, this, keys);
}
public DataSet dispatchs(String field, boolean unique, boolean recursion, String... keys) {
    return dispatchs(field, unique, recursion, this, keys);
}
/**
 * For every row, look up the single matching row of {@code items} (joined on
 * {@code keys}) and store it under {@code field}.
 * @param field     target key on each row; defaults to "ITEM"
 * @param unique    when true a matched item is marked skip so it is handed out only once
 * @param recursion accepted for signature symmetry with dispatchs — not used here
 * @param items     source of candidate rows
 * @param keys      join columns, "ID" or "ID:DEPT_ID"
 * @return this
 */
public DataSet dispatch(String field, boolean unique, boolean recursion, DataSet items, String... keys) {
    if(null == keys || keys.length == 0){
        throw new RuntimeException("未指定对应关系");
    }
    if (null == items) {
        return this;
    }
    if (BasicUtil.isEmpty(field)) {
        field = "ITEM";
    }
    for (DataRow row : rows) {
        if (null == row.get(field)) {
            String[] params = packParam(row, reverseKey(keys));
            DataRow result = items.getRow(params);
            // getRow may return null when nothing matches — guard before marking skip (was an NPE)
            if (unique && null != result) {
                result.skip = true;
            }
            row.put(field, result);
        }
    }
    items.skip(false); // reset skip flags for subsequent queries
    return this;
}
// Convenience overloads; no-items variants dispatch the set onto itself.
public DataSet dispatch(String field, DataSet items, String... keys) {
    return dispatch(field, false, false, items, keys);
}
public DataSet dispatch(DataSet items, String... keys) {
    return dispatch("ITEM", items, keys);
}
public DataSet dispatch(boolean unique, boolean recursion, String... keys) {
    return dispatch("ITEM", unique, recursion, this, keys);
}
public DataSet dispatch(String field, boolean unique, boolean recursion, String... keys) {
    return dispatch(field, unique, recursion, this, keys);
}
/**
 * Deprecated aliases kept for backward compatibility; they delegate directly
 * to dispatchs()/dispatch() — use those instead.
 * @param field     target key, defaults to "ITEMS"
 * @param unique    whether an item may be assigned to only one group
 * @param recursion recurse into sub-sets
 * @param items     source of candidate rows
 * @param keys      join columns, "ID" or "ID:DEPT_ID"
 * @return this
 */
@Deprecated
public DataSet dispatchItems(String field, boolean unique, boolean recursion, DataSet items, String... keys) {
    return dispatchs(field, unique, recursion, items, keys);
}
@Deprecated
public DataSet dispatchItems(boolean unique, boolean recursion, DataSet items, String... keys) {
    return dispatchs( unique, recursion, items, keys);
}
@Deprecated
public DataSet dispatchItems(String field, DataSet items, String... keys) {
    return dispatchs(field, items, keys);
}
@Deprecated
public DataSet dispatchItems(DataSet items, String... keys) {
    return dispatchs(items, keys);
}
@Deprecated
public DataSet dispatchItems(boolean unique, boolean recursion, String... keys) {
    return dispatchs( unique, recursion, keys);
}
@Deprecated
public DataSet dispatchItems(String field, boolean unique, boolean recursion, String... keys) {
    return dispatchs(field, unique, recursion, keys);
}
@Deprecated
public DataSet dispatchItem(String field, boolean unique, boolean recursion, DataSet items, String... keys) {
    return dispatch(field, unique, recursion, items, keys);
}
@Deprecated
public DataSet dispatchItem(String field, DataSet items, String... keys) {
    return dispatch(field, items, keys);
}
@Deprecated
public DataSet dispatchItem(DataSet items, String... keys) {
    return dispatch(items, keys);
}
@Deprecated
public DataSet dispatchItem(boolean unique, boolean recursion, String... keys) {
    return dispatch(unique, recursion, keys);
}
@Deprecated
public DataSet dispatchItem(String field, boolean unique, boolean recursion, String... keys) {
    return dispatch(field, unique, recursion, keys);
}
/**
 * Join on {@code keys}: for every row, find the matching row of {@code items}
 * and merge its columns into the row (existing columns are overwritten).
 * @param items set to join with
 * @param keys  join columns, "ID" or "ID:USER_ID"
 * @return this (rows are modified in place)
 */
public DataSet join(DataSet items, String... keys) {
    if (null == items || null == keys || keys.length == 0) {
        return this;
    }
    for (DataRow row : rows) {
        String[] params = packParam(row, reverseKey(keys));
        DataRow result = items.getRow(params);
        if (null != result) {
            row.copy(result, result.keys());
        }
    }
    return this;
}
/** Lower-case every key of every row. */
public DataSet toLowerKey() {
    for (DataRow row : rows) {
        row.toLowerKey();
    }
    return this;
}
/** Upper-case every key of every row. */
public DataSet toUpperKey() {
    for (DataRow row : rows) {
        row.toUpperKey();
    }
    return this;
}
/**
 * Group by {@code keys}: one row per distinct key combination, each carrying
 * its member rows in an "ITEMS" sub-set (dispatchs with unique=true so every
 * row lands in exactly one group).
 * @param keys group-by columns
 * @return grouped DataSet
 */
public DataSet group(String... keys) {
    DataSet result = distinct(keys);
    result.dispatchs(true,false, this, keys);
    return result;
}
/** Alias of union(). */
public DataSet or(DataSet set, String... keys) {
    return this.union(set, keys);
}
/** All rows matching the given key/value filters (unbounded range). */
public DataSet getRows(Map<String, String> kvs) {
    return getRows(0, -1, kvs);
}
/**
 * Intersection of several sets, folded left to right.
 * NOTE(review): when {@code sets} holds a single set, that set itself (not a
 * copy) is returned — callers mutating the result also mutate the input.
 * @param distinct de-duplicate the result by keys
 * @param sets     sets to intersect
 * @param keys     compare columns
 * @return intersection (empty DataSet when sets is null/empty)
 */
public static DataSet intersection(boolean distinct, List<DataSet> sets, String... keys) {
    DataSet result = null;
    if (null != sets && sets.size() > 0) {
        for (DataSet set : sets) {
            if (null == result) {
                result = set;
            } else {
                result = result.intersection(distinct, set, keys);
            }
        }
    }
    if (null == result) {
        result = new DataSet();
    }
    return result;
}
public static DataSet intersection(List<DataSet> sets, String... keys) {
    return intersection(false, sets, keys);
}
/**
 * Intersection: rows of this set that also appear in {@code set}.
 * @param distinct de-duplicate the result by keys
 * @param set      other set
 * @param keys     compare columns; "ID:USER_ID" maps this set's ID to the
 *                 other set's USER_ID
 * @return new DataSet of cloned rows
 */
public DataSet intersection(boolean distinct, DataSet set, String... keys) {
    DataSet result = new DataSet();
    if (null == set) {
        return result;
    }
    for (DataRow row : rows) {
        String[] kv = reverseKey(keys);
        if (set.contains(row, kv)) { // row is in both sets
            if(!result.contains(row, kv)){ // not collected yet
                result.add((DataRow) row.clone());
            }else {
                if(!distinct){ // duplicate allowed unless distinct requested
                    result.add((DataRow) row.clone());
                }
            }
        }
    }
    return result;
}
public DataSet intersection(DataSet set, String... keys) {
    return intersection(false, set, keys);
}
// and() is an alias of intersection()
public DataSet and(boolean distinct, DataSet set, String... keys) {
    return intersection(distinct, set, keys);
}
public DataSet and(DataSet set, String... keys) {
    return intersection(false, set, keys);
}
/**
 * Complement: rows of this set (the superset) that are NOT in {@code set}
 * (the subset).
 * NOTE(review): the implementation is currently byte-identical to difference() —
 * confirm whether a distinct semantic was intended.
 * @param distinct de-duplicate the result by keys
 * @param set      subset
 * @param keys     compare columns
 * @return new DataSet of cloned rows
 */
public DataSet complement(boolean distinct, DataSet set, String... keys) {
    DataSet result = new DataSet();
    for (DataRow row : rows) {
        String[] kv = reverseKey(keys);
        if (null == set || !set.contains(row, kv)) {
            if (!distinct || !result.contains(row, kv)) {
                result.add((DataRow) row.clone());
            }
        }
    }
    return result;
}
public DataSet complement(DataSet set, String... keys) {
    return complement(false, set, keys);
}
/**
 * Difference: rows present in this set but absent from {@code set}.
 * Produces a new DataSet of cloned rows; this set is not modified.
 * @param distinct de-duplicate the result by keys
 * @param set      other set
 * @param keys     compare columns, e.g. "CD" or "CD:WORK_CD"
 * @return new DataSet of cloned rows
 */
public DataSet difference(boolean distinct, DataSet set, String... keys) {
    DataSet result = new DataSet();
    for (DataRow row : rows) {
        String[] kv = reverseKey(keys);
        if (null == set || !set.contains(row, kv)) {
            if (!distinct || !result.contains(row, kv)) {
                result.add((DataRow) row.clone());
            }
        }
    }
    return result;
}
public DataSet difference(DataSet set, String... keys) {
    return difference(false, set, keys);
}
/**
 * Flip every aliased key "k:v" into "v:k"; plain keys pass through unchanged.
 * @param keys possibly-aliased keys (null tolerated)
 * @return flipped copy, never null
 */
private String[] reverseKey(String[] keys) {
    if (null == keys) {
        return new String[0];
    }
    String[] flipped = new String[keys.length];
    for (int i = 0; i < keys.length; i++) {
        String item = keys[i];
        if (BasicUtil.isNotEmpty(item) && item.contains(":")) {
            String[] kv = BeanUtil.parseKeyValue(item);
            item = kv[1] + ":" + kv[0];
        }
        flipped[i] = item;
    }
    return flipped;
}
/**
 * Remove rows whose given columns are all empty; with no keys, remove rows
 * that are entirely empty. Iterates backwards so removal does not shift
 * indices still to be visited.
 * @param keys columns to check; empty means "the whole row"
 * @return this
 */
public DataSet removeEmptyRow(String... keys) {
    int size = this.size();
    for (int i = size - 1; i >= 0; i--) {
        DataRow row = getRow(i);
        if (null == keys || keys.length == 0) {
            if (row.isEmpty()) {
                this.remove(row);
            }
        } else {
            boolean isEmpty = true;
            for (String key : keys) {
                if (row.isNotEmpty(key)) {
                    isEmpty = false;
                    break;
                }
            }
            if (isEmpty) {
                this.remove(row);
            }
        }
    }
    return this;
}
/**
 * Rename column {@code key} to {@code target} in every row.
 * @param key    old column name
 * @param target new column name
 * @param remove whether the old column is removed (delegated to DataRow.changeKey)
 * @return this
 */
public DataSet changeKey(String key, String target, boolean remove) {
    for(DataRow row:rows){
        row.changeKey(key, target, remove);
    }
    return this;
}
public DataSet changeKey(String key, String target) {
    return changeKey(key, target, true);
}
/**
 * Remove the given columns from every row.
 * @param columns columns to remove; null is a no-op
 * @return this
 */
public DataSet removeColumn(String... columns) {
    if (null != columns) {
        for (String column : columns) {
            for (DataRow row : rows) {
                row.remove(column);
            }
        }
    }
    return this;
}
/**
 * Remove columns whose value is empty (null or "") from every row.
 * @param columns columns to check; when empty, DataRow checks all columns
 * @return this
 */
public DataSet removeEmptyColumn(String... columns) {
    for (DataRow row : rows) {
        row.removeEmpty(columns);
    }
    return this;
}
/**
 * Replace NULL values with "" in every row (delegates to DataRow.nvl).
 * @return this
 */
public DataSet nvl() {
    int total = rows.size();
    for (int i = 0; i < total; i++) {
        rows.get(i).nvl();
    }
    return this;
}
/* ********************************* Collection interface: all delegate to rows ********************************* */
public boolean add(DataRow e) {
    return rows.add((DataRow) e);
}
@SuppressWarnings({"rawtypes", "unchecked"})
public boolean addAll(Collection c) {
    return rows.addAll(c);
}
public void clear() {
    rows.clear();
}
public boolean contains(Object o) {
    return rows.contains(o);
}
public boolean containsAll(Collection<?> c) {
    return rows.containsAll(c);
}
public Iterator<DataRow> iterator() {
    return rows.iterator();
}
public boolean remove(Object o) {
    return rows.remove(o);
}
public boolean removeAll(Collection<?> c) {
    return rows.removeAll(c);
}
public boolean retainAll(Collection<?> c) {
    return rows.retainAll(c);
}
public Object[] toArray() {
    return rows.toArray();
}
@SuppressWarnings("unchecked")
public Object[] toArray(Object[] a) {
    return rows.toArray(a);
}
public String getSchema() {
    return schema;
}
public DataSet setSchema(String schema) {
    this.schema = schema;
    return this;
}
public String getTable() {
    return table;
}
/**
 * Set the table; "schema.table" is split into both fields.
 */
public DataSet setTable(String table) {
    if (null != table && table.contains(".")) {
        String[] tbs = table.split("\\.");
        this.table = tbs[1];
        this.schema = tbs[0];
    } else {
        this.table = table;
    }
    return this;
}
/**
 * Whether this set has expired: compares elapsed time since creation
 * against the given time-to-live.
 * @param millisecond time-to-live in milliseconds
 * @return true when expired
 */
public boolean isExpire(int millisecond) {
    if (System.currentTimeMillis() - createTime > millisecond) {
        return true;
    }
    return false;
}
public boolean isExpire(long millisecond) {
    if (System.currentTimeMillis() - createTime > millisecond) {
        return true;
    }
    return false;
}
/** Expiry against the configured expires value; -1 means "never expires". */
public boolean isExpire() {
    if (getExpires() == -1) {
        return false;
    }
    if (System.currentTimeMillis() - createTime > getExpires()) {
        return true;
    }
    return false;
}
public long getCreateTime() {
    return createTime;
}
public List<DataRow> getRows() {
    return rows;
}
/************************** getter setter ***************************************/
/**
 * Expiry time-to-live in milliseconds.
 * @return long
 */
public long getExpires() {
    return expires;
}
public DataSet setExpires(long millisecond) {
    this.expires = millisecond;
    return this;
}
public DataSet setExpires(int millisecond) {
    this.expires = millisecond;
    return this;
}
public boolean isResult() {
    return result;
}
// success is the same flag as result
public boolean isSuccess() {
    return result;
}
public DataSet setResult(boolean result) {
    this.result = result;
    return this;
}
public Exception getException() {
    return exception;
}
public DataSet setException(Exception exception) {
    this.exception = exception;
    return this;
}
public String getMessage() {
    return message;
}
public DataSet setMessage(String message) {
    this.message = message;
    return this;
}
public PageNavi getNavi() {
    return navi;
}
public DataSet setNavi(PageNavi navi) {
    this.navi = navi;
    return this;
}
public DataSet setRows(List<DataRow> rows) {
    this.rows = rows;
    return this;
}
/**
 * Effective data source: "schema.table" when both are set, the bare table
 * when only it is set, otherwise the stored dataSource string.
 */
public String getDataSource() {
    String ds = table;
    if (BasicUtil.isNotEmpty(ds) && BasicUtil.isNotEmpty(schema)) {
        ds = schema + "." + ds;
    }
    if (BasicUtil.isEmpty(ds)) {
        ds = dataSource;
    }
    return ds;
}
/** Alias of asc(): sort ascending by the given keys. */
public DataSet order(final String... keys) {
    return asc(keys);
}
// put() variants apply the same key/value to EVERY row.
public DataSet put(String key, Object value, boolean pk, boolean override) {
    for (DataRow row : rows) {
        row.put(key, value, pk, override);
    }
    return this;
}
public DataSet put(String key, Object value, boolean pk) {
    for (DataRow row : rows) {
        row.put(key, value, pk);
    }
    return this;
}
public DataSet put(String key, Object value) {
    for (DataRow row : rows) {
        row.put(key, value);
    }
    return this;
}
/**
 * Pivot rows to columns.
 * Example table layout: (id, name, year, subject, score, grade).
 * @param pks       identity keys (e.g. id, name) — one result row per combination
 * @param classKeys classification keys (e.g. year, subject)
 * @param valueKeys value keys (e.g. score, grade); when empty the whole matching
 *                  row becomes the value
 * @return one row per distinct pks combination. For each classKeys combination
 *         a column named "class1-class2" (plus "-valueKey" when several value
 *         keys are given) holds the value; null when no row matched.
 */
public DataSet pivot(List<String> pks, List<String> classKeys, List<String> valueKeys) {
    DataSet result = distinct(pks);
    DataSet classValues = distinct(classKeys); // e.g. [{year:2010,subject:math}, {year:2010,subject:physics}, ...]
    for (DataRow row : result) {
        for (DataRow classValue : classValues) {
            DataRow params = new DataRow();
            params.copy(row, pks).copy(classValue);
            DataRow valueRow = getRow(params);
            if(null != valueRow){
                valueRow.skip = true; // consume each source row only once
            }
            String finalKey = concatValue(classValue,"-"); // e.g. "2010-math"
            if(null != valueKeys && valueKeys.size() > 0){
                if(valueKeys.size() == 1){
                    // single value key: column name is just the class combination
                    if (null != valueRow) {
                        row.put(finalKey, valueRow.get(valueKeys.get(0)));
                    } else {
                        row.put(finalKey, null);
                    }
                }else {
                    for (String valueKey : valueKeys) {
                        // several value keys: e.g. {2010-math-score:100, 2010-math-grade:A}
                        if (null != valueRow) {
                            row.put(finalKey + "-" + valueKey, valueRow.get(valueKey));
                        } else {
                            row.put(finalKey + "-" + valueKey, null);
                        }
                    }
                }
            }else{
                // no value keys: store the whole matching row
                if (null != valueRow){
                    row.put(finalKey, valueRow);
                }else{
                    row.put(finalKey, null);
                }
            }
        }
    }
    skip(false); // reset the skip flags set above
    return result;
}
public DataSet pivot(String[] pks, String[] classKeys, String[] valueKeys) {
    return pivot(Arrays.asList(pks),Arrays.asList(classKeys),Arrays.asList(valueKeys));
}
/**
 * Pivot with comma-separated key lists.
 * Example: table (name, subject, score) pivots to
 * [{name:"zhang", math:100, physics:90}, ...]
 * @param pk       identity keys, comma-separated ("id,name")
 * @param classKey classification keys, comma-separated ("subject,year")
 * @param valueKey value keys, comma-separated ("score,grade")
 * @return pivoted DataSet
 */
public DataSet pivot(String pk, String classKey, String valueKey) {
    List<String> pks = new ArrayList<>(Arrays.asList(pk.trim().split(",")));
    List<String> classKeys = new ArrayList<>(Arrays.asList(classKey.trim().split(",")));
    List<String> valueKeys = new ArrayList<>(Arrays.asList(valueKey.trim().split(",")));
    return pivot(pks, classKeys, valueKeys);
}
/** Pivot without value keys: each cell holds the whole matching row. */
public DataSet pivot(String pk, String classKey) {
    List<String> pks = new ArrayList<>(Arrays.asList(pk.trim().split(",")));
    List<String> classKeys = new ArrayList<>(Arrays.asList(classKey.trim().split(",")));
    List<String> valueKeys = new ArrayList<>();
    return pivot(pks, classKeys, valueKeys);
}
/**
 * Varargs convenience overload of pivot(List, List, List).
 * The value keys are copied into a list and forwarded; previously the varargs
 * array itself was passed back to this same overload, causing infinite
 * recursion (StackOverflowError).
 * @param pks       identity keys
 * @param classKeys classification keys
 * @param valueKeys value keys (optional)
 * @return pivoted DataSet
 */
public DataSet pivot(List<String> pks, List<String> classKeys, String ... valueKeys) {
    List<String> list = new ArrayList<>();
    if(null != valueKeys){
        for(String item:valueKeys){
            list.add(item);
        }
    }
    return pivot(pks, classKeys, list); // forward the List, not the array
}
/** Join all values of {@code row} with {@code split}, in key order. */
private String concatValue(DataRow row, String split){
    StringBuilder builder = new StringBuilder();
    List<String> keys = row.keys();
    for(String key:keys){
        if(builder.length() > 0){
            builder.append(split);
        }
        builder.append(row.getString(key));
    }
    return builder.toString();
}
/**
 * Flatten a row into [k1, v1, k2, v2, ...].
 * Values come from getStringNvl — presumably the null-safe variant of
 * getString; confirm in DataRow.
 */
private String[] kvs(DataRow row){
    List<String> keys = row.keys();
    int size = keys.size();
    String[] kvs = new String[size*2];
    for(int i=0; i<size; i++){
        String k = keys.get(i);
        String v = row.getStringNvl(k);
        kvs[i*2] = k;
        kvs[i*2+1] = v;
    }
    return kvs;
}
/**
 * Sort rows ascending by {@code keys}. Per key: numbers compare numerically
 * (via BigDecimal), dates chronologically, everything else as strings; a null
 * value sorts before a non-null one, two nulls fall through to the next key.
 * @param keys sort columns, applied in order as tie-breakers
 * @return this (sorted in place)
 */
public DataSet asc(final String... keys) {
    Collections.sort(rows, new Comparator<DataRow>() {
        public int compare(DataRow r1, DataRow r2) {
            int result = 0;
            for (String key : keys) {
                Object v1 = r1.get(key);
                Object v2 = r2.get(key);
                if (null == v1) {
                    if (null == v2) {
                        continue; // both null: try the next key
                    }
                    return -1; // null sorts first
                } else {
                    if (null == v2) {
                        return 1;
                    }
                }
                if (BasicUtil.isNumber(v1) && BasicUtil.isNumber(v2)) {
                    BigDecimal num1 = new BigDecimal(v1.toString());
                    BigDecimal num2 = new BigDecimal(v2.toString());
                    result = num1.compareTo(num2);
                } else if (v1 instanceof Date && v2 instanceof Date) {
                    Date date1 = (Date)v1;
                    Date date2 = (Date)v2;
                    result = date1.compareTo(date2);
                } else {
                    result = v1.toString().compareTo(v2.toString());
                }
                if (result != 0) {
                    return result;
                }
            }
            return 0;
        }
    });
    isAsc = true;
    isDesc = false;
    return this;
}
/**
 * Sort rows descending by {@code keys} — mirror image of asc(): numbers
 * numerically, dates chronologically, everything else as strings; a null
 * value sorts after a non-null one, two nulls fall through to the next key.
 * @param keys sort columns, applied in order as tie-breakers
 * @return this (sorted in place)
 */
public DataSet desc(final String... keys) {
    Collections.sort(rows, new Comparator<DataRow>() {
        public int compare(DataRow r1, DataRow r2) {
            int result = 0;
            for (String key : keys) {
                Object v1 = r1.get(key);
                Object v2 = r2.get(key);
                if (null == v1) {
                    if (null == v2) {
                        continue; // both null: try the next key
                    }
                    return 1; // null sorts last
                } else {
                    if (null == v2) {
                        return -1;
                    }
                }
                if (BasicUtil.isNumber(v1) && BasicUtil.isNumber(v2)) {
                    BigDecimal val1 = new BigDecimal(v1.toString());
                    BigDecimal val2 = new BigDecimal(v2.toString());
                    result = val2.compareTo(val1);
                } else if (v1 instanceof Date && v2 instanceof Date) {
                    Date date1 = (Date)v1;
                    Date date2 = (Date)v2;
                    result = date2.compareTo(date1);
                } else {
                    result = v2.toString().compareTo(v1.toString());
                }
                if (result != 0) {
                    return result;
                }
            }
            return 0;
        }
    });
    isAsc = false;
    isDesc = true;
    return this;
}
/** Mark all columns of every row as update columns (delegates to DataRow). */
public DataSet addAllUpdateColumns() {
    for (DataRow row : rows) {
        row.addAllUpdateColumns();
    }
    return this;
}
/** Clear the update-column marks of every row. */
public DataSet clearUpdateColumns() {
    for (DataRow row : rows) {
        row.clearUpdateColumns();
    }
    return this;
}
/** Remove null-valued entries for the given keys from every row. */
public DataSet removeNull(String... keys) {
    for (DataRow row : rows) {
        row.removeNull(keys);
    }
    return this;
}
/** Upper-case a key when the IS_UPPER_KEY config is on; otherwise pass through. */
private static String key(String key) {
    if (null != key && ConfigTable.IS_UPPER_KEY) {
        key = key.toUpperCase();
    }
    return key;
}
/**
 * Replace every NULL value in every row with {@code value}.
 * @param value replacement value
 * @return this
 */
public DataSet replaceNull(String value) {
    for (DataRow row : rows) {
        row.replaceNull(value);
    }
    return this;
}
/**
 * Replace every empty value (null or "") in every row with {@code value}.
 * @param value replacement value
 * @return this
 */
public DataSet replaceEmpty(String value) {
    for (DataRow row : rows) {
        row.replaceEmpty(value);
    }
    return this;
}
/**
 * Replace NULL values of column {@code key} in every row with {@code value}.
 * @param key   column to fix
 * @param value replacement value
 * @return this
 */
public DataSet replaceNull(String key, String value) {
    for (DataRow row : rows) {
        row.replaceNull(key, value);
    }
    return this;
}
/**
 * Replace empty values of column {@code key} in every row with {@code value}.
 * @param key   column to fix
 * @param value replacement value
 * @return this
 */
public DataSet replaceEmpty(String key, String value) {
    for (DataRow row : rows) {
        row.replaceEmpty(key, value);
    }
    return this;
}
/** Substring replacement in column {@code key} of every row; any null argument is a no-op. */
public DataSet replace(String key, String oldChar, String newChar) {
    if (null == key || null == oldChar || null == newChar) {
        return this;
    }
    for (DataRow row : rows) {
        row.replace(key, oldChar, newChar);
    }
    return this;
}
/** Substring replacement across every row (delegated to DataRow.replace). */
public DataSet replace(String oldChar, String newChar) {
    for (DataRow row : rows) {
        row.replace(oldChar, newChar);
    }
    return this;
}
/* ************************* 类sql操作 ************************************** */
/**
* 随机取一行
* @return DataRow
*/
/**
 * Pick one row at random; null when the set is empty.
 * @return randomly chosen DataRow or null
 */
public DataRow random() {
    int total = size();
    if (total <= 0) {
        return null;
    }
    return getRow(BasicUtil.getRandomNumber(0, total - 1));
}
/**
 * Pick {@code qty} distinct rows at random.
 * qty is clamped to [0, size()]. Rows are drawn from THIS set; previously the
 * draw read from the (initially empty) result set, collecting nulls instead of
 * actual rows.
 * NOTE(review): distinctness relies on List.contains/DataRow equality — if the
 * backing list holds the same DataRow object more than once, qty close to
 * size() can loop for a long time; confirm rows are unique objects.
 * @param qty number of rows to pick
 * @return new DataSet with properties cloned from this one
 */
public DataSet randoms(int qty) {
    DataSet set = new DataSet();
    int size = size();
    if (qty < 0) {
        qty = 0;
    }
    if (qty > size) {
        qty = size;
    }
    for (int i = 0; i < qty; i++) {
        while (true) {
            int idx = BasicUtil.getRandomNumber(0, size - 1);
            DataRow row = getRow(idx); // draw from this set, not from the result
            if (!set.contains(row)) {
                set.add(row);
                break;
            }
        }
    }
    set.cloneProperty(this);
    return set;
}
/**
 * Pick a random number of rows, between {@code min} and {@code max}.
 * @param min lower bound of the row count
 * @param max upper bound of the row count
 * @return new DataSet of randomly chosen rows
 */
public DataSet randoms(int min, int max) {
    int qty = BasicUtil.getRandomNumber(min, max);
    return randoms(qty);
}
/** Alias of distinct(). */
public DataSet unique(String... keys) {
    return distinct(keys);
}
/**
 * Filter rows whose {@code key} column matches the regular expression.
 * @param key   column to test
 * @param regex regular expression
 * @param mode  match mode (RegularUtil semantics)
 * @return new DataSet with properties cloned from this one
 */
public DataSet regex(String key, String regex, Regular.MATCH_MODE mode) {
    DataSet set = new DataSet();
    String tmpValue;
    for (DataRow row : this) {
        tmpValue = row.getString(key);
        if (RegularUtil.match(tmpValue, regex, mode)) {
            set.add(row);
        }
    }
    set.cloneProperty(this);
    return set;
}
public DataSet regex(String key, String regex) {
    return regex(key, regex, Regular.MATCH_MODE.MATCH);
}
/** True only when EVERY row passes DataRow.checkRequired for the given keys. */
public boolean checkRequired(String... keys) {
    for (DataRow row : rows) {
        if (!row.checkRequired(keys)) {
            return false;
        }
    }
    return true;
}
public Map<String, Object> getQueryParams() {
    return queryParams;
}
public DataSet setQueryParams(Map<String, Object> params) {
    this.queryParams = params;
    return this;
}
public Object getQueryParam(String key) {
    return queryParams.get(key);
}
public DataSet addQueryParam(String key, Object param) {
    queryParams.put(key, param);
    return this;
}
public String getDatalink() {
    return datalink;
}
public void setDatalink(String datalink) {
    this.datalink = datalink;
}
public class Select implements Serializable {
private static final long serialVersionUID = 1L;
private boolean ignoreCase = true; // ignore case when comparing string values
/**
 * Whether NULL is ignored: when true, equal/notEqual/like/contains return
 * false as soon as either side is null (so even null vs null never matches,
 * which means such a row is filtered out by BOTH equals and notEquals).
 */
private boolean ignoreNull = true;
public DataSet setIgnoreCase(boolean bol) {
    this.ignoreCase = bol;
    return DataSet.this;
}
public DataSet setIgnoreNull(boolean bol) {
    this.ignoreNull = bol;
    return DataSet.this;
}
/**
 * Filter rows where column {@code key} equals {@code value}
 * (case-insensitively when ignoreCase is set).
 * @param key   column to compare
 * @param value expected value
 * @return matching subset
 */
public DataSet equals(String key, String value) {
    return equals(DataSet.this, key, value);
}
private DataSet equals(DataSet src, String key, String value) {
    DataSet set = new DataSet();
    String tmpValue;
    for (DataRow row : src) {
        tmpValue = row.getString(key);
        if (ignoreNull) {
            if (null == tmpValue || null == value) {
                continue; // either side null: never a match
            }
        } else {
            if (null == tmpValue && null == value) {
                set.add(row); // null == null counts as a match
                continue;
            }
        }
        if (null != tmpValue) {
            boolean chk = false;
            if (ignoreCase) {
                chk = tmpValue.equalsIgnoreCase(value);
            } else {
                chk = tmpValue.equals(value);
            }
            if (chk) {
                set.add(row);
            }
        }
    }
    set.cloneProperty(src);
    return set;
}
/**
 * Filter rows where column {@code key} does NOT equal {@code value}
 * (case-insensitively when ignoreCase is set).
 * @param key   column to compare
 * @param value value to exclude
 * @return matching subset
 */
public DataSet notEquals(String key, String value) {
    return notEquals(DataSet.this, key, value);
}
private DataSet notEquals(DataSet src, String key, String value) {
    DataSet set = new DataSet();
    String tmpValue;
    for (DataRow row : src) {
        tmpValue = row.getString(key);
        if (ignoreNull) {
            if (null == tmpValue || null == value) {
                continue; // either side null: row filtered out entirely
            }
        } else {
            // NOTE(review): null vs null is treated as equal, so the row is
            // ADDED here — confirm this is the intended notEquals semantics
            if (null == tmpValue && null == value) {
                set.add(row);
                continue;
            }
        }
        if (null != tmpValue) {
            boolean chk = false;
            if (ignoreCase) {
                chk = !tmpValue.equalsIgnoreCase(value);
            } else {
                chk = !tmpValue.equals(value);
            }
            if (chk) {
                set.add(row);
            }
        }
    }
    set.cloneProperty(src);
    return set;
}
/**
 * Filter rows whose {@code key} column contains {@code value} as a substring
 * (case-insensitively when ignoreCase is set).
 * @param key   column to test
 * @param value substring to look for
 * @return matching subset
 */
public DataSet contains(String key, String value) {
    return contains(DataSet.this, key, value);
}
private DataSet contains(DataSet src, String key, String value) {
    DataSet set = new DataSet();
    String tmpValue;
    for (DataRow row : src) {
        tmpValue = row.getString(key);
        if (ignoreNull) {
            if (null == tmpValue || null == value) {
                continue;
            }
        } else {
            if (null == tmpValue && null == value) {
                set.add(row); // null "contains" null when nulls are honored
                continue;
            }
        }
        if (null != tmpValue) {
            if (null == value) {
                continue;
            }
            if (ignoreCase) {
                tmpValue = tmpValue.toLowerCase();
                value = value.toLowerCase();
            }
            if (tmpValue.contains(value)) {
                set.add(row);
            }
        }
    }
    set.cloneProperty(src);
    return set;
}
/**
 * Filter rows whose {@code key} column matches the SQL-style pattern:
 * % matches any run of characters, _ matches exactly one, leading ! maps to ^.
 * The pattern is translated to a regex once, AFTER case folding — previously
 * the already-translated regex was lowercased per row, which turned the \S in
 * "(\s|\S)*" into \s and broke every ignoreCase match; "_" is now also wrapped
 * in a group so the alternation does not swallow neighboring pattern text.
 * @param key     column to test
 * @param pattern SQL-style wildcard pattern
 * @return matching subset
 */
public DataSet like(String key, String pattern) {
    return like(DataSet.this, key, pattern);
}
private DataSet like(DataSet src, String key, String pattern) {
    DataSet set = new DataSet();
    String regex = null;
    if (null != pattern) {
        String p = ignoreCase ? pattern.toLowerCase() : pattern;
        regex = p.replace("!", "^").replace("_", "(\\s|\\S)").replace("%", "(\\s|\\S)*");
    }
    String tmpValue;
    for (DataRow row : src) {
        tmpValue = row.getString(key);
        if (ignoreNull) {
            if (null == tmpValue || null == pattern) {
                continue;
            }
        } else {
            if (null == tmpValue && null == pattern) {
                set.add(row);
                continue;
            }
        }
        if (null != tmpValue) {
            if (null == regex) {
                continue;
            }
            if (ignoreCase) {
                tmpValue = tmpValue.toLowerCase();
            }
            if (RegularUtil.match(tmpValue, regex, Regular.MATCH_MODE.MATCH)) {
                set.add(row);
            }
        }
    }
    set.cloneProperty(src);
    return set;
}
/**
 * Filter rows whose {@code key} column does NOT match the SQL-style pattern
 * (same wildcard translation as like()). A null pattern yields an empty set,
 * preserving the original contract. The regex is built once, after case
 * folding — previously the translated regex was lowercased per row, turning
 * \S into \s and corrupting the match whenever ignoreCase (the default) was
 * on; "_" is now also grouped so alternation precedence is correct.
 * @param key     column to test
 * @param pattern SQL-style wildcard pattern
 * @return non-matching subset
 */
public DataSet notLike(String key, String pattern) {
    return notLike(DataSet.this, key, pattern);
}
private DataSet notLike(DataSet src, String key, String pattern) {
    DataSet set = new DataSet();
    if (null == pattern) {
        return set; // original behavior: null pattern -> empty result
    }
    String p = ignoreCase ? pattern.toLowerCase() : pattern;
    String regex = p.replace("!", "^").replace("_", "(\\s|\\S)").replace("%", "(\\s|\\S)*");
    String tmpValue;
    for (DataRow row : src) {
        tmpValue = row.getString(key);
        // pattern is guaranteed non-null here, so a null value can never be
        // added regardless of ignoreNull — matches the original flow
        if (null == tmpValue) {
            continue;
        }
        if (ignoreCase) {
            tmpValue = tmpValue.toLowerCase();
        }
        if (!RegularUtil.match(tmpValue, regex, Regular.MATCH_MODE.MATCH)) {
            set.add(row);
        }
    }
    set.cloneProperty(src);
    return set;
}
/**
 * Filter rows whose {@code key} column starts with {@code prefix}
 * (case-insensitively when ignoreCase is set).
 */
public DataSet startWith(String key, String prefix) {
    return startWith(DataSet.this, key, prefix);
}
private DataSet startWith(DataSet src, String key, String prefix) {
    DataSet set = new DataSet();
    String tmpValue;
    for (DataRow row : src) {
        tmpValue = row.getString(key);
        if (ignoreNull) {
            if (null == tmpValue || null == prefix) {
                continue;
            }
        } else {
            if (null == tmpValue && null == prefix) {
                set.add(row);
                continue;
            }
        }
        if (null != tmpValue) {
            if (null == prefix) {
                continue;
            }
            if (ignoreCase) {
                tmpValue = tmpValue.toLowerCase();
                prefix = prefix.toLowerCase();
            }
            if (tmpValue.startsWith(prefix)) {
                set.add(row);
            }
        }
    }
    set.cloneProperty(src);
    return set;
}
/**
 * Filter rows whose {@code key} column ends with {@code suffix}
 * (case-insensitively when ignoreCase is set).
 */
public DataSet endWith(String key, String suffix) {
    return endWith(DataSet.this, key, suffix);
}
private DataSet endWith(DataSet src, String key, String suffix) {
    DataSet set = new DataSet();
    String tmpValue;
    for (DataRow row : src) {
        tmpValue = row.getString(key);
        if (ignoreNull) {
            if (null == tmpValue || null == suffix) {
                continue;
            }
        } else {
            if (null == tmpValue && null == suffix) {
                set.add(row);
                continue;
            }
        }
        if (null != tmpValue) {
            if (null == suffix) {
                continue;
            }
            if (ignoreCase) {
                tmpValue = tmpValue.toLowerCase();
                suffix = suffix.toLowerCase();
            }
            if (tmpValue.endsWith(suffix)) {
                set.add(row);
            }
        }
    }
    set.cloneProperty(src);
    return set;
}
    /**
     * Keep the rows whose value for {@code key} equals one of {@code values}.
     * Note: the method-level {@code <T>} shadows the class type parameter; the
     * comparison is performed on string representations via BasicUtil.
     *
     * @param key    column/field name to test
     * @param values accepted values (varargs form)
     * @return a new DataSet containing only the matching rows
     */
    public <T> DataSet in(String key, T... values) {
        return in(DataSet.this, key, BeanUtil.array2list(values));
    }
    /** Collection overload of {@link #in(String, Object...)}. */
    public <T> DataSet in(String key, Collection<T> values) {
        return in(DataSet.this, key, values);
    }
    /**
     * Implementation over an explicit source set; delegates null/case handling
     * to BasicUtil.containsString using the enclosing ignoreNull/ignoreCase flags.
     */
    private <T> DataSet in(DataSet src, String key, Collection<T> values) {
        DataSet set = new DataSet();
        for (DataRow row : src) {
            if (BasicUtil.containsString(ignoreNull, ignoreCase, values, row.getString(key))) {
                set.add(row);
            }
        }
        set.cloneProperty(src);
        return set;
    }
    /**
     * Keep the rows whose value for {@code key} is NOT among {@code values}.
     *
     * @param key    column/field name to test
     * @param values rejected values (varargs form)
     * @return a new DataSet containing only the non-matching rows
     */
    public <T> DataSet notIn(String key, T... values) {
        return notIn(DataSet.this, key, BeanUtil.array2list(values));
    }
    /** Collection overload of {@link #notIn(String, Object...)}. */
    public <T> DataSet notIn(String key, Collection<T> values) {
        return notIn(DataSet.this, key, values);
    }
    /**
     * Implementation over an explicit source set.
     * If {@code values} is null the result is an empty set (no rows are copied).
     */
    private <T> DataSet notIn(DataSet src, String key, Collection<T> values) {
        DataSet set = new DataSet();
        if (null != values) {
            String tmpValue = null;
            for (DataRow row : src) {
                tmpValue = row.getString(key);
                // with ignoreNull on, rows lacking a value are silently dropped
                if (ignoreNull && null == tmpValue) {
                    continue;
                }
                if (!BasicUtil.containsString(ignoreNull, ignoreCase, values, tmpValue)) {
                    set.add(row);
                }
            }
        }
        set.cloneProperty(src);
        return set;
    }
    /**
     * Keep the rows where every listed key maps to null.
     * Keys are applied one after another, so the filters are ANDed.
     *
     * @param keys column/field names that must all be null
     * @return the filtered DataSet
     */
    public DataSet isNull(String... keys) {
        return isNull(DataSet.this, keys);
    }
    /** Applies the single-key filter successively for each key (logical AND). */
    private DataSet isNull(DataSet src, String... keys) {
        DataSet set = src;
        if (null != keys) {
            for (String key : keys) {
                set = isNull(set, key);
            }
        }
        return set;
    }
    /** Single-key filter: rows whose value for {@code key} is null. */
    private DataSet isNull(DataSet src, String key) {
        DataSet set = new DataSet();
        for(DataRow row:src){
            if(null == row.get(key)){
                set.add(row);
            }
        }
        // NOTE(review): unlike the other filters, this one does not call
        // set.cloneProperty(src) — confirm whether that is intentional.
        return set;
    }
    /**
     * Keep the rows where every listed key maps to a non-null value.
     *
     * @param keys column/field names that must all be non-null
     * @return the filtered DataSet
     */
    public DataSet isNotNull(String... keys) {
        return isNotNull(DataSet.this, keys);
    }
    /** Applies the single-key filter successively for each key (logical AND). */
    private DataSet isNotNull(DataSet src, String... keys) {
        DataSet set = src;
        if (null != keys) {
            for (String key : keys) {
                set = isNotNull(set, key);
            }
        }
        return set;
    }
    /** Single-key filter: rows whose value for {@code key} is non-null. */
    private DataSet isNotNull(DataSet src, String key) {
        DataSet set = new DataSet();
        for(DataRow row:src){
            if(null != row.get(key)){
                set.add(row);
            }
        }
        // NOTE(review): cloneProperty(src) is not called here, unlike like()/in() etc.
        return set;
    }
    /** Alias for {@link #isNotNull(String...)}. */
    public DataSet notNull(String... keys) {
        return isNotNull(keys);
    }
    /**
     * Keep the rows where every listed key is empty (as defined by DataRow.isEmpty).
     *
     * @param keys column/field names that must all be empty
     * @return the filtered DataSet
     */
    public DataSet isEmpty(String... keys) {
        return isEmpty(DataSet.this, keys);
    }
    /** Applies the single-key filter successively for each key (logical AND). */
    private DataSet isEmpty(DataSet src, String... keys) {
        DataSet set = src;
        if (null != keys) {
            for (String key : keys) {
                set = isEmpty(set, key);
            }
        }
        return set;
    }
    /** Single-key filter: rows for which DataRow.isEmpty(key) is true. */
    private DataSet isEmpty(DataSet src, String key) {
        DataSet set = new DataSet();
        for(DataRow row:src){
            if(row.isEmpty(key)){
                set.add(row);
            }
        }
        // NOTE(review): cloneProperty(src) is not called here, unlike like()/in() etc.
        return set;
    }
    /** Alias for {@link #isEmpty(String...)}. */
    public DataSet empty(String... keys) {
        return isEmpty(keys);
    }
    /**
     * Keep the rows where every listed key is non-empty (as defined by DataRow.isNotEmpty).
     *
     * @param keys column/field names that must all be non-empty
     * @return the filtered DataSet
     */
    public DataSet isNotEmpty(String... keys) {
        return isNotEmpty(DataSet.this, keys);
    }
    /** Applies the single-key filter successively for each key (logical AND). */
    private DataSet isNotEmpty(DataSet src, String... keys) {
        DataSet set = src;
        if (null != keys) {
            for (String key : keys) {
                set = isNotEmpty(set, key);
            }
        }
        return set;
    }
    /** Single-key filter: rows for which DataRow.isNotEmpty(key) is true. */
    private DataSet isNotEmpty(DataSet src, String key) {
        DataSet set = new DataSet();
        for(DataRow row:src){
            if(row.isNotEmpty(key)){
                set.add(row);
            }
        }
        // NOTE(review): cloneProperty(src) is not called here, unlike like()/in() etc.
        return set;
    }
    /** Alias for {@link #isNotEmpty(String...)}. */
    public DataSet notEmpty(String... keys) {
        return isNotEmpty(keys);
    }
    /**
     * Keep the rows whose value for {@code key} is strictly less than {@code value}.
     * The comparison strategy is chosen from the type of {@code value}:
     * numeric (BigDecimal), date/datetime (millisecond diff), otherwise string order.
     *
     * @param key   column/field name to test
     * @param value upper bound (exclusive); null yields an empty result
     * @return a new DataSet containing only the matching rows
     */
    public <T> DataSet less(String key, T value) {
        return less(DataSet.this, key, value);
    }
    /** Implementation of {@link #less(String, Object)} over an explicit source set. */
    private <T> DataSet less(DataSet src, String key, T value) {
        DataSet set = new DataSet();
        if (null == value) {
            return set;
        }
        if (BasicUtil.isNumber(value)) {
            // numeric comparison via BigDecimal to avoid lexicographic artifacts
            BigDecimal number = new BigDecimal(value.toString());
            for (DataRow row : src) {
                if (null == row.get(key)) {
                    continue;
                }
                if (row.getDecimal(key, 0).compareTo(number) < 0) {
                    set.add(row);
                }
            }
        } else if (BasicUtil.isDate(value) || BasicUtil.isDateTime(value)) {
            Date date = DateUtil.parse(value.toString());
            for (DataRow row : src) {
                if (null == row.get(key)) {
                    continue;
                }
                // diff < 0 means the row's date lies before the bound
                if (row.isNotEmpty(key) &&
                        DateUtil.diff(DateUtil.DATE_PART_MILLISECOND, date, row.getDate(key, new Date())) < 0) {
                    set.add(row);
                }
            }
        } else {
            // fallback: lexicographic String comparison
            for (DataRow row : src) {
                if (null == row.get(key)) {
                    continue;
                }
                if (row.getString(key).compareTo(value.toString()) < 0) {
                    set.add(row);
                }
            }
        }
        set.cloneProperty(src);
        return set;
    }
public <T> DataSet lessEqual(String key, T value) {
return lessEqual(DataSet.this, key, value);
}
private <T> DataSet lessEqual(DataSet src, String key, T value) {
DataSet set = new DataSet();
if (null == value) {
return set;
}
if (BasicUtil.isNumber(value)) {
BigDecimal number = new BigDecimal(value.toString());
for (DataRow row : src) {
if (null == row.get(key)) {
continue;
}
if (row.getDecimal(key, 0).compareTo(number) <= 0) {
set.add(row);
}
}
} else if (BasicUtil.isDate(value) || BasicUtil.isDateTime(value)) {
Date date = DateUtil.parse(value.toString());
for (DataRow row : src) {
if (null == row.get(key)) {
continue;
}
if (row.isNotEmpty(key) &&
DateUtil.diff(DateUtil.DATE_PART_MILLISECOND, date, row.getDate(key, new Date())) <= 0) {
set.add(row);
}
}
} else {
for (DataRow row : src) {
if (null == row.get(key)) {
continue;
}
if (row.getString(key).compareTo(value.toString()) >= 0) {
set.add(row);
}
}
}
set.cloneProperty(src);
return set;
}
    /**
     * Keep the rows whose value for {@code key} is strictly greater than {@code value}.
     * The comparison strategy is chosen from the type of {@code value}:
     * numeric (BigDecimal), date/datetime (millisecond diff), otherwise string order.
     *
     * @param key   column/field name to test
     * @param value lower bound (exclusive); null yields an empty result
     * @return a new DataSet containing only the matching rows
     */
    public <T> DataSet greater(String key, T value) {
        return greater(DataSet.this, key, value);
    }
    /** Implementation of {@link #greater(String, Object)} over an explicit source set. */
    private <T> DataSet greater(DataSet src, String key, T value) {
        DataSet set = new DataSet();
        if (null == value) {
            return set;
        }
        if (BasicUtil.isNumber(value)) {
            // numeric comparison via BigDecimal to avoid lexicographic artifacts
            BigDecimal number = new BigDecimal(value.toString());
            for (DataRow row : src) {
                if (null == row.get(key)) {
                    continue;
                }
                if (row.getDecimal(key, 0).compareTo(number) > 0) {
                    set.add(row);
                }
            }
        } else if (BasicUtil.isDate(value) || BasicUtil.isDateTime(value)) {
            Date date = DateUtil.parse(value.toString());
            for (DataRow row : src) {
                if (null == row.get(key)) {
                    continue;
                }
                // diff > 0 means the row's date lies after the bound
                if (row.isNotEmpty(key) &&
                        DateUtil.diff(DateUtil.DATE_PART_MILLISECOND, date, row.getDate(key, new Date())) > 0) {
                    set.add(row);
                }
            }
        } else {
            // fallback: lexicographic String comparison
            for (DataRow row : src) {
                if (null == row.get(key)) {
                    continue;
                }
                if (row.getString(key).compareTo(value.toString()) > 0) {
                    set.add(row);
                }
            }
        }
        set.cloneProperty(src);
        return set;
    }
    /**
     * Keep the rows whose value for {@code key} is greater than or equal to {@code value}.
     * The comparison strategy is chosen from the type of {@code value}:
     * numeric (BigDecimal), date/datetime (millisecond diff), otherwise string order.
     *
     * @param key   column/field name to test
     * @param value lower bound (inclusive); null yields an empty result
     * @return a new DataSet containing only the matching rows
     */
    public <T> DataSet greaterEqual(String key, T value) {
        return greaterEqual(DataSet.this, key, value);
    }
    /** Implementation of {@link #greaterEqual(String, Object)} over an explicit source set. */
    private <T> DataSet greaterEqual(DataSet src, String key, T value) {
        DataSet set = new DataSet();
        if (null == value) {
            return set;
        }
        if (BasicUtil.isNumber(value)) {
            // numeric comparison via BigDecimal to avoid lexicographic artifacts
            BigDecimal number = new BigDecimal(value.toString());
            for (DataRow row : src) {
                if (null == row.get(key)) {
                    continue;
                }
                if (row.getDecimal(key, 0).compareTo(number) >= 0) {
                    set.add(row);
                }
            }
        } else if (BasicUtil.isDate(value) || BasicUtil.isDateTime(value)) {
            Date date = DateUtil.parse(value.toString());
            for (DataRow row : src) {
                if (null == row.get(key)) {
                    continue;
                }
                // diff >= 0 means the row's date lies on or after the bound
                if (row.isNotEmpty(key) &&
                        DateUtil.diff(DateUtil.DATE_PART_MILLISECOND, date, row.getDate(key, new Date())) >= 0) {
                    set.add(row);
                }
            }
        } else {
            // fallback: lexicographic String comparison
            for (DataRow row : src) {
                if (null == row.get(key)) {
                    continue;
                }
                if (row.getString(key).compareTo(value.toString()) >= 0) {
                    set.add(row);
                }
            }
        }
        set.cloneProperty(src);
        return set;
    }
public <T> DataSet between(String key, T min, T max) {
return between(DataSet.this, key, min, max);
}
private <T> DataSet between(DataSet src, String key, T min, T max) {
DataSet set = greaterEqual(src, key, min);
set = lessEqual(set, key, max);
return set;
}
}
public Select select = new Select();
} | anylineorg/anyline | anyline-core/src/main/java/org/anyline/entity/DataSet.java | Java | apache-2.0 | 115,477 |
package ua.job4j.loop;
/**
 * Computes the factorial of a given non-negative number.
 *
 * NOTE(review): the result is an {@code int}, so n! overflows (wraps) for
 * n &gt; 12; negative input yields 1 because the loop body never runs.
 *
 * @author vfrundin
 * @since 05.11.2017
 * @version 1.0
 */
public class Factorial {
    /**
     * Calculates n! iteratively.
     *
     * @param n the number whose factorial is required
     * @return the product 1 * 2 * ... * n (1 when n is 0 or 1)
     */
    public int calc(int n) {
        int product = 1;
        // multiplying from 2 upward; for n < 2 the loop is skipped and 1 is returned
        for (int factor = 2; factor <= n; factor++) {
            product *= factor;
        }
        return product;
    }
}
| Krok3/junior | chapter_001/src/main/java/ua/job4j/loop/Factorial.java | Java | apache-2.0 | 728 |
/* Copyright (C) 2013-2022 TU Dortmund
* This file is part of AutomataLib, http://www.automatalib.net/.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.automatalib.graphs.base.compact;
import net.automatalib.commons.smartcollections.ResizingArrayStorage;
import org.checkerframework.checker.nullness.qual.Nullable;
/**
 * A compact bidirectional graph that additionally stores one (nullable)
 * property per node in a growable array, indexed by the node's integer id.
 *
 * @param <NP> node property type
 * @param <EP> edge property type (handled by the superclass)
 */
public class CompactBidiGraph<@Nullable NP, @Nullable EP> extends AbstractCompactBidiGraph<NP, EP> {

    // node-id-indexed storage; grown lazily in setNodeProperty
    private final ResizingArrayStorage<NP> nodeProperties;

    /** Creates a graph with the storage's default initial capacity. */
    public CompactBidiGraph() {
        this.nodeProperties = new ResizingArrayStorage<>(Object.class);
    }

    /**
     * Creates a graph pre-sized for the given number of nodes.
     *
     * @param initialCapacity expected node count
     */
    public CompactBidiGraph(int initialCapacity) {
        super(initialCapacity);
        this.nodeProperties = new ResizingArrayStorage<>(Object.class, initialCapacity);
    }

    @Override
    public void setNodeProperty(int node, @Nullable NP property) {
        // grow the backing array on demand so any valid node id can be written
        nodeProperties.ensureCapacity(node + 1);
        nodeProperties.array[node] = property;
    }

    @Override
    public NP getNodeProperty(int node) {
        // ids beyond the written range simply have no property yet -> null
        return node < nodeProperties.array.length ? nodeProperties.array[node] : null;
    }
}
| LearnLib/automatalib | core/src/main/java/net/automatalib/graphs/base/compact/CompactBidiGraph.java | Java | apache-2.0 | 1,632 |
package org.clinical3PO.common.security;
import java.util.Collection;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.authentication.AuthenticationProvider;
import org.springframework.security.authentication.BadCredentialsException;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.AuthenticationException;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.stereotype.Component;
import org.clinical3PO.common.security.model.User;
import org.clinical3PO.common.security.service.UserService;
@Component
/**
 * Spring Security AuthenticationProvider that validates a username/password
 * pair against users loaded through {@link UserService}.
 */
public class CustomAuthenticationProvider implements AuthenticationProvider {

    @Autowired
    private UserService userService;

    /**
     * Authenticates the supplied token.
     *
     * @param authentication incoming token carrying username (principal) and password (credentials)
     * @return a fully populated UsernamePasswordAuthenticationToken with the user's authorities
     * @throws AuthenticationException if the user is unknown or the password does not match
     */
    @Override
    public Authentication authenticate(Authentication authentication) throws AuthenticationException {
        String username = authentication.getName();
        String password = (String) authentication.getCredentials();
        User user = userService.loadUserByUsername(username);
        if (user == null) {
            throw new BadCredentialsException("Username not found.");
        }
        // SECURITY NOTE(review): this compares the raw password with the stored
        // one, implying passwords are kept in plain text. Strongly consider
        // hashing (e.g. a Spring Security PasswordEncoder such as BCrypt) and
        // comparing with encoder.matches(raw, encoded) instead.
        if (!password.equals(user.getPassword())) {
            throw new BadCredentialsException("Wrong password.");
        }
        Collection<? extends GrantedAuthority> authorities = user.getAuthorities();
        return new UsernamePasswordAuthenticationToken(user, password, authorities);
    }

    /**
     * NOTE(review): returning true for every token type makes this provider
     * claim support for all Authentication implementations; the conventional
     * check is UsernamePasswordAuthenticationToken.class.isAssignableFrom(arg0).
     */
    @Override
    public boolean supports(Class<?> arg0) {
        return true;
    }
}
| Clinical3PO/Platform | dev/clinical3PO/app/src/main/java/org/clinical3PO/common/security/CustomAuthenticationProvider.java | Java | apache-2.0 | 1,572 |
<?php
/**
* This file is part of the SevenShores/NetSuite library
* AND originally from the NetSuite PHP Toolkit.
*
* New content:
* @package ryanwinchester/netsuite-php
* @copyright Copyright (c) Ryan Winchester
* @license http://www.apache.org/licenses/LICENSE-2.0 Apache-2.0
* @link https://github.com/ryanwinchester/netsuite-php
*
* Original content:
* @copyright Copyright (c) NetSuite Inc.
* @license https://raw.githubusercontent.com/ryanwinchester/netsuite-php/master/original/NetSuite%20Application%20Developer%20License%20Agreement.txt
* @link http://www.netsuite.com/portal/developers/resources/suitetalk-sample-applications.shtml
*
* generated: 2020-04-10 09:56:55 PM UTC
*/
namespace NetSuite\Classes;
/**
 * SOAP fault envelope returned by the NetSuite web service:
 * a machine-readable fault code plus a human-readable message.
 */
class NSSoapFault {
    /**
     * @var \NetSuite\Classes\FaultCodeType
     */
    public $code;
    /**
     * @var string
     */
    public $message;
    /**
     * Property-name => SOAP type map used by the serializer.
     * @var array<string, string>
     */
    static $paramtypesmap = array(
        "code" => "FaultCodeType",
        "message" => "string",
    );
}
| RyanWinchester/netsuite-php | src/Classes/NSSoapFault.php | PHP | apache-2.0 | 1,021 |
package com.example.mywechat.utils;
import java.util.ArrayList;
import java.util.List;
import android.app.Activity;
/**
 * ActivityCollector manages all live activities of the application,
 * allowing them to be finished in one call (e.g. for a global "exit").
 *
 * NOTE(review): the list is a public static mutable ArrayList and is not
 * thread-safe; access is expected from the main thread only — confirm.
 *
 * @author dzhiqin
 */
public class ActivityCollector {
    // registry of every Activity currently alive
    public static List<Activity> activities=new ArrayList<Activity>();
    /** Registers an activity; call from Activity.onCreate(). */
    public static void addActivity(Activity activity){
        activities.add(activity);
    }
    /** Unregisters an activity; call from Activity.onDestroy(). */
    public static void removeActivity(Activity activity){
        activities.remove(activity);
    }
    /**
     * Finishes every registered activity that is not already finishing.
     */
    public static void finishAll(){
        for(Activity activity:activities){
            if(!activity.isFinishing()){
                activity.finish();
            }
        }
    }
    public ActivityCollector() {
        // auto-generated constructor stub
    }
}
| dzhiqin/MyWeChat | src/com/example/mywechat/utils/ActivityCollector.java | Java | apache-2.0 | 697 |
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def strip_region_tags(sample_text):
    """Remove blank lines and region-tag lines from sample text.

    A region-tag line is any line containing the marker "# [" (e.g.
    "# [START ...]" / "# [END ...]"). The remaining lines are re-joined
    with newlines in their original order.
    """
    kept = []
    for line in sample_text.split("\n"):
        # drop empty lines and lines carrying a region tag marker
        if line and "# [" not in line:
            kept.append(line)
    return "\n".join(kept)
| googleapis/python-bigquery | samples/magics/_helpers.py | Python | apache-2.0 | 823 |
/*
* The MIT License (MIT)
*
* Copyright (c) 2015 Cleidimar Viana
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.seamusdawkins.tablayout.fragments;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.RelativeLayout;
import android.widget.TextView;
import com.seamusdawkins.tablayout.R;
/**
 * First tab page: inflates fragment_one and shows the string resource
 * {@code str_first} in its text view.
 */
public class FirstFragment extends Fragment {
    // text view displaying the page label (id "action" in fragment_one)
    TextView tv;
    // NOTE(review): never assigned or read in this class — candidate for removal
    RelativeLayout rl;

    /**
     * Inflates the fragment layout and populates its text view.
     */
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        View rootView = inflater.inflate(R.layout.fragment_one, container, false);
        tv = (TextView) rootView.findViewById(R.id.action);
        tv.setText(R.string.str_first);
        return rootView;
    }
}
| cleidimarviana/Tabs-Material | app/src/main/java/com/seamusdawkins/tablayout/fragments/FirstFragment.java | Java | apache-2.0 | 1,928 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gora.cassandra.store;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import me.prettyprint.cassandra.model.ConfigurableConsistencyLevel;
import me.prettyprint.cassandra.serializers.ByteBufferSerializer;
import me.prettyprint.cassandra.serializers.IntegerSerializer;
import me.prettyprint.cassandra.serializers.StringSerializer;
import me.prettyprint.cassandra.service.CassandraHostConfigurator;
import me.prettyprint.hector.api.Cluster;
import me.prettyprint.hector.api.Keyspace;
import me.prettyprint.hector.api.beans.OrderedRows;
import me.prettyprint.hector.api.beans.OrderedSuperRows;
import me.prettyprint.hector.api.beans.Row;
import me.prettyprint.hector.api.beans.SuperRow;
import me.prettyprint.hector.api.ddl.ColumnFamilyDefinition;
import me.prettyprint.hector.api.ddl.ComparatorType;
import me.prettyprint.hector.api.ddl.KeyspaceDefinition;
import me.prettyprint.hector.api.factory.HFactory;
import me.prettyprint.hector.api.mutation.Mutator;
import me.prettyprint.hector.api.query.QueryResult;
import me.prettyprint.hector.api.query.RangeSlicesQuery;
import me.prettyprint.hector.api.query.RangeSuperSlicesQuery;
import me.prettyprint.hector.api.HConsistencyLevel;
import me.prettyprint.hector.api.Serializer;
import org.apache.avro.Schema;
import org.apache.avro.Schema.Type;
import org.apache.avro.generic.GenericArray;
import org.apache.avro.util.Utf8;
import org.apache.gora.cassandra.query.CassandraQuery;
import org.apache.gora.cassandra.serializers.GenericArraySerializer;
import org.apache.gora.cassandra.serializers.GoraSerializerTypeInferer;
import org.apache.gora.cassandra.serializers.TypeUtils;
import org.apache.gora.mapreduce.GoraRecordReader;
import org.apache.gora.persistency.Persistent;
import org.apache.gora.persistency.impl.PersistentBase;
import org.apache.gora.persistency.State;
import org.apache.gora.persistency.StatefulHashMap;
import org.apache.gora.query.Query;
import org.apache.gora.util.ByteUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Thin wrapper around a Hector (Cassandra) connection that translates Gora
 * persistence operations — column writes, super-column writes, range scans —
 * into Hector API calls, driven by the {@link CassandraMapping} of the
 * persistent class.
 *
 * @param <K> key class of the persistent entities
 * @param <T> persistent entity class
 */
public class CassandraClient<K, T extends PersistentBase> {
  public static final Logger LOG = LoggerFactory.getLogger(CassandraClient.class);

  // Hector client-side handles for the cluster and keyspace
  private Cluster cluster;
  private Keyspace keyspace;
  // shared mutator used for all writes; guarded with synchronized blocks below
  private Mutator<K> mutator;
  private Class<K> keyClass;
  private Class<T> persistentClass;
  // gora mapping: keyspace name, column families, field-to-column mapping
  private CassandraMapping cassandraMapping = null;
  // serializer converting keys of type K to bytes
  private Serializer<K> keySerializer;

  /**
   * Connects to the cluster described by the mapping of the persistent class,
   * creates the keyspace (and column families) if missing, and prepares the
   * key serializer and mutator used for subsequent writes.
   *
   * @param keyClass        key class of the persistent entities
   * @param persistentClass persistent entity class whose mapping drives the connection
   * @throws Exception if the mapping cannot be loaded or the cluster is unreachable
   */
  public void initialize(Class<K> keyClass, Class<T> persistentClass) throws Exception {
    this.keyClass = keyClass;

    // get cassandra mapping with persistent class
    this.persistentClass = persistentClass;
    this.cassandraMapping = CassandraMappingManager.getManager().get(persistentClass);
    // LOG.info("persistentClass=" + persistentClass.getName() + " -> cassandraMapping=" + cassandraMapping);

    this.cluster = HFactory.getOrCreateCluster(this.cassandraMapping.getClusterName(), new CassandraHostConfigurator(this.cassandraMapping.getHostName()));

    // add keyspace to cluster
    checkKeyspace();

    // Just create a Keyspace object on the client side, corresponding to an already existing keyspace with already created column families.
    this.keyspace = HFactory.createKeyspace(this.cassandraMapping.getKeyspaceName(), this.cluster);

    this.keySerializer = GoraSerializerTypeInferer.getSerializer(keyClass);
    this.mutator = HFactory.createMutator(this.keyspace, this.keySerializer);
  }

  /**
   * Check if keyspace already exists.
   */
  public boolean keyspaceExists() {
    KeyspaceDefinition keyspaceDefinition = this.cluster.describeKeyspace(this.cassandraMapping.getKeyspaceName());
    return (keyspaceDefinition != null);
  }

  /**
   * Check if keyspace already exists. If not, create it.
   * In this method, we also utilise Hector's {@ConfigurableConsistencyLevel}
   * logic. It is set by passing a ConfigurableConsistencyLevel object right
   * when the Keyspace is created. Currently consistency level is .ONE which
   * permits consistency to wait until one replica has responded.
   */
  public void checkKeyspace() {
    // "describe keyspace <keyspaceName>;" query
    KeyspaceDefinition keyspaceDefinition = this.cluster.describeKeyspace(this.cassandraMapping.getKeyspaceName());
    if (keyspaceDefinition == null) {
      List<ColumnFamilyDefinition> columnFamilyDefinitions = this.cassandraMapping.getColumnFamilyDefinitions();

      // GORA-197: force BytesType comparator on every column family
      for (ColumnFamilyDefinition cfDef : columnFamilyDefinitions) {
        cfDef.setComparatorType(ComparatorType.BYTESTYPE);
      }

      keyspaceDefinition = HFactory.createKeyspaceDefinition(this.cassandraMapping.getKeyspaceName(), "org.apache.cassandra.locator.SimpleStrategy", 1, columnFamilyDefinitions);
      this.cluster.addKeyspace(keyspaceDefinition, true);
      // LOG.info("Keyspace '" + this.cassandraMapping.getKeyspaceName() + "' in cluster '" + this.cassandraMapping.getClusterName() + "' was created on host '" + this.cassandraMapping.getHostName() + "'");

      // Create a customized Consistency Level
      ConfigurableConsistencyLevel configurableConsistencyLevel = new ConfigurableConsistencyLevel();
      Map<String, HConsistencyLevel> clmap = new HashMap<String, HConsistencyLevel>();

      // Define CL.ONE for ColumnFamily "ColumnFamily"
      clmap.put("ColumnFamily", HConsistencyLevel.ONE);

      // In this we use CL.ONE for read and writes. But you can use different CLs if needed.
      configurableConsistencyLevel.setReadCfConsistencyLevels(clmap);
      configurableConsistencyLevel.setWriteCfConsistencyLevels(clmap);

      // Then let the keyspace know
      // NOTE(review): the literal name "Keyspace" is passed here rather than the
      // mapped keyspace name — confirm this is intentional.
      HFactory.createKeyspace("Keyspace", this.cluster, configurableConsistencyLevel);

      keyspaceDefinition = null;
    }
    else {
      List<ColumnFamilyDefinition> cfDefs = keyspaceDefinition.getCfDefs();
      if (cfDefs == null || cfDefs.size() == 0) {
        LOG.warn(keyspaceDefinition.getName() + " does not have any column family.");
      }
      else {
        // warn about column families whose comparator is not BytesType (GORA-197)
        for (ColumnFamilyDefinition cfDef : cfDefs) {
          ComparatorType comparatorType = cfDef.getComparatorType();
          if (! comparatorType.equals(ComparatorType.BYTESTYPE)) {
            // GORA-197
            LOG.warn("The comparator type of " + cfDef.getName() + " column family is " + comparatorType.getTypeName()
                + ", not BytesType. It may cause a fatal error on column validation later.");
          }
          else {
            // LOG.info("The comparator type of " + cfDef.getName() + " column family is " + comparatorType.getTypeName() + ".");
          }
        }
      }
    }
  }

  /**
   * Drop keyspace.
   */
  public void dropKeyspace() {
    // "drop keyspace <keyspaceName>;" query
    this.cluster.dropKeyspace(this.cassandraMapping.getKeyspaceName());
  }

  /**
   * Insert a field in a column.
   * @param key the row key
   * @param fieldName the field name
   * @param value the field value.
   */
  public void addColumn(K key, String fieldName, Object value) {
    if (value == null) {
      return;
    }

    ByteBuffer byteBuffer = toByteBuffer(value);

    String columnFamily = this.cassandraMapping.getFamily(fieldName);
    String columnName = this.cassandraMapping.getColumn(fieldName);

    if (columnName == null) {
      LOG.warn("Column name is null for field=" + fieldName + " with value=" + value.toString());
      return;
    }

    // the shared mutator is not thread-safe, hence the lock
    synchronized(mutator) {
      HectorUtils.insertColumn(mutator, key, columnFamily, columnName, byteBuffer);
    }
  }

  /**
   * Insert a member in a super column. This is used for map and record Avro types.
   * @param key the row key
   * @param fieldName the field name
   * @param columnName the column name (the member name, or the index of array)
   * @param value the member value
   */
  @SuppressWarnings("unchecked")
  public void addSubColumn(K key, String fieldName, ByteBuffer columnName, Object value) {
    if (value == null) {
      return;
    }

    ByteBuffer byteBuffer = toByteBuffer(value);

    String columnFamily = this.cassandraMapping.getFamily(fieldName);
    String superColumnName = this.cassandraMapping.getColumn(fieldName);

    synchronized(mutator) {
      HectorUtils.insertSubColumn(mutator, key, columnFamily, superColumnName, columnName, byteBuffer);
    }
  }

  /** Convenience overload: sub-column addressed by a String member name. */
  public void addSubColumn(K key, String fieldName, String columnName, Object value) {
    addSubColumn(key, fieldName, StringSerializer.get().toByteBuffer(columnName), value);
  }

  /** Convenience overload: sub-column addressed by an Integer (array index). */
  public void addSubColumn(K key, String fieldName, Integer columnName, Object value) {
    addSubColumn(key, fieldName, IntegerSerializer.get().toByteBuffer(columnName), value);
  }

  /**
   * Delete a member in a super column. This is used for map and record Avro types.
   * @param key the row key
   * @param fieldName the field name
   * @param columnName the column name (the member name, or the index of array)
   */
  @SuppressWarnings("unchecked")
  public void deleteSubColumn(K key, String fieldName, ByteBuffer columnName) {
    String columnFamily = this.cassandraMapping.getFamily(fieldName);
    String superColumnName = this.cassandraMapping.getColumn(fieldName);

    synchronized(mutator) {
      HectorUtils.deleteSubColumn(mutator, key, columnFamily, superColumnName, columnName);
    }
  }

  /** Convenience overload: deletes the sub-column addressed by a String member name. */
  public void deleteSubColumn(K key, String fieldName, String columnName) {
    deleteSubColumn(key, fieldName, StringSerializer.get().toByteBuffer(columnName));
  }

  /**
   * Stores an Avro array: one sub-column per element when the target column
   * family is a super column family, otherwise the whole array serialized
   * into a single regular column.
   */
  @SuppressWarnings("unchecked")
  public void addGenericArray(K key, String fieldName, GenericArray array) {
    if (isSuper( cassandraMapping.getFamily(fieldName) )) {
      int i= 0;
      for (Object itemValue: array) {

        // TODO: hack, do not store empty arrays
        if (itemValue instanceof GenericArray<?>) {
          if (((GenericArray)itemValue).size() == 0) {
            continue;
          }
        } else if (itemValue instanceof StatefulHashMap<?,?>) {
          if (((StatefulHashMap)itemValue).size() == 0) {
            continue;
          }
        }

        addSubColumn(key, fieldName, i++, itemValue);
      }
    }
    else {
      addColumn(key, fieldName, array);
    }
  }

  /**
   * Stores an Avro map: one sub-column per entry when the target column family
   * is a super column family (honoring DELETED entry states), otherwise the
   * whole map serialized into a single regular column.
   */
  @SuppressWarnings("unchecked")
  public void addStatefulHashMap(K key, String fieldName, StatefulHashMap<Utf8,Object> map) {
    if (isSuper( cassandraMapping.getFamily(fieldName) )) {
      int i= 0;
      for (Utf8 mapKey: map.keySet()) {
        // entries flagged DELETED are removed from the store rather than written
        if (map.getState(mapKey) == State.DELETED) {
          deleteSubColumn(key, fieldName, mapKey.toString());
          continue;
        }

        // TODO: hack, do not store empty arrays
        Object mapValue = map.get(mapKey);
        if (mapValue instanceof GenericArray<?>) {
          if (((GenericArray)mapValue).size() == 0) {
            continue;
          }
        } else if (mapValue instanceof StatefulHashMap<?,?>) {
          if (((StatefulHashMap)mapValue).size() == 0) {
            continue;
          }
        }

        addSubColumn(key, fieldName, mapKey.toString(), mapValue);
      }
    }
    else {
      addColumn(key, fieldName, map);
    }
  }

  /**
   * Serialize value to ByteBuffer.
   * @param value the member value
   * @return ByteBuffer object
   */
  @SuppressWarnings("unchecked")
  public ByteBuffer toByteBuffer(Object value) {
    ByteBuffer byteBuffer = null;
    Serializer serializer = GoraSerializerTypeInferer.getSerializer(value);
    if (serializer == null) {
      LOG.info("Serializer not found for: " + value.toString());
    }
    else {
      byteBuffer = serializer.toByteBuffer(value);
    }

    if (byteBuffer == null) {
      LOG.info("value class=" + value.getClass().getName() + " value=" + value + " -> null");
    }

    return byteBuffer;
  }

  /**
   * Select a family column in the keyspace.
   * @param cassandraQuery a wrapper of the query
   * @param family the family name to be queried
   * @return a list of family rows
   */
  public List<Row<K, ByteBuffer, ByteBuffer>> execute(CassandraQuery<K, T> cassandraQuery, String family) {

    String[] columnNames = cassandraQuery.getColumns(family);
    ByteBuffer[] columnNameByteBuffers = new ByteBuffer[columnNames.length];
    for (int i = 0; i < columnNames.length; i++) {
      columnNameByteBuffers[i] = StringSerializer.get().toByteBuffer(columnNames[i]);
    }

    Query<K, T> query = cassandraQuery.getQuery();
    int limit = (int) query.getLimit();
    if (limit < 1) {
      // a non-positive limit means "no limit"
      limit = Integer.MAX_VALUE;
    }
    K startKey = query.getStartKey();
    K endKey = query.getEndKey();

    RangeSlicesQuery<K, ByteBuffer, ByteBuffer> rangeSlicesQuery = HFactory.createRangeSlicesQuery(this.keyspace, this.keySerializer, ByteBufferSerializer.get(), ByteBufferSerializer.get());
    rangeSlicesQuery.setColumnFamily(family);
    rangeSlicesQuery.setKeys(startKey, endKey);
    // empty start/finish column names: scan the whole row up to the buffer limit
    rangeSlicesQuery.setRange(ByteBuffer.wrap(new byte[0]), ByteBuffer.wrap(new byte[0]), false, GoraRecordReader.BUFFER_LIMIT_READ_VALUE);
    rangeSlicesQuery.setRowCount(limit);
    rangeSlicesQuery.setColumnNames(columnNameByteBuffers);

    QueryResult<OrderedRows<K, ByteBuffer, ByteBuffer>> queryResult = rangeSlicesQuery.execute();
    OrderedRows<K, ByteBuffer, ByteBuffer> orderedRows = queryResult.get();

    return orderedRows.getList();
  }

  /**
   * Select the families that contain at least one column mapped to a query field.
   * @param query indicates the columns to select
   * @return a map which keys are the family names and values the corresponding column names required to get all the query fields.
   */
  public Map<String, List<String>> getFamilyMap(Query<K, T> query) {
    Map<String, List<String>> map = new HashMap<String, List<String>>();
    for (String field: query.getFields()) {
      String family = this.cassandraMapping.getFamily(field);
      String column = this.cassandraMapping.getColumn(field);

      // check if the family value was already initialized
      List<String> list = map.get(family);
      if (list == null) {
        list = new ArrayList<String>();
        map.put(family, list);
      }

      if (column != null) {
        list.add(column);
      }

    }

    return map;
  }

  /**
   * Select the field names according to the column names, which format if fully qualified: "family:column"
   * @param query
   * @return a map which keys are the fully qualified column names and values the query fields
   */
  public Map<String, String> getReverseMap(Query<K, T> query) {
    Map<String, String> map = new HashMap<String, String>();
    for (String field: query.getFields()) {
      String family = this.cassandraMapping.getFamily(field);
      String column = this.cassandraMapping.getColumn(field);

      map.put(family + ":" + column, field);
    }

    return map;
  }

  /** @return true if the given column family is mapped as a super column family. */
  public boolean isSuper(String family) {
    return this.cassandraMapping.isSuper(family);
  }

  /**
   * Range scan over a super column family; the super-column counterpart
   * of {@link #execute(CassandraQuery, String)}.
   *
   * @param cassandraQuery a wrapper of the query
   * @param family the super column family name to be queried
   * @return a list of super rows in key order
   */
  public List<SuperRow<K, String, ByteBuffer, ByteBuffer>> executeSuper(CassandraQuery<K, T> cassandraQuery, String family) {
    String[] columnNames = cassandraQuery.getColumns(family);

    Query<K, T> query = cassandraQuery.getQuery();
    int limit = (int) query.getLimit();
    if (limit < 1) {
      // a non-positive limit means "no limit"
      limit = Integer.MAX_VALUE;
    }
    K startKey = query.getStartKey();
    K endKey = query.getEndKey();

    RangeSuperSlicesQuery<K, String, ByteBuffer, ByteBuffer> rangeSuperSlicesQuery = HFactory.createRangeSuperSlicesQuery(this.keyspace, this.keySerializer, StringSerializer.get(), ByteBufferSerializer.get(), ByteBufferSerializer.get());
    rangeSuperSlicesQuery.setColumnFamily(family);
    rangeSuperSlicesQuery.setKeys(startKey, endKey);
    rangeSuperSlicesQuery.setRange("", "", false, GoraRecordReader.BUFFER_LIMIT_READ_VALUE);
    rangeSuperSlicesQuery.setRowCount(limit);
    rangeSuperSlicesQuery.setColumnNames(columnNames);


    QueryResult<OrderedSuperRows<K, String, ByteBuffer, ByteBuffer>> queryResult = rangeSuperSlicesQuery.execute();
    OrderedSuperRows<K, String, ByteBuffer, ByteBuffer> orderedRows = queryResult.get();
    return orderedRows.getList();
  }

  /**
   * Obtain Schema/Keyspace name
   * @return Keyspace
   */
  public String getKeyspaceName() {
    return this.cassandraMapping.getKeyspaceName();
  }
}
| prateekbansal/apache-gora-0.4 | gora-cassandra/src/main/java/org/apache/gora/cassandra/store/CassandraClient.java | Java | apache-2.0 | 17,283 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.io.sstable;
import java.io.IOException;
import java.util.Iterator;
import com.google.common.util.concurrent.RateLimiter;
import com.google.common.collect.AbstractIterator;
import org.apache.cassandra.db.DataRange;
import org.apache.cassandra.db.DecoratedKey;
import org.apache.cassandra.db.RowIndexEntry;
import org.apache.cassandra.db.RowPosition;
import org.apache.cassandra.db.columniterator.IColumnIteratorFactory;
import org.apache.cassandra.db.columniterator.LazyColumnIterator;
import org.apache.cassandra.db.columniterator.OnDiskAtomIterator;
import org.apache.cassandra.db.compaction.ICompactionScanner;
import org.apache.cassandra.db.filter.QueryFilter;
import org.apache.cassandra.dht.AbstractBounds;
import org.apache.cassandra.dht.Range;
import org.apache.cassandra.dht.Token;
import org.apache.cassandra.io.util.FileUtils;
import org.apache.cassandra.io.util.RandomAccessReader;
import org.apache.cassandra.utils.ByteBufferUtil;
public class SSTableScanner implements ICompactionScanner
{
    // Reader over the sstable's data component.
    protected final RandomAccessReader dfile;
    // Reader over the sstable's primary index component.
    protected final RandomAccessReader ifile;
    public final SSTableReader sstable;

    // Restriction on which keys/columns this scanner yields.
    private final DataRange dataRange;
    // Data-file position at which scanning stops (file length when the range
    // extends to the end of the sstable); computed once in the constructor.
    private final long stopAt;

    // Lazily created by hasNext()/next().
    protected Iterator<OnDiskAtomIterator> iterator;

    /**
     * @param sstable SSTable to scan; must not be null
     * @param dataRange range of data to fetch; must not be null
     * @param limiter background i/o RateLimiter; may be null
     */
    SSTableScanner(SSTableReader sstable, DataRange dataRange, RateLimiter limiter)
    {
        assert sstable != null;
        this.dfile = limiter == null ? sstable.openDataReader() : sstable.openDataReader(limiter);
        this.ifile = sstable.openIndexReader();
        this.sstable = sstable;
        this.dataRange = dataRange;
        this.stopAt = computeStopAt();
        seekToStart();
    }

    // Positions ifile on the first index entry whose key is >= dataRange.startKey(),
    // and dfile on the corresponding row in the data file.
    private void seekToStart()
    {
        // A minimum start key means "start from the beginning of the file".
        if (dataRange.startKey().isMinimum(sstable.partitioner))
            return;

        long indexPosition = sstable.getIndexScanPosition(dataRange.startKey());
        // -1 means the key is before everything in the sstable. So just start from the beginning.
        if (indexPosition == -1)
            return;

        ifile.seek(indexPosition);
        try
        {
            // Scan forward through index entries until reaching the start key.
            while (!ifile.isEOF())
            {
                indexPosition = ifile.getFilePointer();
                DecoratedKey indexDecoratedKey = sstable.partitioner.decorateKey(ByteBufferUtil.readWithShortLength(ifile));
                int comparison = indexDecoratedKey.compareTo(dataRange.startKey());
                if (comparison >= 0)
                {
                    // Found, just read the dataPosition and seek into index and data files
                    long dataPosition = ifile.readLong();
                    // Rewind the index file to the start of this entry so the
                    // iterator re-reads it as the first row.
                    ifile.seek(indexPosition);
                    dfile.seek(dataPosition);
                    break;
                }
                else
                {
                    // Not there yet: skip the rest of this index entry and continue.
                    RowIndexEntry.serializer.skip(ifile);
                }
            }
        }
        catch (IOException e)
        {
            sstable.markSuspect();
            throw new CorruptSSTableException(e, sstable.getFilename());
        }
    }

    // Computes the data-file position at which scanning must stop.
    private long computeStopAt()
    {
        AbstractBounds<RowPosition> keyRange = dataRange.keyRange();
        // A minimum stop key or a wrap-around range means "scan to end of file".
        if (dataRange.stopKey().isMinimum(sstable.partitioner) || (keyRange instanceof Range && ((Range)keyRange).isWrapAround()))
            return dfile.length();

        // Position of the first row strictly past the range; null when no such row.
        RowIndexEntry position = sstable.getPosition(keyRange.toRowBounds().right, SSTableReader.Operator.GT);
        return position == null ? dfile.length() : position.position;
    }

    // Closes both the data and index readers.
    public void close() throws IOException
    {
        FileUtils.close(dfile, ifile);
    }

    public long getLengthInBytes()
    {
        return dfile.length();
    }

    public long getCurrentPosition()
    {
        return dfile.getFilePointer();
    }

    public String getBackingFiles()
    {
        return sstable.toString();
    }

    public boolean hasNext()
    {
        if (iterator == null)
            iterator = createIterator();
        return iterator.hasNext();
    }

    public OnDiskAtomIterator next()
    {
        if (iterator == null)
            iterator = createIterator();
        return iterator.next();
    }

    public void remove()
    {
        throw new UnsupportedOperationException();
    }

    private Iterator<OnDiskAtomIterator> createIterator()
    {
        return new KeyScanningIterator();
    }

    // Walks (key, index entry) pairs from the index file, reading one entry
    // ahead so each row's size can be computed from the next row's position.
    protected class KeyScanningIterator extends AbstractIterator<OnDiskAtomIterator>
    {
        private DecoratedKey nextKey;
        private RowIndexEntry nextEntry;
        private DecoratedKey currentKey;
        private RowIndexEntry currentEntry;

        protected OnDiskAtomIterator computeNext()
        {
            try
            {
                if (ifile.isEOF() && nextKey == null)
                    return endOfData();

                if (currentKey == null)
                {
                    // First call: read the current entry directly from the index.
                    currentKey = sstable.partitioner.decorateKey(ByteBufferUtil.readWithShortLength(ifile));
                    currentEntry = RowIndexEntry.serializer.deserialize(ifile, sstable.descriptor.version);
                }
                else
                {
                    // Subsequent calls: promote the read-ahead entry.
                    currentKey = nextKey;
                    currentEntry = nextEntry;
                }

                assert currentEntry.position <= stopAt;
                // Reaching stopAt means the current row is past the requested range.
                if (currentEntry.position == stopAt)
                    return endOfData();

                // Read one entry ahead (if any) for the next iteration.
                if (ifile.isEOF())
                {
                    nextKey = null;
                    nextEntry = null;
                }
                else
                {
                    nextKey = sstable.partitioner.decorateKey(ByteBufferUtil.readWithShortLength(ifile));
                    nextEntry = RowIndexEntry.serializer.deserialize(ifile, sstable.descriptor.version);
                }

                assert !dfile.isEOF();

                if (dataRange.selectsFullRowFor(currentKey.key))
                {
                    // Whole row requested: seek past the on-disk key (and the row
                    // size/column count header in older formats) and hand the data
                    // file to an identity iterator over the raw row.
                    dfile.seek(currentEntry.position);
                    ByteBufferUtil.readWithShortLength(dfile); // key
                    if (sstable.descriptor.version.hasRowSizeAndColumnCount)
                        dfile.readLong();
                    long dataSize = (nextEntry == null ? dfile.length() : nextEntry.position) - dfile.getFilePointer();
                    return new SSTableIdentityIterator(sstable, dfile, currentKey, dataSize);
                }

                // Partial row: defer creating the filtered column iterator until
                // the row is actually consumed.
                return new LazyColumnIterator(currentKey, new IColumnIteratorFactory()
                {
                    public OnDiskAtomIterator create()
                    {
                        return dataRange.columnFilter(currentKey.key).getSSTableColumnIterator(sstable, dfile, currentKey, currentEntry);
                    }
                });
            }
            catch (IOException e)
            {
                sstable.markSuspect();
                throw new CorruptSSTableException(e, sstable.getFilename());
            }
        }
    }

    @Override
    public String toString()
    {
        return getClass().getSimpleName() + "(" +
               "dfile=" + dfile +
               " ifile=" + ifile +
               " sstable=" + sstable +
               ")";
    }
}
| DavidHerzogTU-Berlin/cassandraToRun | src/java/org/apache/cassandra/io/sstable/SSTableScanner.java | Java | apache-2.0 | 8,241 |
/*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.glacier.model.transform;
import java.math.*;
import javax.annotation.Generated;
import com.amazonaws.services.glacier.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
 * PartListElement JSON Unmarshaller.
 * <p>
 * Generated code: walks the JSON token stream and populates a
 * {@code PartListElement} from its "RangeInBytes" and "SHA256TreeHash" fields.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class PartListElementJsonUnmarshaller implements Unmarshaller<PartListElement, JsonUnmarshallerContext> {

    /**
     * Unmarshalls a {@code PartListElement} from the context's current JSON value,
     * or returns {@code null} when that value is JSON null.
     */
    public PartListElement unmarshall(JsonUnmarshallerContext context) throws Exception {
        PartListElement partListElement = new PartListElement();
        // Depth bookkeeping: fields belong to this element only at targetDepth,
        // and parsing stops once the parser climbs back to originalDepth.
        int originalDepth = context.getCurrentDepth();
        String currentParentElement = context.getCurrentParentElement();
        int targetDepth = originalDepth + 1;

        JsonToken token = context.getCurrentToken();
        if (token == null)
            token = context.nextToken();
        if (token == VALUE_NULL) {
            return null;
        }

        while (true) {
            if (token == null)
                break;

            if (token == FIELD_NAME || token == START_OBJECT) {
                if (context.testExpression("RangeInBytes", targetDepth)) {
                    context.nextToken();
                    partListElement.setRangeInBytes(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("SHA256TreeHash", targetDepth)) {
                    context.nextToken();
                    partListElement.setSHA256TreeHash(context.getUnmarshaller(String.class).unmarshall(context));
                }
            } else if (token == END_ARRAY || token == END_OBJECT) {
                // Leaving a nested structure: stop when back at or above the
                // depth this element started in.
                if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
                    if (context.getCurrentDepth() <= originalDepth)
                        break;
                }
            }
            token = context.nextToken();
        }

        return partListElement;
    }

    private static PartListElementJsonUnmarshaller instance;

    /**
     * Returns the shared unmarshaller instance, creating it lazily
     * (the unmarshaller keeps no per-call state).
     */
    public static PartListElementJsonUnmarshaller getInstance() {
        if (instance == null)
            instance = new PartListElementJsonUnmarshaller();
        return instance;
    }
}
| dagnir/aws-sdk-java | aws-java-sdk-glacier/src/main/java/com/amazonaws/services/glacier/model/transform/PartListElementJsonUnmarshaller.java | Java | apache-2.0 | 2,994 |
package seborama.demo2.kafka.model;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonInclude;
/**
 * Plain data holder for an order's id and lifecycle flags.
 * Jackson skips empty values on write and ignores unknown properties on read.
 */
@JsonInclude(JsonInclude.Include.NON_EMPTY)
@JsonIgnoreProperties(ignoreUnknown = true)
public class Order {

    private String id;
    private boolean fulfilled;
    private boolean dispatched;
    private boolean completed;

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public Boolean getFulfilled() {
        return fulfilled;
    }

    public void setFulfilled(Boolean fulfilled) {
        this.fulfilled = fulfilled;
    }

    public Boolean getDispatched() {
        return dispatched;
    }

    public void setDispatched(Boolean dispatched) {
        this.dispatched = dispatched;
    }

    public Boolean getCompleted() {
        return completed;
    }

    public void setCompleted(Boolean completed) {
        this.completed = completed;
    }

    @Override
    public String toString() {
        return String.format(
                "Order{id='%s', fulfilled=%s, dispatched=%s, completed=%s}",
                id, fulfilled, dispatched, completed);
    }
}
| seborama/demo1-kafka | src/main/java/seborama/demo2/kafka/model/Order.java | Java | apache-2.0 | 1,265 |
/**
 * Concrete burger variant: a chicken burger with a fixed name and price.
 */
public class ChickenBurger extends Burger {

    private static final String NAME = "Chicken Burger";
    private static final float PRICE = 50.5f;

    @Override
    public float price() {
        return PRICE;
    }

    @Override
    public String name() {
        return NAME;
    }
}
| Iamasoldier6/DesignPattern | BuilderPatternDemo/src/ChickenBurger.java | Java | apache-2.0 | 168 |
/* tslint:disable:max-classes-per-file */
import { forEach } from 'ramda';
import Interactor from '../CourseInteractor';
import InteractorLoader from '../CourseInteractorLoader';
import { ICourseStorage } from '../ICourseStorage';
import { IProgressStorage } from '../IProgressStorage';
import { ISerializedCourse } from '../ISerializedCourse';
import { ISerializedProgress, Progress } from '../ISerializedProgress';
// In-memory course fixture keyed by course id; a two-level tree with one
// leaf exercise under each child node.
const courses: { [propName: string]: ISerializedCourse } = {
  '09438926-b170-4005-a6e8-5dd8fba83cde': {
    id: '09438926-b170-4005-a6e8-5dd8fba83cde',
    title: 'Foo bar',
    children: [
      {
        id: '01f23c2a-b681-43db-9d27-5d8d59f62aed',
        children: [
          {
            id: '23e20d5b-ad8e-41be-9891-5ca7b12675c4',
            type: 'foo'
          }
        ]
      },
      {
        id: 'e194f80b-7312-43a2-995e-060f64631782',
        children: [
          {
            id: '84fdc1a1-e3bf-4a87-8360-0c3b7beec179',
            foo: 'bar',
            type: 'foo'
          }
        ]
      }
    ]
  }
};

// Shared progress record handed out by the mock progress storage; starts empty.
const progresses: ISerializedProgress = {};
class MockCourseStorage implements ICourseStorage {
  /**
   * Resolves with the fixture course for `id`, or rejects with an Error when
   * no such course exists.
   *
   * Fix: the original called `reject(...)` unconditionally, even after
   * `resolve(...)` had already been invoked, relying on the fact that a
   * promise can only settle once. The `else` makes the two outcomes
   * mutually exclusive.
   */
  public getCourse(id: string) {
    return new Promise<ISerializedCourse>((resolve, reject) => {
      if (courses[id]) {
        resolve(courses[id]);
      } else {
        reject(new Error(`There exists no course with id ${id}`));
      }
    });
  }
}
class MockProgressStorage implements IProgressStorage {
  // Always hands back the shared in-memory progress object.
  public async getProgress(id: string): Promise<ISerializedProgress> {
    return progresses;
  }

  // Accepts any progress write without persisting it.
  public async setProgress(id: string, progress: ISerializedProgress): Promise<void> {
    // intentionally a no-op
  }

  // Nothing to clear in this mock.
  public async resetProgress(): Promise<void> {
    // intentionally a no-op
  }
}
// Shared fixtures, re-created before every test.
let interactorLoader: InteractorLoader;
let interactor: Interactor;

beforeEach(() => {
  const storage = new MockCourseStorage();
  const progress = new MockProgressStorage();
  interactorLoader = new InteractorLoader(storage, progress);
});

// Loading a known course id must succeed (the returned promise resolves).
it('loadCourse loads the course from storage if it exists', () =>
  interactorLoader.loadCourse('09438926-b170-4005-a6e8-5dd8fba83cde'));

// Loading an unknown id must reject with an Error.
it('loadCourse fails if the course does not exist', () => {
  return interactorLoader
    .loadCourse('c990eacb-12af-4085-8b50-25d95d114984')
    .catch(err => {
      expect(err).toBeInstanceOf(Error);
    });
});
describe('getStructure', () => {
  // Each case needs a fully loaded interactor for the fixture course.
  beforeEach(() =>
    interactorLoader
      .loadCourse('09438926-b170-4005-a6e8-5dd8fba83cde')
      .then(i => {
        interactor = i;
      })
  );

  it('returns the whole tree by default', () => {
    expect(interactor.getStructure()).toMatchSnapshot();
  });

  it('returns only the first levels if a level is passed', () => {
    expect(interactor.getStructure(1)).toMatchSnapshot();
  });
});
describe('reset children progress', () => {
  beforeEach(() =>
    interactorLoader
      .loadCourse('09438926-b170-4005-a6e8-5dd8fba83cde')
      .then(i => {
        interactor = i;
      })
  );

  it('resets progress correctly', () => {
    // Root course id plus every descendant node id from the fixture tree.
    const root = '09438926-b170-4005-a6e8-5dd8fba83cde';
    const children = [
      '01f23c2a-b681-43db-9d27-5d8d59f62aed',
      '23e20d5b-ad8e-41be-9891-5ca7b12675c4',
      'e194f80b-7312-43a2-995e-060f64631782',
      '84fdc1a1-e3bf-4a87-8360-0c3b7beec179'
    ];
    // Mark every node correct first...
    forEach(
      id => {
        interactor.markAsCorrect(id);
      },
      [root, ...children]
    );
    // ...then resetting the root's children must leave the root itself
    // Correct while all descendants return to Unseen.
    interactor.resetChildrenProgress(root);
    expect(interactor.getProgress(root).progress).toBe(Progress.Correct);
    forEach(id => {
      expect(interactor.getProgress(id).progress).toBe(Progress.Unseen);
    }, children);
  });
});
| serlo-org/serlo-abc | packages/entities-interactor/__tests__/CourseInteractor.ts | TypeScript | apache-2.0 | 3,604 |
import React from 'react';
import { action } from '@storybook/addon-actions';
import { Action } from '../Actions';
import ActionBar from './ActionBar.component';
// Primary button configuration, reused by several stories below.
const primary = {
	label: 'Primary',
	icon: 'talend-cog',
	bsStyle: 'primary',
	'data-feature': 'actionbar.primary',
	onClick: action('You clicked me'),
};

// Default (no-selection) action bar content: plain buttons, a split dropdown
// and a dropdown on the left; a file input and a plain button on the right.
const actions = {
	left: [
		primary,
		{
			label: 'Secondary1',
			icon: 'talend-cog',
			'data-feature': 'actionbar.secondary',
			onClick: action('You clicked me'),
		},
		{
			displayMode: ActionBar.DISPLAY_MODES.SPLIT_DROPDOWN,
			label: 'Secondary3',
			icon: 'talend-cog',
			'data-feature': 'actionbar.splitdropdown',
			onClick: action('on split button click'),
			items: [
				{
					label: 'From Local',
					'data-feature': 'actionbar.splitdropdown.items',
					onClick: action('From Local click'),
				},
				{
					label: 'From Remote',
					'data-feature': 'actionbar.splitdropdown.items',
					onClick: action('From Remote click'),
				},
			],
			emptyDropdownLabel: 'No option',
		},
		{
			id: 'dropdown',
			displayMode: ActionBar.DISPLAY_MODES.DROPDOWN,
			label: 'Dropdown',
			icon: 'talend-cog',
			items: [
				{
					label: 'From Local',
					onClick: action('From Local click'),
				},
				{
					label: 'From Remote',
					onClick: action('From Remote click'),
				},
			],
		},
	],
	right: [
		{
			label: 'Secondary4',
			icon: 'talend-upload',
			displayMode: 'file',
			onChange: action('You changed me'),
		},
		{
			label: 'Secondary5',
			icon: 'talend-cog',
			onClick: action('You clicked me'),
		},
	],
};
// Single right-side action, also reused by the "right-only" story layout.
const multi3 = {
	label: 'multi3',
	icon: 'talend-cog',
	onClick: action('You clicked me'),
};

// Actions shown instead of `actions` when at least one item is selected;
// exercises the left/center/right slots.
const multiSelectActions = {
	left: [
		{
			label: 'multi1',
			icon: 'talend-cog',
			onClick: action('You clicked me'),
		},
		{
			label: 'multi2',
			icon: 'talend-cog',
			onClick: action('You clicked me'),
		},
	],
	center: [
		{
			label: 'multi5',
			icon: 'talend-cog',
			onClick: action('You clicked me'),
		},
	],
	right: [
		multi3,
		{
			label: 'multi4',
			icon: 'talend-cog',
			onClick: action('You clicked me'),
		},
	],
};
// Icon-only buttons grouped with DISPLAY_MODES.BTN_GROUP in the left, center
// and right slots (labels become tooltips via hideLabel).
const btnGroupActions = {
	left: [
		{
			displayMode: ActionBar.DISPLAY_MODES.BTN_GROUP,
			actions: [
				{
					label: 'hidden mean tooltips',
					icon: 'talend-cog',
					hideLabel: true,
					onClick: action('cog'),
				},
				{
					label: 'you are a super star',
					icon: 'talend-badge',
					hideLabel: true,
					onClick: action('badge'),
				},
				{
					label: 'but don t click this',
					icon: 'talend-cross',
					hideLabel: true,
					onClick: action('boom'),
				},
				{
					label: 'edit me',
					icon: 'talend-pencil',
					hideLabel: true,
					onClick: action('oh yes'),
				},
			],
		},
		{
			displayMode: ActionBar.DISPLAY_MODES.BTN_GROUP,
			actions: [
				{
					label: 'you can also add',
					icon: 'talend-plus-circle',
					hideLabel: true,
					onClick: action('add !'),
				},
				{
					label: 'search',
					icon: 'talend-search',
					hideLabel: true,
					onClick: action('search'),
				},
				{
					label: 'star',
					icon: 'talend-star',
					hideLabel: true,
					onClick: action('star'),
				},
			],
		},
	],
	center: [
		{
			displayMode: ActionBar.DISPLAY_MODES.BTN_GROUP,
			actions: [
				{
					label: 'go to dataprep',
					icon: 'talend-dataprep',
					hideLabel: true,
					onClick: action('dataprep'),
				},
				{
					label: 'go to elastic',
					icon: 'talend-elastic',
					hideLabel: true,
					onClick: action('elastic'),
				},
				{
					label: 'go to cloud engine',
					icon: 'talend-cloud-engine',
					hideLabel: true,
					onClick: action('cloud-engine'),
				},
			],
		},
	],
	right: [
		{
			displayMode: ActionBar.DISPLAY_MODES.BTN_GROUP,
			actions: [
				{
					label: 'table',
					icon: 'talend-table',
					hideLabel: true,
					onClick: action('table'),
				},
				{
					label: 'trash',
					icon: 'talend-trash',
					hideLabel: true,
					onClick: action('trash'),
				},
			],
		},
	],
};
// Props shared by the first stories of the Default export.
const basicProps = {
	actions,
	multiSelectActions,
};

// Bulk actions used by the "mass-actions" story below.
const multiDelete = {
	label: 'Delete',
	icon: 'talend-trash',
	onClick: action('multiple delete'),
	className: 'btn-icon-text',
};

const multiDuplicate = {
	label: 'Duplicate',
	icon: 'talend-files-o',
	onClick: action('multiple duplicate'),
	className: 'btn-icon-text',
};

const multiUpdate = {
	label: 'Update',
	icon: 'talend-file-move',
	onClick: action('multiple update'),
	className: 'btn-icon-text',
};

const multiFavorite = {
	label: 'Favorite',
	icon: 'talend-star',
	onClick: action('multiple favorite'),
	className: 'btn-icon-text',
};

const multiCertify = {
	label: 'Certify',
	icon: 'talend-badge',
	onClick: action('multiple certify'),
	className: 'btn-icon-text',
};

// Item-level bulk actions (multiSelectActions prop).
const massActions = {
	left: [multiDelete, multiDuplicate, multiUpdate],
};

// App-level bulk actions (appMultiSelectActions prop).
const appMassActions = {
	left: [multiFavorite, multiCertify],
};

// Storybook metadata for this group of stories.
export default {
	title: 'Form/Controls/ActionBar',
};
// Main story: ActionBar in its principal configurations — no selection,
// one selected, right-only layout, btn-groups, and mass/bulk actions.
export const Default = () => (
	<nav>
		<p>No Selected, Layout: Left Space Right</p>
		<div id="default">
			<ActionBar {...basicProps} selected={0} />
		</div>
		<p>1 Selected, Layout: Left Center Right</p>
		<div id="selected">
			<ActionBar {...basicProps} selected={1} />
		</div>
		<p>1 Selected, Layout: Right</p>
		<div id="right">
			<ActionBar
				selected={1}
				actions={{ left: [primary] }}
				multiSelectActions={{ right: [multi3] }}
			/>
		</div>
		<p>Toolbar with btn-group and only icons/ Layout: left, center, right</p>
		<div id="btn-group">
			<ActionBar actions={btnGroupActions} />
		</div>
		<p>3 items selected, with mass/bulk Actions</p>
		<div id="mass-actions">
			<ActionBar
				selected={3}
				multiSelectActions={massActions}
				appMultiSelectActions={appMassActions}
			/>
		</div>
	</nav>
);
// Story showing free-form ActionBar.Content children: anchor, button,
// Action component, a search form, and a paragraph, positioned with the
// left/center/right props.
export const Custom = () => (
	<nav>
		<div id="default">
			<ActionBar>
				<ActionBar.Content tag="a" left href="#/foo/bar">
					Hello anchor
				</ActionBar.Content>
				<ActionBar.Content tag="button" className="btn btn-default" left>
					Hello button
				</ActionBar.Content>
				<ActionBar.Content left>
					<Action label="hello Action" icon="talend-trash" onClick={action('onClick')} />
				</ActionBar.Content>
				<ActionBar.Content tag="form" role="search" center>
					<div className="form-group">
						<input type="text" className="form-control" placeholder="Search" />
					</div>
					<button type="submit" className="btn btn-default">
						Submit
					</button>
				</ActionBar.Content>
				<ActionBar.Content tag="p" right>
					Hello paragraph
				</ActionBar.Content>
			</ActionBar>
		</div>
	</nav>
);
| Talend/ui | packages/components/src/ActionBar/ActionBar.stories.js | JavaScript | apache-2.0 | 6,576 |
import hashlib
from core.analytics import InlineAnalytics
from core.observables import Hash
# Hash algorithm name -> hashlib constructor; one digest per entry is
# computed for every analyzed file.
HASH_TYPES_DICT = {
    "md5": hashlib.md5,
    "sha1": hashlib.sha1,
    "sha256": hashlib.sha256,
    "sha512": hashlib.sha512,
}
class HashFile(InlineAnalytics):
    """Inline analytics that hashes a file's contents with every configured algorithm."""

    default_values = {
        "name": "HashFile",
        "description": "Extracts MD5, SHA1, SHA256, SHA512 hashes from file",
    }

    ACTS_ON = ["File", "Certificate"]

    @staticmethod
    def each(f):
        """Computes all digests for ``f`` and links one Hash observable per algorithm."""
        if not f.body:
            return
        f.hashes = []
        for algo, digest in HashFile.extract_hashes(f.body.contents):
            hex_value = digest.hexdigest()
            observable = Hash.get_or_create(value=hex_value)
            observable.add_source("analytics")
            observable.save()
            f.active_link_to(
                observable,
                "{} hash".format(algo.upper()),
                "HashFile",
                clean_old=False,
            )
            f.hashes.append({"hash": algo, "value": hex_value})
        f.save()

    @staticmethod
    def extract_hashes(body_contents):
        """Streams ``body_contents`` through every configured digest.

        Returns the items() view of a {algorithm name: digest object} dict.
        """
        digests = {name: factory() for name, factory in HASH_TYPES_DICT.items()}
        chunk = body_contents.read(512 * 16)
        while chunk:
            for digest in digests.values():
                digest.update(chunk)
            chunk = body_contents.read(512 * 16)
        return digests.items()
| yeti-platform/yeti | plugins/analytics/public/hash_file.py | Python | apache-2.0 | 1,405 |
/**
* Copyright 2011-2021 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.dmdl.directio.text;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.EnumMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.Function;
import java.util.function.Predicate;
import org.apache.hadoop.io.compress.CompressionCodec;
import com.asakusafw.dmdl.directio.util.CharsetUtil;
import com.asakusafw.dmdl.directio.util.ClassName;
import com.asakusafw.dmdl.directio.util.Value;
import com.asakusafw.dmdl.java.emitter.EmitContext;
import com.asakusafw.dmdl.java.util.JavaName;
import com.asakusafw.dmdl.model.BasicTypeKind;
import com.asakusafw.dmdl.semantics.ModelDeclaration;
import com.asakusafw.dmdl.semantics.PropertyDeclaration;
import com.asakusafw.dmdl.semantics.type.BasicType;
import com.asakusafw.dmdl.util.AttributeUtil;
import com.asakusafw.runtime.io.text.TextFormat;
import com.asakusafw.runtime.io.text.TextInput;
import com.asakusafw.runtime.io.text.directio.AbstractTextStreamFormat;
import com.asakusafw.runtime.io.text.driver.FieldDefinition;
import com.asakusafw.runtime.io.text.driver.RecordDefinition;
import com.asakusafw.runtime.io.text.value.BooleanOptionFieldAdapter;
import com.asakusafw.runtime.io.text.value.ByteOptionFieldAdapter;
import com.asakusafw.runtime.io.text.value.DateOptionFieldAdapter;
import com.asakusafw.runtime.io.text.value.DateTimeOptionFieldAdapter;
import com.asakusafw.runtime.io.text.value.DecimalOptionFieldAdapter;
import com.asakusafw.runtime.io.text.value.DoubleOptionFieldAdapter;
import com.asakusafw.runtime.io.text.value.FloatOptionFieldAdapter;
import com.asakusafw.runtime.io.text.value.IntOptionFieldAdapter;
import com.asakusafw.runtime.io.text.value.LongOptionFieldAdapter;
import com.asakusafw.runtime.io.text.value.ShortOptionFieldAdapter;
import com.asakusafw.runtime.io.text.value.StringOptionFieldAdapter;
import com.asakusafw.runtime.io.text.value.ValueOptionFieldAdapter;
import com.asakusafw.runtime.io.util.InputSplitter;
import com.asakusafw.runtime.io.util.InputSplitters;
import com.asakusafw.runtime.value.StringOption;
import com.asakusafw.utils.java.model.syntax.ClassDeclaration;
import com.asakusafw.utils.java.model.syntax.Expression;
import com.asakusafw.utils.java.model.syntax.InfixOperator;
import com.asakusafw.utils.java.model.syntax.MethodDeclaration;
import com.asakusafw.utils.java.model.syntax.ModelFactory;
import com.asakusafw.utils.java.model.syntax.SimpleName;
import com.asakusafw.utils.java.model.syntax.Statement;
import com.asakusafw.utils.java.model.syntax.Type;
import com.asakusafw.utils.java.model.syntax.TypeBodyDeclaration;
import com.asakusafw.utils.java.model.util.AttributeBuilder;
import com.asakusafw.utils.java.model.util.ExpressionBuilder;
import com.asakusafw.utils.java.model.util.JavadocBuilder;
import com.asakusafw.utils.java.model.util.Models;
import com.asakusafw.utils.java.model.util.TypeBuilder;
/**
* Generates {@link AbstractTextStreamFormat}.
* @since 0.9.1
*/
public abstract class AbstractTextStreamFormatGenerator {
// Associates each DMDL basic type with its ValueOptionFieldAdapter class.
private static final Map<BasicTypeKind, Class<? extends ValueOptionFieldAdapter<?>>> ADAPTER_TYPES;
static {
    Map<BasicTypeKind, Class<? extends ValueOptionFieldAdapter<?>>> map = new EnumMap<>(BasicTypeKind.class);
    map.put(BasicTypeKind.BYTE, ByteOptionFieldAdapter.class);
    map.put(BasicTypeKind.SHORT, ShortOptionFieldAdapter.class);
    map.put(BasicTypeKind.INT, IntOptionFieldAdapter.class);
    map.put(BasicTypeKind.LONG, LongOptionFieldAdapter.class);
    map.put(BasicTypeKind.FLOAT, FloatOptionFieldAdapter.class);
    map.put(BasicTypeKind.DOUBLE, DoubleOptionFieldAdapter.class);
    map.put(BasicTypeKind.DECIMAL, DecimalOptionFieldAdapter.class);
    map.put(BasicTypeKind.TEXT, StringOptionFieldAdapter.class);
    map.put(BasicTypeKind.BOOLEAN, BooleanOptionFieldAdapter.class);
    map.put(BasicTypeKind.DATE, DateOptionFieldAdapter.class);
    map.put(BasicTypeKind.DATETIME, DateTimeOptionFieldAdapter.class);
    ADAPTER_TYPES = map;
}
/**
 * The current context.
 */
protected final EmitContext context;

/**
 * The target model.
 */
protected final ModelDeclaration model;

// Java syntax-model factory obtained from the context; builds all AST nodes.
private final ModelFactory f;

// File-level text format settings (header type, less/more input actions, ...).
private final TextFormatSettings formatSettings;

// Record-level field defaults; individual properties may carry their own settings.
private final TextFieldSettings fieldDefaultSettings;

/**
 * Creates a new instance.
 * @param context the current context
 * @param model the target model
 * @param formatSettings the text format settings
 * @param fieldDefaultSettings the field default settings
 */
public AbstractTextStreamFormatGenerator(
        EmitContext context, ModelDeclaration model,
        TextFormatSettings formatSettings, TextFieldSettings fieldDefaultSettings) {
    this.context = context;
    this.model = model;
    this.formatSettings = formatSettings;
    this.fieldDefaultSettings = fieldDefaultSettings;
    this.f = context.getModelFactory();
}
/**
 * Emits an implementation of {@link AbstractTextStreamFormat} class as a Java compilation unit.
 * @param description the format description
 * @throws IOException if I/O error was occurred while emitting the compilation unit
 */
protected void emit(String description) throws IOException {
    // Generated shape: public class <Name> extends AbstractTextStreamFormat<Model> { <members> }
    ClassDeclaration decl = f.newClassDeclaration(
            new JavadocBuilder(f)
                    .inline(Messages.getString("AbstractTextStreamFormatGenerator.javadocClassOverview"), //$NON-NLS-1$
                            d -> d.text(description),
                            d -> d.linkType(context.resolve(model.getSymbol())))
                    .toJavadoc(),
            new AttributeBuilder(f)
                    .Public()
                    .toAttributes(),
            context.getTypeName(),
            f.newParameterizedType(
                    context.resolve(AbstractTextStreamFormat.class),
                    context.resolve(model.getSymbol())),
            Collections.emptyList(),
            createMembers());
    context.emit(decl);
}
// Assembles every member of the generated format class, in declaration order.
private List<? extends TypeBodyDeclaration> createMembers() {
    List<TypeBodyDeclaration> members = new ArrayList<>();
    members.add(createGetSupportedType());
    members.add(createCreateTextFormat());
    members.addAll(createCreateRecordDefinition());
    // The remaining members are optional: each is added only when its
    // factory method actually produces a declaration.
    createGetInputSplitter().ifPresent(members::add);
    createGetCompressionCodecClass().ifPresent(members::add);
    createAfterInput().ifPresent(members::add);
    createBeforeOutput().ifPresent(members::add);
    return members;
}
// Generates: public Class<Model> getSupportedType() { return Model.class; }
private MethodDeclaration createGetSupportedType() {
    return f.newMethodDeclaration(
            null,
            new AttributeBuilder(f)
                    .annotation(context.resolve(Override.class))
                    .Public()
                    .toAttributes(),
            f.newParameterizedType(
                    context.resolve(Class.class),
                    context.resolve(model.getSymbol())),
            f.newSimpleName("getSupportedType"), //$NON-NLS-1$
            Collections.emptyList(),
            Arrays.asList(new TypeBuilder(f, context.resolve(model.getSymbol()))
                    .dotClass()
                    .toReturnStatement()));
}
// Generates: public TextFormat createTextFormat() { <subclass-provided body> }
private MethodDeclaration createCreateTextFormat() {
    return f.newMethodDeclaration(
            null,
            new AttributeBuilder(f)
                    .annotation(context.resolve(Override.class))
                    .Public()
                    .toAttributes(),
            context.resolve(TextFormat.class),
            f.newSimpleName("createTextFormat"), //$NON-NLS-1$
            Collections.emptyList(),
            createGetTextFormatInternal());
}
/**
 * Returns the statements used as the body of the generated
 * {@code createTextFormat()} method (see {@link AbstractTextStreamFormat#getTextFormat()}).
 * @return the body statements
 */
protected abstract List<Statement> createGetTextFormatInternal();
// Generates createRecordDefinition() plus one private field-definition getter
// per value property (the getters come from buildRecordDefinition()).
private List<MethodDeclaration> createCreateRecordDefinition() {
    SimpleName builder = f.newSimpleName("builder"); //$NON-NLS-1$
    List<Statement> statements = new ArrayList<>();
    // RecordDefinition.Builder<Model> builder = RecordDefinition.builder(Model.class);
    statements.add(new TypeBuilder(f, context.resolve(RecordDefinition.class))
            .method("builder", f.newClassLiteral(context.resolve(model.getSymbol()))) //$NON-NLS-1$
            .toLocalVariableDeclaration(
                    f.newParameterizedType(
                            context.resolve(RecordDefinition.Builder.class),
                            context.resolve(model.getSymbol())),
                    builder));
    // Configures the builder and collects the per-field getter declarations.
    List<MethodDeclaration> fields = buildRecordDefinition(statements, builder);
    statements.add(new ExpressionBuilder(f, builder)
            .method("build") //$NON-NLS-1$
            .toReturnStatement());
    List<MethodDeclaration> results = new ArrayList<>();
    results.add(f.newMethodDeclaration(
            null,
            new AttributeBuilder(f)
                    .annotation(context.resolve(Override.class))
                    .Protected()
                    .toAttributes(),
            f.newParameterizedType(
                    context.resolve(RecordDefinition.class),
                    context.resolve(model.getSymbol())),
            f.newSimpleName("createRecordDefinition"), //$NON-NLS-1$
            Collections.emptyList(),
            statements));
    results.addAll(fields);
    return results;
}
// Appends the record-level builder configuration to `statements` and returns
// the field-definition getter methods for every VALUE-kind property; each
// setting call is emitted only when the setting is actually present.
private List<MethodDeclaration> buildRecordDefinition(List<Statement> statements, SimpleName builder) {
    formatSettings.getHeaderType().ifPresent(v -> statements.add(new ExpressionBuilder(f, builder)
            .method("withHeaderType", resolve(v)) //$NON-NLS-1$
            .toStatement()));
    formatSettings.getLessInputAction().ifPresent(v -> statements.add(new ExpressionBuilder(f, builder)
            .method("withOnLessInput", resolve(v)) //$NON-NLS-1$
            .toStatement()));
    formatSettings.getMoreInputAction().ifPresent(v -> statements.add(new ExpressionBuilder(f, builder)
            .method("withOnMoreInput", resolve(v)) //$NON-NLS-1$
            .toStatement()));
    // Record-level field defaults; individual fields can override these in
    // their own FieldDefinition (see createGetFieldDefinition).
    fieldDefaultSettings.getTrimInputWhitespaces().ifPresent(v -> statements.add(new ExpressionBuilder(f, builder)
            .method("withTrimInput", resolve(v)) //$NON-NLS-1$
            .toStatement()));
    fieldDefaultSettings.getSkipEmptyInput().ifPresent(v -> statements.add(new ExpressionBuilder(f, builder)
            .method("withSkipEmptyInput", resolve(v)) //$NON-NLS-1$
            .toStatement()));
    fieldDefaultSettings.getMalformedInputAction().ifPresent(v -> statements.add(new ExpressionBuilder(f, builder)
            .method("withOnMalformedInput", resolve(v)) //$NON-NLS-1$
            .toStatement()));
    fieldDefaultSettings.getUnmappableOutputAction().ifPresent(v -> statements.add(
            new ExpressionBuilder(f, builder)
                    .method("withOnUnmappableOutput", resolve(v)) //$NON-NLS-1$
                    .toStatement()));
    List<MethodDeclaration> fields = new ArrayList<>();
    for (PropertyDeclaration property : model.getDeclaredProperties()) {
        // Only VALUE-kind fields become text fields.
        if (TextFieldTrait.getKind(property) != TextFieldTrait.Kind.VALUE) {
            continue;
        }
        MethodDeclaration method = createGetFieldDefinition(property);
        fields.add(method);
        // builder.withField(Model::getXxxOption, this.getXxxFieldDefinition())
        statements.add(new ExpressionBuilder(f, builder)
                .method("withField", //$NON-NLS-1$
                        new TypeBuilder(f, context.resolve(model.getSymbol()))
                                .methodReference(context.getOptionGetterName(property))
                                .toExpression(),
                        new ExpressionBuilder(f, f.newThis())
                                .method(method.getName())
                                .toExpression())
                .toStatement());
    }
    return fields;
}
    /**
     * Generates the {@code get<Property>FieldDefinition()} method for a single VALUE property.
     * <p>
     * The generated method builds a {@code FieldDefinition<T>} via its builder, applying only
     * the per-field settings that are explicitly configured, and returns the built definition.
     * </p>
     * @param property the target VALUE-kind property
     * @return the generated method declaration
     */
    private MethodDeclaration createGetFieldDefinition(PropertyDeclaration property) {
        SimpleName builder = f.newSimpleName("builder"); //$NON-NLS-1$
        List<Statement> statements = new ArrayList<>();
        // FieldDefinition.Builder<T> builder = FieldDefinition.builder(<name>, <adapter>);
        statements.add(new TypeBuilder(f, context.resolve(FieldDefinition.class))
                .method("builder", //$NON-NLS-1$
                        resolve(TextFieldTrait.getName(property)),
                        buildFieldAdapter(property))
                .toLocalVariableDeclaration(
                        f.newParameterizedType(
                                context.resolve(FieldDefinition.Builder.class),
                                context.getFieldType(property)),
                        builder));
        TextFieldSettings settings = TextFieldTrait.getSettings(property);
        // per-field overrides, each emitted only when explicitly configured
        settings.getTrimInputWhitespaces().ifPresent(v -> statements.add(new ExpressionBuilder(f, builder)
                .method("withTrimInput", resolve(v)) //$NON-NLS-1$
                .toStatement()));
        settings.getSkipEmptyInput().ifPresent(v -> statements.add(new ExpressionBuilder(f, builder)
                .method("withSkipEmptyInput", resolve(v)) //$NON-NLS-1$
                .toStatement()));
        settings.getMalformedInputAction().ifPresent(v -> statements.add(new ExpressionBuilder(f, builder)
                .method("withOnMalformedInput", resolve(v)) //$NON-NLS-1$
                .toStatement()));
        settings.getUnmappableOutputAction().ifPresent(v -> statements.add(new ExpressionBuilder(f, builder)
                .method("withOnUnmappableOutput", resolve(v)) //$NON-NLS-1$
                .toStatement()));
        settings.getQuoteStyle().ifPresent(v -> statements.add(new ExpressionBuilder(f, builder)
                .method("withOutputOption", resolve(v)) //$NON-NLS-1$
                .toStatement()));
        statements.add(new ExpressionBuilder(f, builder)
                .method("build") //$NON-NLS-1$
                .toReturnStatement());
        // method name: get<Property>FieldDefinition
        JavaName name = JavaName.of(property.getName());
        name.addFirst("get"); //$NON-NLS-1$
        name.addLast("field"); //$NON-NLS-1$
        name.addLast("definition"); //$NON-NLS-1$
        return f.newMethodDeclaration(
                new JavadocBuilder(f)
                    // NOTE(review): "javafoc..." looks like a typo, but it is a resource-bundle
                    // key that must match the properties file — do not "fix" it here alone.
                    .inline(Messages.getString("AbstractTextStreamFormatGenerator.javafocGetFieldDefinitionOverview"), //$NON-NLS-1$
                            d -> d.linkMethod(
                                    context.resolve(model.getSymbol()),
                                    context.getOptionGetterName(property)))
                    .returns()
                        .text(Messages.getString("AbstractTextStreamFormatGenerator.javadocGetFieldDefinitionReturn")) //$NON-NLS-1$
                    .toJavadoc(),
                new AttributeBuilder(f)
                    .Protected()
                    .toAttributes(),
                f.newParameterizedType(
                        context.resolve(FieldDefinition.class),
                        context.getFieldType(property)),
                f.newSimpleName(name.toMemberName()),
                Collections.emptyList(),
                statements);
    }
    /**
     * Builds the field adapter expression for the given property.
     * <p>
     * When a custom adapter class is configured (per field, falling back to the field
     * defaults), a constructor reference to it is returned. Otherwise a builder chain for the
     * basic {@code ValueOptionFieldAdapter} matching the property's basic type kind is emitted,
     * applying only the settings relevant to that kind, and finished with {@code lazy()}.
     * </p>
     * @param property the target property (must have a basic type)
     * @return the adapter expression to pass to {@code FieldDefinition.builder(..)}
     */
    private Expression buildFieldAdapter(PropertyDeclaration property) {
        TextFieldSettings settings = TextFieldTrait.getSettings(property);
        // explicit adapter class wins over the kind-based default
        Value<ClassName> adapterClass = setting(settings, TextFieldSettings::getAdapterClass);
        if (adapterClass.isPresent()) {
            return new TypeBuilder(f, resolve(adapterClass.getEntity()))
                    .constructorReference()
                    .toExpression();
        }
        BasicTypeKind kind = ((BasicType) property.getType()).getKind();
        Class<? extends ValueOptionFieldAdapter<?>> basicAdapterClass = ADAPTER_TYPES.get(kind);
        assert basicAdapterClass != null;
        ExpressionBuilder builder = new TypeBuilder(f, context.resolve(basicAdapterClass)).method("builder"); //$NON-NLS-1$
        // common to all kinds
        setting(settings, TextFieldSettings::getNullFormat).ifPresent(v -> builder
                .method("withNullFormat", resolve(v))); //$NON-NLS-1$
        // kind-specific settings
        switch (kind) {
        case BOOLEAN:
            setting(settings, TextFieldSettings::getTrueFormat).ifPresent(v -> builder
                    .method("withTrueFormat", resolve(v))); //$NON-NLS-1$
            setting(settings, TextFieldSettings::getFalseFormat).ifPresent(v -> builder
                    .method("withFalseFormat", resolve(v))); //$NON-NLS-1$
            break;
        case DATE:
            setting(settings, TextFieldSettings::getDateFormat).ifPresent(v -> builder
                    .method("withDateFormat", resolve(v.toString()))); //$NON-NLS-1$
            break;
        case DATETIME:
            setting(settings, TextFieldSettings::getDateTimeFormat).ifPresent(v -> builder
                    .method("withDateTimeFormat", resolve(v.toString()))); //$NON-NLS-1$
            setting(settings, TextFieldSettings::getTimeZone).ifPresent(v -> builder
                    .method("withTimeZone", resolve(v.getId()))); //$NON-NLS-1$
            break;
        case DECIMAL:
            setting(settings, TextFieldSettings::getNumberFormat).ifPresent(v -> builder
                    .method("withNumberFormat", resolve(v.toString()))); //$NON-NLS-1$
            setting(settings, TextFieldSettings::getDecimalOutputStyle).ifPresent(v -> builder
                    .method("withOutputStyle", resolve(v))); //$NON-NLS-1$
            break;
        case BYTE:
        case INT:
        case SHORT:
        case LONG:
        case FLOAT:
        case DOUBLE:
            setting(settings, TextFieldSettings::getNumberFormat).ifPresent(v -> builder
                    .method("withNumberFormat", resolve(v.toString()))); //$NON-NLS-1$
            break;
        case TEXT:
            // no special members
            break;
        default:
            throw new AssertionError(kind);
        }
        return builder.method("lazy").toExpression(); //$NON-NLS-1$
    }
private <T> Value<T> setting(TextFieldSettings settings, Function<TextFieldSettings, Value<T>> getter) {
return getter.apply(settings).orDefault(getter.apply(fieldDefaultSettings));
}
private Optional<MethodDeclaration> createGetInputSplitter() {
if (isSplittable()) {
return Optional.of(f.newMethodDeclaration(
null,
new AttributeBuilder(f)
.annotation(context.resolve(Override.class))
.Protected()
.toAttributes(),
context.resolve(InputSplitter.class),
f.newSimpleName("getInputSplitter"), //$NON-NLS-1$
Collections.emptyList(),
Arrays.asList(new TypeBuilder(f, context.resolve(InputSplitters.class))
.method("byLineFeed") //$NON-NLS-1$
.toReturnStatement())));
} else {
return Optional.empty();
}
}
private boolean isSplittable() {
if (formatSettings.getCharset().isPresent()) {
if (!CharsetUtil.isAsciiCompatible(formatSettings.getCharset().getEntity())) {
return false;
}
}
if (formatSettings.getCompressionType().isPresent()) {
return false;
}
if (model.getDeclaredProperties().stream()
.map(TextFieldTrait::getKind)
.anyMatch(Predicate.isEqual(TextFieldTrait.Kind.LINE_NUMBER)
.or(Predicate.isEqual(TextFieldTrait.Kind.RECORD_NUMBER)))) {
return false;
}
return isSplittableInternal();
}
    /**
     * Returns whether or not the input is splittable.
     * Called only after the common checks in {@code isSplittable()} have passed
     * (no compression, ASCII-compatible charset, no line/record counter fields);
     * subclasses decide based on format-specific constraints.
     * @return {@code true} if it is splittable, otherwise {@code false}
     */
    protected abstract boolean isSplittableInternal();
    /**
     * Generates the {@code getCompressionCodecClass()} override, if compression is configured.
     * <p>
     * The generated method returns the configured codec's {@code Class} literal. When no
     * compression type is configured, no override is generated.
     * </p>
     * @return the generated method declaration, or empty when compression is not configured
     */
    private Optional<MethodDeclaration> createGetCompressionCodecClass() {
        if (formatSettings.getCompressionType().isPresent()) {
            ClassName codec = formatSettings.getCompressionType().getEntity();
            return Optional.of(f.newMethodDeclaration(
                    null,
                    new AttributeBuilder(f)
                        .annotation(context.resolve(Override.class))
                        .Protected()
                        .toAttributes(),
                    // Class<? extends CompressionCodec>
                    new TypeBuilder(f, context.resolve(Class.class))
                        .parameterize(f.newWildcardExtends(context.resolve(CompressionCodec.class)))
                        .toType(),
                    f.newSimpleName("getCompressionCodecClass"), //$NON-NLS-1$
                    Collections.emptyList(),
                    Arrays.asList(new TypeBuilder(f, resolve(codec))
                            .dotClass()
                            .toReturnStatement())));
        } else {
            return Optional.empty();
        }
    }
    /**
     * Generates the {@code afterInput(object, path, input)} override, if any property needs
     * post-read adjustment.
     * <p>
     * Per property kind: {@code IGNORE} fields are nulled out, {@code FILE_NAME} fields
     * receive the input path, and {@code LINE_NUMBER}/{@code RECORD_NUMBER} fields receive
     * the 1-origin position taken from the {@code TextInput}. {@code VALUE} fields need no
     * adjustment. When no property requires adjustment, no override is generated.
     * </p>
     * @return the generated method declaration, or empty when no adjustment is required
     */
    private Optional<MethodDeclaration> createAfterInput() {
        SimpleName object = f.newSimpleName("object"); //$NON-NLS-1$
        SimpleName path = f.newSimpleName("path"); //$NON-NLS-1$
        SimpleName input = f.newSimpleName("input"); //$NON-NLS-1$
        List<Statement> statements = new ArrayList<>();
        for (PropertyDeclaration property : model.getDeclaredProperties()) {
            switch (TextFieldTrait.getKind(property)) {
            case VALUE:
                break; // does nothing
            case IGNORE:
                // ignored fields are reset to null after each record
                statements.add(new ExpressionBuilder(f, object)
                        .method(context.getOptionSetterName(property), Models.toNullLiteral(f))
                        .toStatement());
                break;
            case FILE_NAME:
                statements.add(new ExpressionBuilder(f, object)
                        .method(context.getOptionSetterName(property), path)
                        .toStatement());
                break;
            case LINE_NUMBER:
                // getLineNumber() is 0-origin; +1 converts to the user-visible 1-origin value
                statements.add(new ExpressionBuilder(f, object)
                        .method(context.getValueSetterName(property),
                                adjustLong(property, new ExpressionBuilder(f, input)
                                        .method("getLineNumber") //$NON-NLS-1$
                                        .apply(InfixOperator.PLUS, Models.toLiteral(f, 1L))))
                        .toStatement());
                break;
            case RECORD_NUMBER:
                // getRecordIndex() is 0-origin; +1 converts to the user-visible 1-origin value
                statements.add(new ExpressionBuilder(f, object)
                        .method(context.getValueSetterName(property),
                                adjustLong(property, new ExpressionBuilder(f, input)
                                        .method("getRecordIndex") //$NON-NLS-1$
                                        .apply(InfixOperator.PLUS, Models.toLiteral(f, 1L))))
                        .toStatement());
                break;
            default:
                throw new AssertionError(TextFieldTrait.getKind(property));
            }
        }
        if (statements.isEmpty()) {
            return Optional.empty();
        } else {
            return Optional.of(f.newMethodDeclaration(
                    null,
                    new AttributeBuilder(f)
                        .annotation(context.resolve(Override.class))
                        .Protected()
                        .toAttributes(),
                    context.resolve(void.class),
                    f.newSimpleName("afterInput"), //$NON-NLS-1$
                    Arrays.asList(
                            f.newFormalParameterDeclaration(context.resolve(model.getSymbol()), object),
                            f.newFormalParameterDeclaration(context.resolve(StringOption.class), path),
                            f.newFormalParameterDeclaration(
                                    f.newParameterizedType(
                                            context.resolve(TextInput.class),
                                            context.resolve(model.getSymbol())),
                                    input)),
                    statements));
        }
    }
private Expression adjustLong(PropertyDeclaration property, ExpressionBuilder builder) {
if (AttributeUtil.hasFieldType(property, BasicTypeKind.LONG)) {
return builder.toExpression();
} else if (AttributeUtil.hasFieldType(property, BasicTypeKind.INT)) {
return builder.castTo(context.resolve(int.class)).toExpression();
} else {
throw new AssertionError(property);
}
}
    /**
     * Generates the {@code beforeOutput} hook method.
     * No property kind currently requires pre-output adjustment, so no override is
     * ever generated; this exists for symmetry with {@code createAfterInput()}.
     * @return always {@code Optional.empty()}
     */
    private Optional<MethodDeclaration> createBeforeOutput() {
        return Optional.empty();
    }
    /**
     * Resolves a {@code boolean} value into a Java boolean literal expression.
     * @param value the value
     * @return the resolved literal expression
     */
    protected Expression resolve(boolean value) {
        return Models.toLiteral(f, value);
    }
    /**
     * Resolves a {@code char} value into a Java character literal expression.
     * @param value the value
     * @return the resolved literal expression
     */
    protected Expression resolve(char value) {
        return Models.toLiteral(f, value);
    }
    /**
     * Resolves a {@link String} value into a Java string literal expression.
     * @param value the value
     * @return the resolved literal expression
     */
    protected Expression resolve(String value) {
        return Models.toLiteral(f, value);
    }
    /**
     * Resolves an enum constant into a field-access expression on its declaring class.
     * @param value the enum constant
     * @return the resolved field-access expression
     */
    protected Expression resolve(Enum<?> value) {
        return new TypeBuilder(f, context.resolve(value.getDeclaringClass()))
            .field(value.name())
            .toExpression();
    }
    /**
     * Resolves a class name into a model {@link Type}.
     * @param type the fully qualified class name
     * @return the resolved type
     */
    protected Type resolve(ClassName type) {
        return context.resolve(Models.toName(f, type.toString()));
    }
}
| asakusafw/asakusafw | directio-project/asakusa-directio-dmdl/src/main/java/com/asakusafw/dmdl/directio/text/AbstractTextStreamFormatGenerator.java | Java | apache-2.0 | 26,894 |
/*
* Copyright 2007 Sascha Weinreuter
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.intellij.plugins.relaxNG.references;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import com.intellij.codeInsight.daemon.EmptyResolveMessageProvider;
import com.intellij.codeInspection.LocalQuickFix;
import com.intellij.codeInspection.LocalQuickFixProvider;
import com.intellij.codeInspection.XmlQuickFixFactory;
import com.intellij.lang.xml.XMLLanguage;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiReference;
import com.intellij.psi.PsiReferenceProvider;
import com.intellij.psi.XmlElementFactory;
import com.intellij.psi.impl.source.resolve.reference.impl.providers.BasicAttributeValueReference;
import com.intellij.psi.impl.source.xml.SchemaPrefix;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.xml.XmlAttribute;
import com.intellij.psi.xml.XmlAttributeValue;
import com.intellij.psi.xml.XmlTag;
import com.intellij.util.ArrayUtil;
import com.intellij.util.ProcessingContext;
/*
* Created by IntelliJ IDEA.
* User: sweinreuter
* Date: 24.07.2007
*/
/**
 * Contributes a {@link PsiReference} for the namespace-prefix part of an XML attribute value
 * of the form {@code prefix:localName} (e.g. {@code type="xs:string"}), so the prefix can be
 * resolved to its {@code xmlns:prefix} declaration, renamed, and quick-fixed.
 */
public class PrefixReferenceProvider extends PsiReferenceProvider
{
	private static final Logger LOG = Logger.getInstance("#org.intellij.plugins.relaxNG.references.PrefixReferenceProvider");

	@Override
	@NotNull
	public PsiReference[] getReferencesByElement(@NotNull PsiElement element, @NotNull ProcessingContext context)
	{
		final XmlAttributeValue value = (XmlAttributeValue) element;

		final String s = value.getValue();
		final int i = s.indexOf(':');
		// no prefix, empty prefix, or the reserved "xml" prefix: nothing to resolve
		if(i <= 0 || s.startsWith("xml:"))
		{
			return PsiReference.EMPTY_ARRAY;
		}

		return new PsiReference[]{
				new PrefixReference(value, i)
		};
	}

	/**
	 * Reference covering the prefix characters of an attribute value.
	 * Resolves to the {@code xmlns:prefix} declaration on the nearest enclosing tag.
	 */
	private static class PrefixReference extends BasicAttributeValueReference implements EmptyResolveMessageProvider, LocalQuickFixProvider
	{
		public PrefixReference(XmlAttributeValue value, int length)
		{
			// offset 1 skips the opening quote; length == index of ':' == prefix length
			super(value, TextRange.from(1, length));
		}

		@Override
		@Nullable
		public PsiElement resolve()
		{
			final String prefix = getCanonicalText();
			XmlTag tag = PsiTreeUtil.getParentOfType(getElement(), XmlTag.class);
			// walk up the tag tree until a tag declares this prefix locally
			while(tag != null)
			{
				if(tag.getLocalNamespaceDeclarations().containsKey(prefix))
				{
					final XmlAttribute attribute = tag.getAttribute("xmlns:" + prefix, "");
					// range covers just the prefix part after "xmlns:"
					final TextRange textRange = TextRange.from("xmlns:".length(), prefix.length());
					return new SchemaPrefix(attribute, textRange, prefix);
				}
				tag = tag.getParentTag();
			}
			return null;
		}

		@Override
		public boolean isReferenceTo(PsiElement element)
		{
			// two SchemaPrefix elements in the same file match when their prefix names match
			if(element instanceof SchemaPrefix && element.getContainingFile() == myElement.getContainingFile())
			{
				final PsiElement e = resolve();
				if(e instanceof SchemaPrefix)
				{
					final String s = ((SchemaPrefix) e).getName();
					return s != null && s.equals(((SchemaPrefix) element).getName());
				}
			}
			return super.isReferenceTo(element);
		}

		@Nullable
		@Override
		public LocalQuickFix[] getQuickFixes()
		{
			final PsiElement element = getElement();
			final XmlElementFactory factory = XmlElementFactory.getInstance(element.getProject());
			final String value = ((XmlAttributeValue) element).getValue();
			// build a dummy tag from the local-name part to drive the NS-declaration fix
			final String[] name = value.split(":");
			final XmlTag tag = factory.createTagFromText("<" + (name.length > 1 ? name[1] : value) + " />", XMLLanguage.INSTANCE);
			return new LocalQuickFix[]{XmlQuickFixFactory.getInstance().createNSDeclarationIntentionFix(tag, getCanonicalText(), null)};
		}

		@Override
		@NotNull
		public Object[] getVariants()
		{
			// no completion variants are offered for prefixes
			return ArrayUtil.EMPTY_OBJECT_ARRAY;
		}

		@Override
		public boolean isSoft()
		{
			// a hard reference: an unresolved prefix is reported as an error
			return false;
		}

		@Override
		@NotNull
		public String getUnresolvedMessagePattern()
		{
			return "Undefined namespace prefix ''{0}''";
		}
	}
}
package org.jruby.ext.ffi.jna;
import java.util.ArrayList;
import org.jruby.runtime.ThreadContext;
/**
* An invocation session.
* This provides post-invoke cleanup.
*/
/**
 * An invocation session for a native FFI call.
 * Collects cleanup actions during argument marshalling and runs them once the
 * call has completed.
 */
final class Invocation {
    /** The Ruby thread context this invocation runs under. */
    private final ThreadContext context;

    /** Lazily created list of cleanup actions; remains null until the first registration. */
    private ArrayList<Runnable> postInvokeList;

    Invocation(ThreadContext context) {
        this.context = context;
    }

    /** Runs every registered cleanup action, in registration order. */
    void finish() {
        ArrayList<Runnable> cleanups = postInvokeList;
        if (cleanups == null) {
            return;
        }
        for (int i = 0; i < cleanups.size(); i++) {
            cleanups.get(i).run();
        }
    }

    /** Registers a cleanup action to run when {@link #finish()} is called. */
    void addPostInvoke(Runnable postInvoke) {
        if (postInvokeList == null) {
            postInvokeList = new ArrayList<Runnable>();
        }
        postInvokeList.add(postInvoke);
    }

    /** Returns the thread context associated with this invocation. */
    ThreadContext getThreadContext() {
        return context;
    }
}
| google-code/android-scripting | jruby/src/src/org/jruby/ext/ffi/jna/Invocation.java | Java | apache-2.0 | 796 |
package org.onetwo.ext.security.utils;
import java.util.Collection;
import org.onetwo.common.web.userdetails.UserDetail;
import org.onetwo.common.web.userdetails.UserRoot;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.userdetails.User;
@SuppressWarnings("serial")
public class LoginUserDetails extends User implements UserDetail, /*SsoTokenable,*/ UserRoot {

    /** Immutable identifier of the authenticated user. */
    private final long userId;
    /** Display name; mutable because it is populated after construction. */
    private String nickname;
    /** Avatar image location; mutable because it is populated after construction. */
    private String avatar;

    public LoginUserDetails(long userId, String username, String password,
            Collection<? extends GrantedAuthority> authorities) {
        super(username, password, authorities);
        this.userId = userId;
    }

    public long getUserId() {
        return this.userId;
    }

    @Override
    public String getUserName() {
        // delegates to Spring Security's User#getUsername()
        return getUsername();
    }

    @Override
    public boolean isSystemRootUser() {
        // the system root account is identified purely by its well-known id
        return this.userId == ROOT_USER_ID;
    }

    public String getNickname() {
        return this.nickname;
    }

    public void setNickname(String nickname) {
        this.nickname = nickname;
    }

    public String getAvatar() {
        return this.avatar;
    }

    public void setAvatar(String avatar) {
        this.avatar = avatar;
    }

    /*public String getToken() {
        return token;
    }

    public void setToken(String token) {
        this.token = token;
    }*/
}
| wayshall/onetwo | core/modules/security/src/main/java/org/onetwo/ext/security/utils/LoginUserDetails.java | Java | apache-2.0 | 1,289 |
/*
<samplecode>
<abstract>
A DSPKernel subclass implementing the realtime signal processing portion of the FilterDemo audio unit.
</abstract>
</samplecode>
*/
#ifndef FilterDSPKernel_hpp
#define FilterDSPKernel_hpp
#import "DSPKernel.hpp"
#import "ParameterRamper.hpp"
#import <vector>
/*
    Sanitizes a sample value by flushing "bad" numbers to zero.
    Values whose magnitude lies strictly inside (1e-15, 1e15) pass through
    unchanged; denormals fail the lower bound, infinities fail the upper
    bound, and NaNs fail both comparisons. Zero also fails both, but mapping
    zero to zero is harmless.
*/
static inline float convertBadValuesToZero(float x) {
    const float magnitude = std::fabs(x);
    const bool inNormalRange = (magnitude > 1e-15) && (magnitude < 1e15);
    return inNormalRange ? x : 0.0;
}
// AudioUnit parameter addresses exposed by this filter kernel.
enum {
    FilterParamCutoff = 0,    // lowpass cutoff (Hz at the API boundary, normalized internally)
    FilterParamResonance = 1  // resonance, in decibels
};
// Returns the square of x (plain self-product; no call to pow).
static inline double squared(double x) {
    const double value = x;
    return value * value;
}
/*
FilterDSPKernel
Performs our filter signal processing.
As a non-ObjC class, this is safe to use from render thread.
*/
/*
    FilterDSPKernel

    Performs our filter signal processing.
    As a non-ObjC class, this is safe to use from render thread.

    Implements a two-pole lowpass biquad (direct form I) per channel, with
    parameter ramping ("dezippering") for cutoff and resonance.
*/
class FilterDSPKernel : public DSPKernel {
public:
    // MARK: Types

    // Per-channel biquad delay line: previous two inputs (x1, x2) and outputs (y1, y2).
    struct FilterState {
        float x1 = 0.0;
        float x2 = 0.0;
        float y1 = 0.0;
        float y2 = 0.0;

        void clear() {
            x1 = 0.0;
            x2 = 0.0;
            y1 = 0.0;
            y2 = 0.0;
        }

        void convertBadStateValuesToZero() {
            /*
                These filters work by feedback. If an infinity or NaN should come
                into the filter input, the feedback variables can become infinity
                or NaN which will cause the filter to stop operating. This function
                clears out any bad numbers in the feedback variables.
            */
            x1 = convertBadValuesToZero(x1);
            x2 = convertBadValuesToZero(x2);
            y1 = convertBadValuesToZero(y1);
            y2 = convertBadValuesToZero(y2);
        }
    };

    // Biquad coefficients: b* feed-forward, a* feed-back (normalized a0 == 1).
    struct BiquadCoefficients {
        float a1 = 0.0;
        float a2 = 0.0;
        float b0 = 0.0;
        float b1 = 0.0;
        float b2 = 0.0;

        // frequency is normalized (fraction of Nyquist); resonance is in dB.
        void calculateLopassParams(double frequency, double resonance) {
            /*
                The transcendental function calls here could be replaced with
                interpolated table lookups or other approximations.
            */

            // Convert from decibels to linear.
            double r = pow(10.0, 0.05 * -resonance);

            double k  = 0.5 * r * sin(M_PI * frequency);
            double c1 = (1.0 - k) / (1.0 + k);
            double c2 = (1.0 + c1) * cos(M_PI * frequency);
            double c3 = (1.0 + c1 - c2) * 0.25;

            b0 = float(c3);
            b1 = float(2.0 * c3);
            b2 = float(c3);
            a1 = float(-c2);
            a2 = float(c1);
        }

        // Magnitude response at a normalized frequency (fraction of Nyquist),
        // evaluated on the unit circle of the z-plane.
        double magnitudeForFrequency( double inFreq) {
            // Cast to Double.
            double _b0 = double(b0);
            double _b1 = double(b1);
            double _b2 = double(b2);
            double _a1 = double(a1);
            double _a2 = double(a2);

            // Frequency on unit circle in z-plane.
            double zReal      = cos(M_PI * inFreq);
            double zImaginary = sin(M_PI * inFreq);

            // Zeros response.
            double numeratorReal = (_b0 * (squared(zReal) - squared(zImaginary))) + (_b1 * zReal) + _b2;
            double numeratorImaginary = (2.0 * _b0 * zReal * zImaginary) + (_b1 * zImaginary);

            double numeratorMagnitude = sqrt(squared(numeratorReal) + squared(numeratorImaginary));

            // Poles response.
            double denominatorReal = squared(zReal) - squared(zImaginary) + (_a1 * zReal) + _a2;
            double denominatorImaginary = (2.0 * zReal * zImaginary) + (_a1 * zImaginary);

            double denominatorMagnitude = sqrt(squared(denominatorReal) + squared(denominatorImaginary));

            // Total response.
            double response = numeratorMagnitude / denominatorMagnitude;

            return response;
        }
    };

    // MARK: Member Functions

    FilterDSPKernel() {}

    // Must be called before rendering; sizes the per-channel state and derives rates.
    void init(int channelCount, double inSampleRate) {
        channelStates.resize(channelCount);

        sampleRate = float(inSampleRate);
        nyquist = 0.5 * sampleRate;
        inverseNyquist = 1.0 / nyquist;
        // ~20 ms ramp for parameter dezippering
        dezipperRampDuration = (AUAudioFrameCount)floor(0.02 * sampleRate);
        cutoffRamper.init();
        resonanceRamper.init();
    }

    // Clears ramp targets and all per-channel delay lines.
    void reset() {
        cutoffRamper.reset();
        resonanceRamper.reset();
        for (FilterState& state : channelStates) {
            state.clear();
        }
    }

    // UI-thread entry point: stores clamped, normalized targets for the rampers.
    void setParameter(AUParameterAddress address, AUValue value) {
        switch (address) {
            case FilterParamCutoff:
                //cutoffRamper.setUIValue(clamp(value * inverseNyquist, 0.0f, 0.99f));
                // clamp bounds are normalized frequency (value in Hz / Nyquist)
                cutoffRamper.setUIValue(clamp(value * inverseNyquist, 0.0005444f, 0.9070295f));
                break;

            case FilterParamResonance:
                resonanceRamper.setUIValue(clamp(value, -20.0f, 20.0f));
                break;
        }
    }

    AUValue getParameter(AUParameterAddress address) {
        switch (address) {
            case FilterParamCutoff:
                // Return the goal. It is not thread safe to return the ramping value.
                //return (cutoffRamper.getUIValue() * nyquist);
                // de-normalize back to Hz, rounded to two decimal places
                return roundf((cutoffRamper.getUIValue() * nyquist) * 100) / 100;

            case FilterParamResonance:
                return resonanceRamper.getUIValue();

            default: return 12.0f * inverseNyquist;
        }
    }

    // Render-thread entry point: begins a ramp toward value over `duration` frames.
    void startRamp(AUParameterAddress address, AUValue value, AUAudioFrameCount duration) override {
        switch (address) {
            case FilterParamCutoff:
                // NOTE(review): lower clamp bound here (12 Hz normalized) differs from
                // setParameter's — intentional in the sample, but worth confirming.
                cutoffRamper.startRamp(clamp(value * inverseNyquist, 12.0f * inverseNyquist, 0.99f), duration);
                break;

            case FilterParamResonance:
                resonanceRamper.startRamp(clamp(value, -20.0f, 20.0f), duration);
                break;
        }
    }

    // Buffers are owned by the caller and must stay valid for the whole render cycle.
    void setBuffers(AudioBufferList* inBufferList, AudioBufferList* outBufferList) {
        inBufferListPtr = inBufferList;
        outBufferListPtr = outBufferList;
    }

    void process(AUAudioFrameCount frameCount, AUAudioFrameCount bufferOffset) override {
        int channelCount = int(channelStates.size());

        cutoffRamper.dezipperCheck(dezipperRampDuration);
        resonanceRamper.dezipperCheck(dezipperRampDuration);

        // For each sample.
        for (int frameIndex = 0; frameIndex < frameCount; ++frameIndex) {
            /*
                The filter coefficients are updated every sample! This is very
                expensive. You probably want to do things differently.
            */
            double cutoff    = double(cutoffRamper.getAndStep());
            double resonance = double(resonanceRamper.getAndStep());
            coeffs.calculateLopassParams(cutoff, resonance);

            int frameOffset = int(frameIndex + bufferOffset);

            for (int channel = 0; channel < channelCount; ++channel) {
                FilterState& state = channelStates[channel];

                float* in  = (float*)inBufferListPtr->mBuffers[channel].mData  + frameOffset;
                float* out = (float*)outBufferListPtr->mBuffers[channel].mData + frameOffset;

                // direct form I biquad difference equation
                float x0 = *in;
                float y0 = (coeffs.b0 * x0) + (coeffs.b1 * state.x1) + (coeffs.b2 * state.x2) - (coeffs.a1 * state.y1) - (coeffs.a2 * state.y2);

                *out = y0;

                state.x2 = state.x1;
                state.x1 = x0;
                state.y2 = state.y1;
                state.y1 = y0;
            }
        }

        // Squelch any blowups once per cycle.
        for (int channel = 0; channel < channelCount; ++channel) {
            channelStates[channel].convertBadStateValuesToZero();
        }
    }

    // MARK: Member Variables

private:
    std::vector<FilterState> channelStates;  // one delay line per channel
    BiquadCoefficients coeffs;

    float sampleRate = 44100.0;
    float nyquist = 0.5 * sampleRate;
    float inverseNyquist = 1.0 / nyquist;
    AUAudioFrameCount dezipperRampDuration;

    AudioBufferList* inBufferListPtr = nullptr;
    AudioBufferList* outBufferListPtr = nullptr;

public:

    // Parameters.
    // Initial values: cutoff ~400 Hz normalized against 44.1 kHz; resonance 20 dB.
    ParameterRamper cutoffRamper = 400.0 / 44100.0;
    ParameterRamper resonanceRamper = 20.0;
};
#endif /* FilterDSPKernel_hpp */
| eviathan/Salt | Pepper/Code Examples/Apple/AudioUnitV3ExampleABasicAudioUnitExtensionandHostImplementation/Filter/Shared/FilterDSPKernel.hpp | C++ | apache-2.0 | 7,597 |
// import { Domain } from '../Domain';
// import { GetDomain } from './GetDomain';
//
// /**
// * Find domain from object or type
// */
// export function FindDomain(target: Function | object): Domain | undefined {
// let prototype;
// if (typeof target === 'function') {
// prototype = target.prototype;
// } else {
// prototype = target;
// }
//
// while (prototype) {
// const domain = GetDomain(prototype);
// if (domain) {
// // console.log('FOUND');
// return domain;
// }
// // console.log('NOT FOUND!!!');
// prototype = Reflect.getPrototypeOf(prototype);
// }
// return;
// }
| agentframework/agentframework | src/domain/Domain/Helpers/FindDomain.ts | TypeScript | apache-2.0 | 641 |
/*
* Copyright (C) 2015 P100 OG, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.shiftconnects.android.auth.example.util;
import com.google.gson.Gson;
import com.google.gson.JsonParseException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.lang.reflect.Type;
import retrofit.converter.ConversionException;
import retrofit.converter.Converter;
import retrofit.mime.MimeUtil;
import retrofit.mime.TypedInput;
import retrofit.mime.TypedOutput;
/**
* A {@link Converter} which uses GSON for serialization and deserialization of entities.
*
* @author Jake Wharton (jw@squareup.com)
*/
/**
 * A {@link Converter} which uses GSON for serialization and deserialization of entities.
 *
 * @author Jake Wharton (jw@squareup.com)
 */
public class GsonConverter implements Converter {
  private final Gson gson;
  // default charset used when the response's MIME type does not declare one
  private String charset;

  /**
   * Create an instance using the supplied {@link Gson} object for conversion. Encoding to JSON and
   * decoding from JSON (when no charset is specified by a header) will use UTF-8.
   */
  public GsonConverter(Gson gson) {
    this(gson, "UTF-8");
  }

  /**
   * Create an instance using the supplied {@link Gson} object for conversion. Encoding to JSON and
   * decoding from JSON (when no charset is specified by a header) will use the specified charset.
   */
  public GsonConverter(Gson gson, String charset) {
    this.gson = gson;
    this.charset = charset;
  }

  /**
   * Deserializes the response body into an object of the given type.
   * The charset declared in the body's MIME type takes precedence over the default.
   * Both I/O failures and malformed JSON are surfaced as {@link ConversionException}.
   */
  @Override public Object fromBody(TypedInput body, Type type) throws ConversionException {
    String charset = this.charset;
    if (body.mimeType() != null) {
      charset = MimeUtil.parseCharset(body.mimeType(), charset);
    }
    InputStreamReader isr = null;
    try {
      isr = new InputStreamReader(body.in(), charset);
      return gson.fromJson(isr, type);
    } catch (IOException e) {
      throw new ConversionException(e);
    } catch (JsonParseException e) {
      throw new ConversionException(e);
    } finally {
      // close quietly: a close failure must not mask the original outcome
      if (isr != null) {
        try {
          isr.close();
        } catch (IOException ignored) {
        }
      }
    }
  }

  /** Serializes the object to JSON using the configured charset. */
  @Override public TypedOutput toBody(Object object) {
    try {
      return new JsonTypedOutput(gson.toJson(object).getBytes(charset), charset);
    } catch (UnsupportedEncodingException e) {
      // the charset was validated implicitly at first use; an invalid one is a programming error
      throw new AssertionError(e);
    }
  }

  /** In-memory JSON request body carrying its charset in the MIME type. */
  private static class JsonTypedOutput implements TypedOutput {
    private final byte[] jsonBytes;
    private final String mimeType;

    JsonTypedOutput(byte[] jsonBytes, String encode) {
      this.jsonBytes = jsonBytes;
      this.mimeType = "application/json; charset=" + encode;
    }

    @Override public String fileName() {
      return null;
    }

    @Override public String mimeType() {
      return mimeType;
    }

    @Override public long length() {
      return jsonBytes.length;
    }

    @Override public void writeTo(OutputStream out) throws IOException {
      out.write(jsonBytes);
    }
  }
}
| shiftconnects/android-auth-manager | sample/src/main/java/com/shiftconnects/android/auth/example/util/GsonConverter.java | Java | apache-2.0 | 3,430 |
/*
* Copyright Strimzi authors.
* License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).
*/
package io.strimzi.api.kafka.model.connect.build;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import io.strimzi.api.kafka.model.UnknownPropertyPreserving;
import io.strimzi.crdgenerator.annotations.Description;
import lombok.EqualsAndHashCode;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
/**
 * Abstract baseclass for different representations of connect build outputs, discriminated by
 * {@link #getType() type}: {@code docker} pushes to a Docker-compatible registry,
 * {@code imagestream} pushes to an OpenShift ImageStream.
 */
@JsonTypeInfo(
        use = JsonTypeInfo.Id.NAME,
        include = JsonTypeInfo.As.EXISTING_PROPERTY,
        property = "type"
)
@JsonSubTypes(
        {
                @JsonSubTypes.Type(value = DockerOutput.class, name = Output.TYPE_DOCKER),
                @JsonSubTypes.Type(value = ImageStreamOutput.class, name = Output.TYPE_IMAGESTREAM)
        }
)
@JsonInclude(JsonInclude.Include.NON_NULL)
@EqualsAndHashCode
public abstract class Output implements UnknownPropertyPreserving, Serializable {
    private static final long serialVersionUID = 1L;

    // discriminator values matching the @JsonSubTypes mapping above
    public static final String TYPE_DOCKER = "docker";
    public static final String TYPE_IMAGESTREAM = "imagestream";

    private String image;
    // catch-all for properties not covered by the schema, preserved on round-trip
    private Map<String, Object> additionalProperties = new HashMap<>(0);

    @Description("Output type. " +
            "Must be either `docker` for pushing the newly build image to Docker compatible registry or `imagestream` for pushing the image to OpenShift ImageStream. " +
            "Required.")
    public abstract String getType();

    @Description("The name of the image which will be built. " +
            "Required")
    @JsonProperty(required = true)
    public String getImage() {
        return image;
    }

    public void setImage(String image) {
        this.image = image;
    }

    @Override
    public Map<String, Object> getAdditionalProperties() {
        return this.additionalProperties;
    }

    @Override
    public void setAdditionalProperty(String name, Object value) {
        this.additionalProperties.put(name, value);
    }
}
| scholzj/barnabas | api/src/main/java/io/strimzi/api/kafka/model/connect/build/Output.java | Java | apache-2.0 | 2,326 |
/**
*
*/
package com.transcend.rds.worker;
import java.util.Arrays;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import org.hibernate.Session;
import org.slf4j.Logger;
import org.springframework.transaction.annotation.Transactional;
import com.msi.tough.cf.json.DatabagParameter;
import com.msi.tough.core.Appctx;
import com.msi.tough.core.HibernateUtil;
import com.msi.tough.core.JsonUtil;
import com.msi.tough.model.AccountBean;
import com.msi.tough.model.rds.RdsDbinstance;
import com.msi.tough.model.rds.RdsDbparameterGroup;
import com.msi.tough.model.rds.RdsParameter;
import com.msi.tough.query.ErrorResponse;
import com.msi.tough.query.QueryFaults;
import com.msi.tough.query.ServiceRequestContext;
import com.msi.tough.rds.ValidationManager;
import com.msi.tough.rds.json.RDSConfigDatabagItem;
import com.msi.tough.rds.json.RDSDatabag;
import com.msi.tough.rds.json.RDSParameterGroupDatabagItem;
import com.msi.tough.utils.AccountUtil;
import com.msi.tough.utils.ChefUtil;
import com.msi.tough.utils.ConfigurationUtil;
import com.msi.tough.utils.Constants;
import com.msi.tough.utils.RDSQueryFaults;
import com.msi.tough.utils.rds.InstanceEntity;
import com.msi.tough.utils.rds.ParameterGroupEntity;
import com.msi.tough.utils.rds.RDSUtilities;
import com.msi.tough.workflow.core.AbstractWorker;
import com.transcend.rds.message.ModifyDBParameterGroupActionMessage.ModifyDBParameterGroupActionRequestMessage;
import com.transcend.rds.message.ModifyDBParameterGroupActionMessage.ModifyDBParameterGroupActionResultMessage;
import com.transcend.rds.message.RDSMessage.Parameter;
/**
* @author tdhite
*/
public class ModifyDBParameterGroupActionWorker extends
AbstractWorker<ModifyDBParameterGroupActionRequestMessage, ModifyDBParameterGroupActionResultMessage> {
private final static Logger logger = Appctx
.getLogger(ModifyDBParameterGroupActionWorker.class.getName());
	/**
	 * We need a local copy of this doWork to provide the transactional
	 * annotation. Transaction management is handled by the annotation, which
	 * can only be on a concrete class.
	 *
	 * @param req the ModifyDBParameterGroup request message to process
	 * @return the result message produced by the superclass worker pipeline
	 * @throws Exception if request processing or the transaction fails
	 */
	@Transactional
	public ModifyDBParameterGroupActionResultMessage doWork(
			ModifyDBParameterGroupActionRequestMessage req) throws Exception {
		logger.debug("Performing work for ModifyDBParameterGroupAction.");
		// delegate to the framework's doWork, which eventually calls doWork0
		return super.doWork(req, getSession());
	}
/**
* modifyDBParameterGroup ************************************************
* This Operation modifies the parameters associated with the named
* DBParameterGroup. It essentially adds/updates parameters associated with
* a DBParameterGroup If parameter exists then update if parameter doesn't
* exist then insert Request: DBParameterGroupName(R) List of Parameter
* records(R) Parameters: List of up to 20 parameter records Response:
* DBParameterGroup Exceptions: DBParameterGroupNotFound
* InvalidDBParameterGroupState Processing 1. Confirm that ParamaterGroup
* exists and is in the appropriate state 2. Update the Parameter records by
* inserting or updating new parameter 3. Return response
*/
@Override
protected ModifyDBParameterGroupActionResultMessage doWork0(ModifyDBParameterGroupActionRequestMessage req,
ServiceRequestContext context) throws Exception {
logger.debug("ModifyDBParameterGroup action is called.");
final Session sess = HibernateUtil.newSession();
final AccountBean ac = context.getAccountBean();
final ModifyDBParameterGroupActionResultMessage.Builder resp = ModifyDBParameterGroupActionResultMessage.newBuilder();
try {
sess.beginTransaction();
final long userId = ac.getId();
final String grpName = ValidationManager.validateIdentifier(
req.getDbParameterGroupName(), 255, true);
final List<Parameter> pList = req.getParametersList();
final int pListLen = pList.size();
logger.info("ModifyDBParameterGroup: " + " UserID = " + userId
+ " ParameterGroupName = " + grpName
+ " Total Number of Listed Parameters = " + pListLen);
if (grpName.equals("default.mysql5.5")) {
throw RDSQueryFaults
.InvalidClientTokenId("You do not have privilege to modify default DBParameterGroup.");
}
// check that DBParameterGroup exists
final RdsDbparameterGroup pGrpRec = ParameterGroupEntity
.getParameterGroup(sess, grpName, ac.getId());
if (pGrpRec == null) {
throw RDSQueryFaults.DBParameterGroupNotFound();
}
final Collection<RdsDbinstance> dbInstances = InstanceEntity
.selectDBInstancesByParameterGroup(sess, grpName, -1, ac);
// make sure that all DBInstances using this DBParameterGroup are in
// available state
for (final RdsDbinstance dbinstance : dbInstances) {
if (!dbinstance.getDbinstanceStatus().equals(
RDSUtilities.STATUS_AVAILABLE)) {
throw RDSQueryFaults
.InvalidDBParameterGroupState("Currently there are DBInstance(s) that use this DBParameterGroup and it"
+ " is not in available state.");
}
}
// reset the parameters in the DB
List<RdsParameter> forRebootPending = new LinkedList<RdsParameter>();
final String paramGrpFamily = pGrpRec.getDbparameterGroupFamily();
final AccountBean sac = AccountUtil.readAccount(sess, 1L);
for (final Parameter p : pList) {
final RdsParameter target = ParameterGroupEntity.getParameter(
sess, grpName, p.getParameterName(), userId);
if (target == null) {
throw RDSQueryFaults.InvalidParameterValue(p
.getParameterName() + " parameter does not exist.");
}
logger.debug("Current target parameter: " + target.toString());
if (!target.getIsModifiable()) {
throw RDSQueryFaults.InvalidParameterValue(p
.getParameterName()
+ " is not modifiable parameter.");
}
// TODO validate p.getParameterValue along with
// p.getParameterName to ensure the value is allowed
else if (p.getApplyMethod().equals(
RDSUtilities.PARM_APPMETHOD_IMMEDIATE)) {
if (target.getApplyType().equals(
RDSUtilities.PARM_APPTYPE_STATIC)) {
throw QueryFaults
.InvalidParameterCombination(target
.getParameterName()
+ " is not dynamic. You can only"
+ " use \"pending-reboot\" as valid ApplyMethod for this parameter.");
}
target.setParameterValue(p.getParameterValue());
target.setSource(Constants.USER);
sess.save(target);
} else if (p.getApplyMethod().equals(
RDSUtilities.PARM_APPMETHOD_PENDING)) {
final RdsParameter temp = new RdsParameter();
temp.setParameterName(p.getParameterName());
temp.setApplyMethod(p.getApplyMethod());
temp.setParameterValue(p.getParameterValue());
forRebootPending.add(temp);
}
}
// Delete and regenerate the Databag
logger.debug("There are " + dbInstances.size()
+ " databags to modify.");
for (final RdsDbinstance instance : dbInstances) {
logger.debug("Currently updating the databag for DBInstance "
+ instance.getDbinstanceId());
final String databagName = "rds-" + ac.getId() + "-"
+ instance.getDbinstanceId();
logger.debug("Deleting the databag " + databagName);
ChefUtil.deleteDatabagItem(databagName, "config");
final String postWaitUrl = (String) ConfigurationUtil
.getConfiguration(Arrays.asList(new String[] {
"TRANSCEND_URL", instance.getAvailabilityZone() }));
final String servletUrl = (String) ConfigurationUtil
.getConfiguration(Arrays.asList(new String[] {
"SERVLET_URL", instance.getAvailabilityZone() }));
final RDSConfigDatabagItem configDataBagItem = new RDSConfigDatabagItem(
"config", instance.getAllocatedStorage().toString(),
instance.getMasterUsername(),
instance.getMasterUserPassword(),
instance.getAutoMinorVersionUpgrade(),
instance.getEngine(), instance.getEngineVersion(),
instance.getDbName(), instance
.getBackupRetentionPeriod().toString(),
instance.getPreferredBackupWindow(),
instance.getPreferredMaintenanceWindow(), instance
.getPort().toString(), postWaitUrl, servletUrl,
instance.getDbinstanceId(), "rds." + ac.getId() + "."
+ instance.getDbinstanceId(), ac.getId(), instance.getDbinstanceClass(), "false");
final RDSParameterGroupDatabagItem parameterGroupDatabagItem = new RDSParameterGroupDatabagItem(
"parameters", pGrpRec);
parameterGroupDatabagItem.getParameters().remove("read_only");
parameterGroupDatabagItem.getParameters().put(
"read_only",
DatabagParameter.factory("boolean",
"" + instance.getRead_only(), true, "dynamic"));
parameterGroupDatabagItem.getParameters().remove("port");
parameterGroupDatabagItem.getParameters().put(
"port",
DatabagParameter.factory("integer",
"" + instance.getPort(), false, "static"));
final RDSDatabag bag = new RDSDatabag(configDataBagItem,
parameterGroupDatabagItem);
logger.debug("Databag: "
+ JsonUtil.toJsonPrettyPrintString(bag));
logger.debug("Regenerating the databag " + databagName);
ChefUtil.createDatabagItem(databagName, "config", bag.toJson());
}
if (forRebootPending != null && forRebootPending.size() > 0) {
// forRebootPending is now a list of static parameters and
// dynamic parameters with pending-reboot ApplyMethod
forRebootPending = ParameterGroupEntity
.modifyParamGroupWithPartialList(sess, pGrpRec,
forRebootPending, userId);
// code below may need to be rewritten for better performance;
// Hibernate may be useful to improve the snippet below
for (final RdsDbinstance instance : dbInstances) {
final List<RdsParameter> alreadyPending = instance
.getPendingRebootParameters();
if (alreadyPending == null || alreadyPending.size() == 0) {
instance.setPendingRebootParameters(forRebootPending);
// instance.setDbinstanceStatus(RDSUtilities.STATUS_MODIFYING);
sess.save(instance);
} else {
for (final RdsParameter newParam : forRebootPending) {
boolean found = false;
int i = 0;
while (!found && i < alreadyPending.size()) {
if (alreadyPending.get(i).getParameterName()
.equals(newParam.getParameterName())) {
alreadyPending.get(i).setParameterValue(
newParam.getParameterValue());
found = true;
}
++i;
}
if (!found) {
alreadyPending.add(newParam);
}
}
}
}
}
// build response document - returns DBParameterGroupName
resp.setDbParameterGroupName(grpName);
logger.debug("Committing all the changes...");
sess.getTransaction().commit();
} catch (final ErrorResponse rde) {
sess.getTransaction().rollback();
throw rde;
} catch (final Exception e) {
e.printStackTrace();
sess.getTransaction().rollback();
final String msg = "CreateInstance: Class: " + e.getClass()
+ "Msg:" + e.getMessage();
logger.error(msg);
throw RDSQueryFaults.InternalFailure();
} finally {
sess.close();
}
return resp.buildPartial();
}
}
| TranscendComputing/TopStackRDS | src/com/transcend/rds/worker/ModifyDBParameterGroupActionWorker.java | Java | apache-2.0 | 11,169 |